Diffstat (limited to 'scripts/lib')
-rw-r--r--  scripts/lib/argparse_oe.py | 182
-rw-r--r--  scripts/lib/build_perf/__init__.py | 24
-rw-r--r--  scripts/lib/build_perf/html.py | 12
-rw-r--r--  scripts/lib/build_perf/html/measurement_chart.html | 168
-rw-r--r--  scripts/lib/build_perf/html/report.html | 408
-rw-r--r--  scripts/lib/build_perf/report.py | 342
-rw-r--r--  scripts/lib/build_perf/scrape-html-report.js | 56
-rw-r--r--  scripts/lib/buildstats.py | 368
-rw-r--r--  scripts/lib/checklayer/__init__.py | 466
-rw-r--r--  scripts/lib/checklayer/case.py | 9
-rw-r--r--  scripts/lib/checklayer/cases/__init__.py | 0
-rw-r--r--  scripts/lib/checklayer/cases/bsp.py | 206
-rw-r--r--  scripts/lib/checklayer/cases/common.py | 135
-rw-r--r--  scripts/lib/checklayer/cases/distro.py | 28
-rw-r--r--  scripts/lib/checklayer/context.py | 17
-rw-r--r--  scripts/lib/devtool/__init__.py | 404
-rw-r--r--  scripts/lib/devtool/build.py | 92
-rw-r--r--  scripts/lib/devtool/build_image.py | 164
-rw-r--r--  scripts/lib/devtool/build_sdk.py | 48
-rw-r--r--  scripts/lib/devtool/deploy.py | 378
-rw-r--r--  scripts/lib/devtool/export.py | 109
-rw-r--r--  scripts/lib/devtool/ide_plugins/__init__.py | 282
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_code.py | 463
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_none.py | 53
-rwxr-xr-x  scripts/lib/devtool/ide_sdk.py | 1009
-rw-r--r--  scripts/lib/devtool/import.py | 134
-rw-r--r--  scripts/lib/devtool/menuconfig.py | 76
-rw-r--r--  scripts/lib/devtool/package.py | 50
-rw-r--r--  scripts/lib/devtool/runqemu.py | 64
-rw-r--r--  scripts/lib/devtool/sdk.py | 330
-rw-r--r--  scripts/lib/devtool/search.py | 109
-rw-r--r--  scripts/lib/devtool/standard.py | 2396
-rw-r--r--  scripts/lib/devtool/upgrade.py | 715
-rw-r--r--  scripts/lib/devtool/utilcmds.py | 242
-rw-r--r--  scripts/lib/recipetool/__init__.py | 0
-rw-r--r--  scripts/lib/recipetool/append.py | 477
-rw-r--r--  scripts/lib/recipetool/create.py | 1439
-rw-r--r--  scripts/lib/recipetool/create_buildsys.py | 875
-rw-r--r--  scripts/lib/recipetool/create_buildsys_python.py | 1124
-rw-r--r--  scripts/lib/recipetool/create_go.py | 777
-rw-r--r--  scripts/lib/recipetool/create_kernel.py | 89
-rw-r--r--  scripts/lib/recipetool/create_kmod.py | 142
-rw-r--r--  scripts/lib/recipetool/create_npm.py | 299
-rw-r--r--  scripts/lib/recipetool/edit.py | 44
-rw-r--r--  scripts/lib/recipetool/licenses.csv | 37
-rw-r--r--  scripts/lib/recipetool/newappend.py | 79
-rw-r--r--  scripts/lib/recipetool/setvar.py | 66
-rw-r--r--  scripts/lib/resulttool/__init__.py | 0
-rw-r--r--  scripts/lib/resulttool/junit.py | 77
-rw-r--r--  scripts/lib/resulttool/log.py | 107
-rwxr-xr-x  scripts/lib/resulttool/manualexecution.py | 235
-rw-r--r--  scripts/lib/resulttool/merge.py | 46
-rw-r--r--  scripts/lib/resulttool/regression.py | 450
-rw-r--r--  scripts/lib/resulttool/report.py | 315
-rw-r--r--  scripts/lib/resulttool/resultutils.py | 274
-rw-r--r--  scripts/lib/resulttool/store.py | 125
-rw-r--r--  scripts/lib/resulttool/template/test_report_full_text.txt | 79
-rw-r--r--  scripts/lib/scriptpath.py | 32
-rw-r--r--  scripts/lib/scriptutils.py | 274
-rw-r--r--  scripts/lib/wic/__init__.py | 10
-rw-r--r--  scripts/lib/wic/canned-wks/common.wks.inc | 3
-rw-r--r--  scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg | 27
-rw-r--r--  scripts/lib/wic/canned-wks/directdisk-bootloader-config.wks | 8
-rw-r--r--  scripts/lib/wic/canned-wks/directdisk-gpt.wks | 10
-rw-r--r--  scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks | 23
-rw-r--r--  scripts/lib/wic/canned-wks/directdisk.wks | 8
-rw-r--r--  scripts/lib/wic/canned-wks/efi-bootdisk.wks.in | 3
-rw-r--r--  scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in | 3
-rw-r--r--  scripts/lib/wic/canned-wks/mkefidisk.wks | 11
-rw-r--r--  scripts/lib/wic/canned-wks/mkhybridiso.wks | 7
-rw-r--r--  scripts/lib/wic/canned-wks/qemuloongarch.wks | 3
-rw-r--r--  scripts/lib/wic/canned-wks/qemuriscv.wks | 3
-rw-r--r--  scripts/lib/wic/canned-wks/qemux86-directdisk.wks | 8
-rw-r--r--  scripts/lib/wic/canned-wks/sdimage-bootpart.wks | 6
-rw-r--r--  scripts/lib/wic/canned-wks/systemd-bootdisk.wks | 11
-rw-r--r--  scripts/lib/wic/engine.py | 644
-rw-r--r--  scripts/lib/wic/filemap.py | 583
-rw-r--r--  scripts/lib/wic/help.py | 1180
-rw-r--r--  scripts/lib/wic/ksparser.py | 298
-rw-r--r--  scripts/lib/wic/misc.py | 266
-rw-r--r--  scripts/lib/wic/partition.py | 551
-rw-r--r--  scripts/lib/wic/pluginbase.py | 144
-rw-r--r--  scripts/lib/wic/plugins/imager/direct.py | 704
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg-biosplusefi.py | 213
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg-efi.py | 435
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg-partition.py | 162
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg-pcbios.py | 209
-rw-r--r--  scripts/lib/wic/plugins/source/empty.py | 89
-rw-r--r--  scripts/lib/wic/plugins/source/isoimage-isohybrid.py | 463
-rw-r--r--  scripts/lib/wic/plugins/source/rawcopy.py | 115
-rw-r--r--  scripts/lib/wic/plugins/source/rootfs.py | 236
91 files changed, 0 insertions, 23437 deletions
diff --git a/scripts/lib/argparse_oe.py b/scripts/lib/argparse_oe.py
deleted file mode 100644
index 176b732bbc..0000000000
--- a/scripts/lib/argparse_oe.py
+++ /dev/null
@@ -1,182 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import sys
8import argparse
9from collections import defaultdict, OrderedDict
10
11class ArgumentUsageError(Exception):
12 """Exception class you can raise (and catch) in order to show the help"""
13 def __init__(self, message, subcommand=None):
14 self.message = message
15 self.subcommand = subcommand
16
17class ArgumentParser(argparse.ArgumentParser):
18 """Our own version of argparse's ArgumentParser"""
19 def __init__(self, *args, **kwargs):
20 kwargs.setdefault('formatter_class', OeHelpFormatter)
21 self._subparser_groups = OrderedDict()
22 super(ArgumentParser, self).__init__(*args, **kwargs)
23 self._positionals.title = 'arguments'
24 self._optionals.title = 'options'
25
26 def error(self, message):
27 """error(message: string)
28
29 Prints a help message incorporating the message to stderr and
30 exits.
31 """
32 self._print_message('%s: error: %s\n' % (self.prog, message), sys.stderr)
33 self.print_help(sys.stderr)
34 sys.exit(2)
35
36 def error_subcommand(self, message, subcommand):
37 if subcommand:
38 action = self._get_subparser_action()
39 try:
40 subparser = action._name_parser_map[subcommand]
41 except KeyError:
42 self.error('no subparser for name "%s"' % subcommand)
43 else:
44 subparser.error(message)
45
46 self.error(message)
47
48 def add_subparsers(self, *args, **kwargs):
49 if 'dest' not in kwargs:
50 kwargs['dest'] = '_subparser_name'
51
52 ret = super(ArgumentParser, self).add_subparsers(*args, **kwargs)
53 # Need a way of accessing the parent parser
54 ret._parent_parser = self
55 # Ensure our class gets instantiated
56 ret._parser_class = ArgumentSubParser
57 # Hacky way of adding a method to the subparsers object
58 ret.add_subparser_group = self.add_subparser_group
59 return ret
60
61 def add_subparser_group(self, groupname, groupdesc, order=0):
62 self._subparser_groups[groupname] = (groupdesc, order)
63
64 def parse_args(self, args=None, namespace=None):
65 """Parse arguments, using the correct subparser to show the error."""
66 args, argv = self.parse_known_args(args, namespace)
67 if argv:
68 message = 'unrecognized arguments: %s' % ' '.join(argv)
69 if self._subparsers:
70 subparser = self._get_subparser(args)
71 subparser.error(message)
72 else:
73 self.error(message)
74 sys.exit(2)
75 return args
76
77 def _get_subparser(self, args):
78 action = self._get_subparser_action()
79 if action.dest == argparse.SUPPRESS:
80 self.error('cannot get subparser, the subparser action dest is suppressed')
81
82 name = getattr(args, action.dest)
83 try:
84 return action._name_parser_map[name]
85 except KeyError:
86 self.error('no subparser for name "%s"' % name)
87
88 def _get_subparser_action(self):
89 if not self._subparsers:
90 self.error('cannot return the subparser action, no subparsers added')
91
92 for action in self._subparsers._group_actions:
93 if isinstance(action, argparse._SubParsersAction):
94 return action
95
96
97class ArgumentSubParser(ArgumentParser):
98 def __init__(self, *args, **kwargs):
99 if 'group' in kwargs:
100 self._group = kwargs.pop('group')
101 if 'order' in kwargs:
102 self._order = kwargs.pop('order')
103 super(ArgumentSubParser, self).__init__(*args, **kwargs)
104
105 def parse_known_args(self, args=None, namespace=None):
106 # This works around argparse not handling optional positional arguments being
107 # intermixed with other options. A pretty horrible hack, but we're not left
108 # with much choice given that the bug in argparse exists and it's difficult
109 # to subclass.
110 # Borrowed from http://stackoverflow.com/questions/20165843/argparse-how-to-handle-variable-number-of-arguments-nargs
111 # with an extra workaround (in format_help() below) for the positional
112 # arguments disappearing from the --help output, as well as structural tweaks.
113 # Originally simplified from http://bugs.python.org/file30204/test_intermixed.py
114 positionals = self._get_positional_actions()
115 for action in positionals:
116 # deactivate positionals
117 action.save_nargs = action.nargs
118 action.nargs = 0
119
120 namespace, remaining_args = super(ArgumentSubParser, self).parse_known_args(args, namespace)
121 for action in positionals:
122 # remove the empty positional values from namespace
123 if hasattr(namespace, action.dest):
124 delattr(namespace, action.dest)
125 for action in positionals:
126 action.nargs = action.save_nargs
127 # parse positionals
128 namespace, extras = super(ArgumentSubParser, self).parse_known_args(remaining_args, namespace)
129 return namespace, extras
130
131 def format_help(self):
132 # Quick, restore the positionals!
133 positionals = self._get_positional_actions()
134 for action in positionals:
135 if hasattr(action, 'save_nargs'):
136 action.nargs = action.save_nargs
137 return super(ArgumentParser, self).format_help()
138
139
140class OeHelpFormatter(argparse.HelpFormatter):
141 def _format_action(self, action):
142 if hasattr(action, '_get_subactions'):
143 # subcommands list
144 groupmap = defaultdict(list)
145 ordermap = {}
146 subparser_groups = action._parent_parser._subparser_groups
147 groups = sorted(subparser_groups.keys(), key=lambda item: subparser_groups[item][1], reverse=True)
148 for subaction in self._iter_indented_subactions(action):
149 parser = action._name_parser_map[subaction.dest]
150 group = getattr(parser, '_group', None)
151 groupmap[group].append(subaction)
152 if group not in groups:
153 groups.append(group)
154 order = getattr(parser, '_order', 0)
155 ordermap[subaction.dest] = order
156
157 lines = []
158 if len(groupmap) > 1:
159 groupindent = ' '
160 else:
161 groupindent = ''
162 for group in groups:
163 subactions = groupmap[group]
164 if not subactions:
165 continue
166 if groupindent:
167 if not group:
168 group = 'other'
169 groupdesc = subparser_groups.get(group, (group, 0))[0]
170 lines.append(' %s:' % groupdesc)
171 for subaction in sorted(subactions, key=lambda item: ordermap[item.dest], reverse=True):
172 lines.append('%s%s' % (groupindent, self._format_action(subaction).rstrip()))
173 return '\n'.join(lines)
174 else:
175 return super(OeHelpFormatter, self)._format_action(action)
176
177def int_positive(value):
178 ivalue = int(value)
179 if ivalue <= 0:
180 raise argparse.ArgumentTypeError(
181 "%s is not a positive int value" % value)
182 return ivalue
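For context, a minimal sketch of how the classes above are meant to be wired together; the tool, subcommand and handler names are hypothetical, only the argparse_oe API itself comes from this file:

from argparse_oe import ArgumentParser, ArgumentUsageError, int_positive

parser = ArgumentParser(description='example tool')
subparsers = parser.add_subparsers()
subparsers.add_subparser_group('basic', 'Basic commands', order=10)
parser_run = subparsers.add_parser('run', help='run something',
                                   group='basic', order=5)
parser_run.add_argument('-j', '--jobs', type=int_positive, default=1)

def run_command(args):
    # Hypothetical handler; raising ArgumentUsageError routes the message
    # through the named subparser's error output
    if args.jobs > 8:
        raise ArgumentUsageError('too many jobs requested', 'run')

args = parser.parse_args(['run', '-j', '4'])
try:
    run_command(args)
except ArgumentUsageError as ae:
    parser.error_subcommand(ae.message, ae.subcommand)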
diff --git a/scripts/lib/build_perf/__init__.py b/scripts/lib/build_perf/__init__.py
deleted file mode 100644
index dcbb78042d..0000000000
--- a/scripts/lib/build_perf/__init__.py
+++ /dev/null
@@ -1,24 +0,0 @@
1#
2# Copyright (c) 2017, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Build performance test library functions"""
7
8def print_table(rows, row_fmt=None):
9 """Print data table"""
10 if not rows:
11 return
12 if not row_fmt:
13 row_fmt = ['{:{wid}} '] * len(rows[0])
14
15 # Go through the data to get maximum cell widths
16 num_cols = len(row_fmt)
17 col_widths = [0] * num_cols
18 for row in rows:
19 for i, val in enumerate(row):
20 col_widths[i] = max(col_widths[i], len(str(val)))
21
22 for row in rows:
23 print(*[row_fmt[i].format(col, wid=col_widths[i]) for i, col in enumerate(row)])
24
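A quick illustration of print_table() with invented data; each column is padded to the width of its widest cell:

rows = [('Measurement', 'Mean', 'Stdev'),
        ('elapsed_time', '5:20.0', '12.1 s'),
        ('tmpdir size', '4.20 GiB', '0.10 GiB')]
print_table(rows)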
diff --git a/scripts/lib/build_perf/html.py b/scripts/lib/build_perf/html.py
deleted file mode 100644
index d1273c9c50..0000000000
--- a/scripts/lib/build_perf/html.py
+++ /dev/null
@@ -1,12 +0,0 @@
1#
2# Copyright (c) 2017, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Helper module for HTML reporting"""
7from jinja2 import Environment, PackageLoader
8
9
10env = Environment(loader=PackageLoader('build_perf', 'html'))
11
12template = env.get_template('report.html')
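A hypothetical render call for the template loaded above; the context keys (title, metadata, test_data) mirror the variables referenced in report.html:

from build_perf import html

page = html.template.render(title='Build performance report',
                            metadata={}, test_data=[])
with open('report.html', 'w') as f:
    f.write(page)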
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html
deleted file mode 100644
index 86435273cf..0000000000
--- a/scripts/lib/build_perf/html/measurement_chart.html
+++ /dev/null
@@ -1,168 +0,0 @@
1<script type="module">
2 // Get raw data
3 const rawData = [
4 {% for sample in measurement.samples %}
5 [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'],
6 {% endfor %}
7 ];
8
9 const convertToMinute = (time) => {
10 return time[0]*60 + time[1] + time[2]/60 + time[3]/3600;
11 }
12
13 // Update value format to either minutes or leave as size value
14 const updateValue = (value) => {
15 // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds]
16 return Array.isArray(value) ? convertToMinute(value) : value
17 }
18
19 // Convert raw data to the format: [time, value]
20 const data = rawData.map(([commit, value, time]) => {
21 return [
22 // The Date object takes values in milliseconds rather than seconds. So to use a Unix timestamp we have to multiply it by 1000.
23 new Date(time * 1000).getTime(),
24 // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds]
25 updateValue(value)
26 ]
27 });
28
29 const commitCountList = rawData.map(([commit, value, time]) => {
30 return commit
31 });
32
33 const commitCountData = rawData.map(([commit, value, time]) => {
34 return updateValue(value)
35 });
36
37 // Set chart options
38 const option_start_time = {
39 tooltip: {
40 trigger: 'axis',
41 enterable: true,
42 position: function (point, params, dom, rect, size) {
43 return [point[0], '0%'];
44 },
45 formatter: function (param) {
46 const value = param[0].value[1]
47 const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
48 const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
49
50 // Add commit hash to the tooltip as a link
51 const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
52 if ('{{ measurement.value_type.quantity }}' == 'time') {
53 const hours = Math.floor(value/60)
54 const minutes = Math.floor(value % 60)
55 const seconds = Math.floor((value * 60) % 60)
56 return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
57 }
58 return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
59 ;}
60 },
61 xAxis: {
62 type: 'time',
63 },
64 yAxis: {
65 name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB',
66 type: 'value',
67 min: function(value) {
68 return Math.round(value.min - 0.5);
69 },
70 max: function(value) {
71 return Math.round(value.max + 0.5);
72 }
73 },
74 dataZoom: [
75 {
76 type: 'slider',
77 xAxisIndex: 0,
78 filterMode: 'none'
79 },
80 ],
81 series: [
82 {
83 name: '{{ measurement.value_type.quantity }}',
84 type: 'line',
85 symbol: 'none',
86 data: data
87 }
88 ]
89 };
90
91 const option_commit_count = {
92 tooltip: {
93 trigger: 'axis',
94 enterable: true,
95 position: function (point, params, dom, rect, size) {
96 return [point[0], '0%'];
97 },
98 formatter: function (param) {
99 const value = param[0].value
100 const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
101 const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
102 // Add commit hash to the tooltip as a link
103 const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
104 if ('{{ measurement.value_type.quantity }}' == 'time') {
105 const hours = Math.floor(value/60)
106 const minutes = Math.floor(value % 60)
107 const seconds = Math.floor((value * 60) % 60)
108 return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
109 }
110 return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
111 ;}
112 },
113 xAxis: {
114 name: 'Commit count',
115 type: 'category',
116 data: commitCountList
117 },
118 yAxis: {
119 name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB',
120 type: 'value',
121 min: function(value) {
122 return Math.round(value.min - 0.5);
123 },
124 max: function(value) {
125 return Math.round(value.max + 0.5);
126 }
127 },
128 dataZoom: [
129 {
130 type: 'slider',
131 xAxisIndex: 0,
132 filterMode: 'none'
133 },
134 ],
135 series: [
136 {
137 name: '{{ measurement.value_type.quantity }}',
138 type: 'line',
139 symbol: 'none',
140 data: commitCountData
141 }
142 ]
143 };
144
145 // Draw chart
146 const draw_chart = (chart_id, option) => {
147 let chart_name
148 const chart_div = document.getElementById(chart_id);
149 // Set dark mode
150 if (window.matchMedia('(prefers-color-scheme: dark)').matches) {
151 chart_name= echarts.init(chart_div, 'dark', {
152 height: 320
153 });
154 } else {
155 chart_name= echarts.init(chart_div, null, {
156 height: 320
157 });
158 }
159 // Change chart size with browser resize
160 window.addEventListener('resize', function() {
161 chart_name.resize();
162 });
163 return chart_name.setOption(option);
164 }
165
166 draw_chart('{{ chart_elem_start_time_id }}', option_start_time)
167 draw_chart('{{ chart_elem_commit_count_id }}', option_commit_count)
168</script>
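For reference, the shape of one rawData row emitted by the Jinja loop at the top of this template, with invented values; time measurements arrive as the [hours, minutes, seconds, milliseconds] array produced by TimeVal.gv_value() in build_perf/report.py:

sample_row = [1234,              # commit_num
              [1, 23, 45, 678],  # mean duration as [h, m, s, ms]
              1712345678.0,      # start_time, Unix seconds
              'deadbeefcafe']    # commit hash (invented)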
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html
deleted file mode 100644
index 28cd80e738..0000000000
--- a/scripts/lib/build_perf/html/report.html
+++ /dev/null
@@ -1,408 +0,0 @@
1<!DOCTYPE html>
2<html lang="en">
3<head>
4{# Scripts, for visualization#}
5<!--START-OF-SCRIPTS-->
6<script src="https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js"></script>
7
8{# Render measurement result charts #}
9{% for test in test_data %}
10 {% if test.status == 'SUCCESS' %}
11 {% for measurement in test.measurements %}
12 {% set chart_elem_start_time_id = test.name + '_' + measurement.name + '_chart_start_time' %}
13 {% set chart_elem_commit_count_id = test.name + '_' + measurement.name + '_chart_commit_count' %}
14 {% include 'measurement_chart.html' %}
15 {% endfor %}
16 {% endif %}
17{% endfor %}
18
19<!--END-OF-SCRIPTS-->
20
21{# Styles #}
22<style>
23:root {
24 --text: #000;
25 --bg: #fff;
26 --h2heading: #707070;
27 --link: #0000EE;
28 --trtopborder: #9ca3af;
29 --trborder: #e5e7eb;
30 --chartborder: #f0f0f0;
31 }
32.meta-table {
33 font-size: 14px;
34 text-align: left;
35 border-collapse: collapse;
36}
37.summary {
38 font-size: 14px;
39 text-align: left;
40 border-collapse: collapse;
41}
42.measurement {
43 padding: 8px 0px 8px 8px;
44 border: 2px solid var(--chartborder);
45 margin: 1.5rem 0;
46}
47.details {
48 margin: 0;
49 font-size: 12px;
50 text-align: left;
51 border-collapse: collapse;
52}
53.details th {
54 padding-right: 8px;
55}
56.details.plain th {
57 font-weight: normal;
58}
59.preformatted {
60 font-family: monospace;
61 white-space: pre-wrap;
62 background-color: #f0f0f0;
63 margin-left: 10px;
64}
65.card-container {
66 border-bottom-width: 1px;
67 padding: 1.25rem 3rem;
68 box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1);
69 border-radius: 0.25rem;
70}
71body {
72 font-family: 'Helvetica', sans-serif;
73 margin: 3rem 8rem;
74 background-color: var(--bg);
75 color: var(--text);
76}
77h1 {
78 text-align: center;
79}
80h2 {
81 font-size: 1.5rem;
82 margin-bottom: 0px;
83 color: var(--h2heading);
84 padding-top: 1.5rem;
85}
86h3 {
87 font-size: 1.3rem;
88 margin: 0px;
89 color: var(--h2heading);
90 padding: 1.5rem 0;
91}
92h4 {
93 font-size: 14px;
94 font-weight: lighter;
95 line-height: 1.2rem;
96 margin: auto;
97 padding-top: 1rem;
98}
99table {
100 margin-top: 1.5rem;
101 line-height: 2rem;
102}
103tr {
104 border-bottom: 1px solid var(--trborder);
105}
106tr:first-child {
107 border-bottom: 1px solid var(--trtopborder);
108}
109tr:last-child {
110 border-bottom: none;
111}
112a {
113 text-decoration: none;
114 font-weight: bold;
115 color: var(--link);
116}
117a:hover {
118 color: #8080ff;
119}
120button {
121 background-color: #F3F4F6;
122 border: none;
123 outline: none;
124 cursor: pointer;
125 padding: 10px 12px;
126 transition: 0.3s;
127 border-radius: 8px;
128 color: #3A4353;
129}
130button:hover {
131 background-color: #d6d9e0;
132}
133.tab button.active {
134 background-color: #d6d9e0;
135}
136@media (prefers-color-scheme: dark) {
137 :root {
138 --text: #e9e8fa;
139 --bg: #0F0C28;
140 --h2heading: #B8B7CB;
141 --link: #87cefa;
142 --trtopborder: #394150;
143 --trborder: #212936;
144 --chartborder: #b1b0bf;
145 }
146 button {
147 background-color: #28303E;
148 color: #fff;
149 }
150 button:hover {
151 background-color: #545a69;
152 }
153 .tab button.active {
154 background-color: #545a69;
155 }
156}
157</style>
158
159<title>{{ title }}</title>
160</head>
161
162{% macro poky_link(commit) -%}
163 <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a>
164{%- endmacro %}
165
166<body><div>
167 <h1 style="text-align: center;">Performance Test Report</h1>
168 {# Test metadata #}
169 <h2>General</h2>
170 <h4>The table provides an overview of the comparison between two selected commits from the same branch.</h4>
171 <table class="meta-table" style="width: 100%">
172 <tr>
173 <th></th>
174 <th>Current commit</th>
175 <th>Comparing with</th>
176 </tr>
177 {% for key, item in metadata.items() %}
178 <tr>
179 <th>{{ item.title }}</th>
180 {%if key == 'commit' %}
181 <td>{{ poky_link(item.value) }}</td>
182 <td>{{ poky_link(item.value_old) }}</td>
183 {% else %}
184 <td>{{ item.value }}</td>
185 <td>{{ item.value_old }}</td>
186 {% endif %}
187 </tr>
188 {% endfor %}
189 </table>
190
191 {# Test result summary #}
192 <h2>Test result summary</h2>
193 <h4>The test summary presents a thorough breakdown of each test conducted on the branch, including details such as build time and disk space consumption. Additionally, it gives insights into the average time taken for test execution, along with absolute and relative values for a better understanding.</h4>
194 <table class="summary" style="width: 100%">
195 <tr>
196 <th>Test name</th>
197 <th>Measurement description</th>
198 <th>Mean value</th>
199 <th>Absolute difference</th>
200 <th>Relative difference</th>
201 </tr>
202 {% for test in test_data %}
203 {% if test.status == 'SUCCESS' %}
204 {% for measurement in test.measurements %}
205 <tr {{ row_style }}>
206 {% if loop.index == 1 %}
207 <td><a href=#{{test.name}}>{{ test.name }}: {{ test.description }}</a></td>
208 {% else %}
209 {# add empty cell in place of the test name#}
210 <td></td>
211 {% endif %}
212 {% if measurement.absdiff > 0 %}
213 {% set result_style = "color: red" %}
214 {% elif measurement.absdiff == measurement.absdiff %} {# self-comparison is false only for NaN #}
215 {% set result_style = "color: green" %}
216 {% else %}
217 {% set result_style = "color: orange" %}
218 {% endif %}
219 {% if measurement.reldiff|abs > 2 %}
220 {% set result_style = result_style + "; font-weight: bold" %}
221 {% endif %}
222 <td>{{ measurement.description }}</td>
223 <td style="font-weight: bold">{{ measurement.value.mean }}</td>
224 <td style="{{ result_style }}">{{ measurement.absdiff_str }}</td>
225 <td style="{{ result_style }}">{{ measurement.reldiff_str }}</td>
226 </tr>
227 {% endfor %}
228 {% else %}
229 <tr><td style="font-weight: bold; color: red;">{{ test.status }}</td>
230 <td></td> <td></td> <td></td> <td></td></tr>
231 {% endif %}
232 {% endfor %}
233 </table>
234
235 {# Detailed test results #}
236 <h2>Test details</h2>
237 <h4>The following section provides details of each test, accompanied by charts representing build time and disk usage over time or by commit number.</h4>
238 {% for test in test_data %}
239 <h3 style="color: #000;" id={{test.name}}>{{ test.name }}: {{ test.description }}</h3>
240 {% if test.status == 'SUCCESS' %}
241 <div class="card-container">
242 {% for measurement in test.measurements %}
243 <div class="measurement">
244 <h3>{{ measurement.description }}</h3>
245 <div style="font-weight:bold;">
246 <span style="font-size: 23px;">{{ measurement.value.mean }}</span>
247 <span style="font-size: 20px; margin-left: 12px">
248 {% if measurement.absdiff > 0 %}
249 <span style="color: red">
250 {% elif measurement.absdiff == measurement.absdiff %}
251 <span style="color: green">
252 {% else %}
253 <span style="color: orange">
254 {% endif %}
255 {{ measurement.absdiff_str }} ({{measurement.reldiff_str}})
256 </span></span>
257 </div>
258 {# Table for trendchart and the statistics #}
259 <table style="width: 100%">
260 <tr>
261 <td style="width: 75%">
262 {# Linechart #}
263 <div class="tab {{ test.name }}_{{ measurement.name }}_tablinks">
264 <button class="tablinks active" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_start_time', '{{ test.name }}_{{ measurement.name }}')">Chart with start time</button>
265 <button class="tablinks" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_commit_count', '{{ test.name }}_{{ measurement.name }}')">Chart with commit count</button>
266 </div>
267 <div class="{{ test.name }}_{{ measurement.name }}_tabcontent">
268 <div id="{{ test.name }}_{{ measurement.name }}_start_time" class="tabcontent" style="display: block;">
269 <div id="{{ test.name }}_{{ measurement.name }}_chart_start_time"></div>
270 </div>
271 <div id="{{ test.name }}_{{ measurement.name }}_commit_count" class="tabcontent" style="display: none;">
272 <div id="{{ test.name }}_{{ measurement.name }}_chart_commit_count"></div>
273 </div>
274 </div>
275 </td>
276 <td>
277 {# Measurement statistics #}
278 <table class="details plain">
279 <tr>
280 <th>Test runs</th><td>{{ measurement.value.sample_cnt }}</td>
281 </tr><tr>
282 <th>-/+</th><td>-{{ measurement.value.minus }} / +{{ measurement.value.plus }}</td>
283 </tr><tr>
284 <th>Min</th><td>{{ measurement.value.min }}</td>
285 </tr><tr>
286 <th>Max</th><td>{{ measurement.value.max }}</td>
287 </tr><tr>
288 <th>Stdev</th><td>{{ measurement.value.stdev }}</td>
289 </tr><tr>
290 <th><div id="{{ test.name }}_{{ measurement.name }}_chart_png"></div></th>
291 <td></td>
292 </tr>
293 </table>
294 </td>
295 </tr>
296 </table>
297
298 {# Task and recipe summary from buildstats #}
299 {% if 'buildstats' in measurement %}
300 Task resource usage
301 <table class="details" style="width:100%">
302 <tr>
303 <th>Number of tasks</th>
304 <th>Top consumers of cputime</th>
305 </tr>
306 <tr>
307 <td style="vertical-align: top">{{ measurement.buildstats.tasks.count }} ({{ measurement.buildstats.tasks.change }})</td>
308 {# Table of most resource-hungry tasks #}
309 <td>
310 <table class="details plain">
311 {% for diff in measurement.buildstats.top_consumer|reverse %}
312 <tr>
313 <th>{{ diff.pkg }}.{{ diff.task }}</th>
314 <td>{{ '%0.0f' % diff.value2 }} s</td>
315 </tr>
316 {% endfor %}
317 </table>
318 </td>
319 </tr>
320 <tr>
321 <th>Biggest increase in cputime</th>
322 <th>Biggest decrease in cputime</th>
323 </tr>
324 <tr>
325 {# Table biggest increase in resource usage #}
326 <td>
327 <table class="details plain">
328 {% for diff in measurement.buildstats.top_increase|reverse %}
329 <tr>
330 <th>{{ diff.pkg }}.{{ diff.task }}</th>
331 <td>{{ '%+0.0f' % diff.absdiff }} s</td>
332 </tr>
333 {% endfor %}
334 </table>
335 </td>
336 {# Table biggest decrease in resource usage #}
337 <td>
338 <table class="details plain">
339 {% for diff in measurement.buildstats.top_decrease %}
340 <tr>
341 <th>{{ diff.pkg }}.{{ diff.task }}</th>
342 <td>{{ '%+0.0f' % diff.absdiff }} s</td>
343 </tr>
344 {% endfor %}
345 </table>
346 </td>
347 </tr>
348 </table>
349
350 {# Recipe version differences #}
351 {% if measurement.buildstats.ver_diff %}
352 <div style="margin-top: 16px">Recipe version changes</div>
353 <table class="details">
354 {% for head, recipes in measurement.buildstats.ver_diff.items() %}
355 <tr>
356 <th colspan="2">{{ head }}</th>
357 </tr>
358 {% for name, info in recipes|sort %}
359 <tr>
360 <td>{{ name }}</td>
361 <td>{{ info }}</td>
362 </tr>
363 {% endfor %}
364 {% endfor %}
365 </table>
366 {% else %}
367 <div style="margin-top: 16px">No recipe version changes detected</div>
368 {% endif %}
369 {% endif %}
370 </div>
371 {% endfor %}
372 </div>
373 {# Unsuccessful test #}
374 {% else %}
375 <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }}
376 {% if test.err_type %}<span style="font-size: 75%; font-weight: normal">({{ test.err_type }})</span>{% endif %}
377 </span>
378 <div class="preformatted">{{ test.message }}</div>
379 {% endif %}
380 {% endfor %}
381</div>
382
383<script>
384function openChart(event, chartType, chartName) {
385 let i, tabcontents, tablinks
386 tabcontents = document.querySelectorAll(`.${chartName}_tabcontent > .tabcontent`);
387 tabcontents.forEach((tabcontent) => {
388 tabcontent.style.display = "none";
389 });
390
391 tablinks = document.querySelectorAll(`.${chartName}_tablinks > .tablinks`);
392 tablinks.forEach((tabLink) => {
393 tabLink.classList.remove('active');
394 });
395
396 const targetTab = document.getElementById(chartType)
397 targetTab.style.display = "block";
398
399 // Call resize on the ECharts instance to redraw the chart
400 const chartContainer = targetTab.querySelector('div')
401 echarts.init(chartContainer).resize();
402
403 event.currentTarget.classList.add('active');
404}
405</script>
406
407</body>
408</html>
diff --git a/scripts/lib/build_perf/report.py b/scripts/lib/build_perf/report.py
deleted file mode 100644
index f4e6a92e09..0000000000
--- a/scripts/lib/build_perf/report.py
+++ /dev/null
@@ -1,342 +0,0 @@
1#
2# Copyright (c) 2017, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Handling of build perf test reports"""
7from collections import OrderedDict, namedtuple
8from collections.abc import Mapping
9from datetime import datetime, timezone
10from numbers import Number
11from statistics import mean, stdev, variance
12
13
14AggregateTestData = namedtuple('AggregateTestData', ['metadata', 'results'])
15
16
17def isofmt_to_timestamp(string):
18 """Convert timestamp string in ISO 8601 format into unix timestamp"""
19 if '.' in string:
20 dt = datetime.strptime(string, '%Y-%m-%dT%H:%M:%S.%f')
21 else:
22 dt = datetime.strptime(string, '%Y-%m-%dT%H:%M:%S')
23 return dt.replace(tzinfo=timezone.utc).timestamp()
24
25
26def metadata_xml_to_json(elem):
27 """Convert metadata xml into JSON format"""
28 assert elem.tag == 'metadata', "Invalid metadata file format"
29
30 def _xml_to_json(elem):
31 """Convert xml element to JSON object"""
32 out = OrderedDict()
33 for child in elem:
34 key = child.attrib.get('name', child.tag)
35 if len(child):
36 out[key] = _xml_to_json(child)
37 else:
38 out[key] = child.text
39 return out
40 return _xml_to_json(elem)
41
42
43def results_xml_to_json(elem):
44 """Convert results xml into JSON format"""
45 rusage_fields = ('ru_utime', 'ru_stime', 'ru_maxrss', 'ru_minflt',
46 'ru_majflt', 'ru_inblock', 'ru_oublock', 'ru_nvcsw',
47 'ru_nivcsw')
48 iostat_fields = ('rchar', 'wchar', 'syscr', 'syscw', 'read_bytes',
49 'write_bytes', 'cancelled_write_bytes')
50
51 def _read_measurement(elem):
52 """Convert measurement to JSON"""
53 data = OrderedDict()
54 data['type'] = elem.tag
55 data['name'] = elem.attrib['name']
56 data['legend'] = elem.attrib['legend']
57 values = OrderedDict()
58
59 # SYSRES measurement
60 if elem.tag == 'sysres':
61 for subel in elem:
62 if subel.tag == 'time':
63 values['start_time'] = isofmt_to_timestamp(subel.attrib['timestamp'])
64 values['elapsed_time'] = float(subel.text)
65 elif subel.tag == 'rusage':
66 rusage = OrderedDict()
67 for field in rusage_fields:
68 if 'time' in field:
69 rusage[field] = float(subel.attrib[field])
70 else:
71 rusage[field] = int(subel.attrib[field])
72 values['rusage'] = rusage
73 elif subel.tag == 'iostat':
74 values['iostat'] = OrderedDict([(f, int(subel.attrib[f]))
75 for f in iostat_fields])
76 elif subel.tag == 'buildstats_file':
77 values['buildstats_file'] = subel.text
78 else:
79 raise TypeError("Unknown sysres value element '{}'".format(subel.tag))
80 # DISKUSAGE measurement
81 elif elem.tag == 'diskusage':
82 values['size'] = int(elem.find('size').text)
83 else:
84 raise Exception("Unknown measurement tag '{}'".format(elem.tag))
85 data['values'] = values
86 return data
87
88 def _read_testcase(elem):
89 """Convert testcase into JSON"""
90 assert elem.tag == 'testcase', "Expecting 'testcase' element instead of {}".format(elem.tag)
91
92 data = OrderedDict()
93 data['name'] = elem.attrib['name']
94 data['description'] = elem.attrib['description']
95 data['status'] = 'SUCCESS'
96 data['start_time'] = isofmt_to_timestamp(elem.attrib['timestamp'])
97 data['elapsed_time'] = float(elem.attrib['time'])
98 measurements = OrderedDict()
99
100 for subel in elem:
101 if subel.tag == 'error' or subel.tag == 'failure':
102 data['status'] = subel.tag.upper()
103 data['message'] = subel.attrib['message']
104 data['err_type'] = subel.attrib['type']
105 data['err_output'] = subel.text
106 elif subel.tag == 'skipped':
107 data['status'] = 'SKIPPED'
108 data['message'] = subel.text
109 else:
110 measurements[subel.attrib['name']] = _read_measurement(subel)
111 data['measurements'] = measurements
112 return data
113
114 def _read_testsuite(elem):
115 """Convert suite to JSON"""
116 assert elem.tag == 'testsuite', \
117 "Expecting 'testsuite' element instead of {}".format(elem.tag)
118
119 data = OrderedDict()
120 if 'hostname' in elem.attrib:
121 data['tester_host'] = elem.attrib['hostname']
122 data['start_time'] = isofmt_to_timestamp(elem.attrib['timestamp'])
123 data['elapsed_time'] = float(elem.attrib['time'])
124 tests = OrderedDict()
125
126 for case in elem:
127 tests[case.attrib['name']] = _read_testcase(case)
128 data['tests'] = tests
129 return data
130
131 # Main function
132 assert elem.tag == 'testsuites', "Invalid test report format"
133 assert len(elem) == 1, "Too many testsuites"
134
135 return _read_testsuite(elem[0])
136
137
138def aggregate_metadata(metadata):
139 """Aggregate metadata into one, basically a sanity check"""
140 mutable_keys = ('pretty_name', 'version_id')
141
142 def aggregate_obj(aggregate, obj, assert_str=True):
143 """Aggregate objects together"""
144 assert type(aggregate) is type(obj), \
145 "Type mismatch: {} != {}".format(type(aggregate), type(obj))
146 if isinstance(obj, Mapping):
147 assert set(aggregate.keys()) == set(obj.keys())
148 for key, val in obj.items():
149 aggregate_obj(aggregate[key], val, key not in mutable_keys)
150 elif isinstance(obj, list):
151 assert len(aggregate) == len(obj)
152 for i, val in enumerate(obj):
153 aggregate_obj(aggregate[i], val)
154 elif not isinstance(obj, str) or (isinstance(obj, str) and assert_str):
155 assert aggregate == obj, "Data mismatch {} != {}".format(aggregate, obj)
156
157 if not metadata:
158 return {}
159
160 # Do the aggregation
161 aggregate = metadata[0].copy()
162 for testrun in metadata[1:]:
163 aggregate_obj(aggregate, testrun)
164 aggregate['testrun_count'] = len(metadata)
165 return aggregate
166
167
168def aggregate_data(data):
169 """Aggregate multiple test results JSON structures into one"""
170
171 mutable_keys = ('status', 'message', 'err_type', 'err_output')
172
173 class SampleList(list):
174 """Container for numerical samples"""
175 pass
176
177 def new_aggregate_obj(obj):
178 """Create new object for aggregate"""
179 if isinstance(obj, Number):
180 new_obj = SampleList()
181 new_obj.append(obj)
182 elif isinstance(obj, str):
183 new_obj = obj
184 else:
185 # Lists and dicts are kept as is
186 new_obj = obj.__class__()
187 aggregate_obj(new_obj, obj)
188 return new_obj
189
190 def aggregate_obj(aggregate, obj, assert_str=True):
191 """Recursive "aggregation" of JSON objects"""
192 if isinstance(obj, Number):
193 assert isinstance(aggregate, SampleList)
194 aggregate.append(obj)
195 return
196
197 assert type(aggregate) == type(obj), \
198 "Type mismatch: {} != {}".format(type(aggregate), type(obj))
199 if isinstance(obj, Mapping):
200 for key, val in obj.items():
201 if not key in aggregate:
202 aggregate[key] = new_aggregate_obj(val)
203 else:
204 aggregate_obj(aggregate[key], val, key not in mutable_keys)
205 elif isinstance(obj, list):
206 for i, val in enumerate(obj):
207 if i >= len(aggregate):
208 aggregate.append(new_aggregate_obj(val))
209 else:
210 aggregate_obj(aggregate[i], val)
211 elif isinstance(obj, str):
212 # Sanity check for data
213 if assert_str:
214 assert aggregate == obj, "Data mismatch {} != {}".format(aggregate, obj)
215 else:
216 raise Exception("BUG: unable to aggregate '{}' ({})".format(type(obj), str(obj)))
217
218 if not data:
219 return {}
220
221 # Do the aggregation
222 aggregate = data[0].__class__()
223 for testrun in data:
224 aggregate_obj(aggregate, testrun)
225 return aggregate
226
227
228class MeasurementVal(float):
229 """Base class representing measurement values"""
230 gv_data_type = 'number'
231
232 def gv_value(self):
233 """Value formatting for visualization"""
234 if self != self:
235 return "null"
236 else:
237 return self
238
239
240class TimeVal(MeasurementVal):
241 """Class representing time values"""
242 quantity = 'time'
243 gv_title = 'elapsed time'
244 gv_data_type = 'timeofday'
245
246 def hms(self):
247 """Split time into hours, minutes and seconeds"""
248 hhh = int(abs(self) / 3600)
249 mmm = int((abs(self) % 3600) / 60)
250 sss = abs(self) % 60
251 return hhh, mmm, sss
252
253 def __str__(self):
254 if self != self:
255 return "nan"
256 hh, mm, ss = self.hms()
257 sign = '-' if self < 0 else ''
258 if hh > 0:
259 return '{}{:d}:{:02d}:{:02.0f}'.format(sign, hh, mm, ss)
260 elif mm > 0:
261 return '{}{:d}:{:04.1f}'.format(sign, mm, ss)
262 elif ss > 1:
263 return '{}{:.1f} s'.format(sign, ss)
264 else:
265 return '{}{:.2f} s'.format(sign, ss)
266
267 def gv_value(self):
268 """Value formatting for visualization"""
269 if self != self:
270 return "null"
271 hh, mm, ss = self.hms()
272 return [hh, mm, int(ss), int(ss*1000) % 1000]
273
274
275class SizeVal(MeasurementVal):
276 """Class representing time values"""
277 quantity = 'size'
278 gv_title = 'size in MiB'
279 gv_data_type = 'number'
280
281 def __str__(self):
282 if self != self:
283 return "nan"
284 if abs(self) < 1024:
285 return '{:.1f} kiB'.format(self)
286 elif abs(self) < 1048576:
287 return '{:.2f} MiB'.format(self / 1024)
288 else:
289 return '{:.2f} GiB'.format(self / 1048576)
290
291 def gv_value(self):
292 """Value formatting for visualization"""
293 if self != self:
294 return "null"
295 return self / 1024
296
297def measurement_stats(meas, prefix='', time=0):
298 """Get statistics of a measurement"""
299 if not meas:
300 return {prefix + 'sample_cnt': 0,
301 prefix + 'mean': MeasurementVal('nan'),
302 prefix + 'stdev': MeasurementVal('nan'),
303 prefix + 'variance': MeasurementVal('nan'),
304 prefix + 'min': MeasurementVal('nan'),
305 prefix + 'max': MeasurementVal('nan'),
306 prefix + 'minus': MeasurementVal('nan'),
307 prefix + 'plus': MeasurementVal('nan')}
308
309 stats = {'name': meas['name']}
310 if meas['type'] == 'sysres':
311 val_cls = TimeVal
312 values = meas['values']['elapsed_time']
313 elif meas['type'] == 'diskusage':
314 val_cls = SizeVal
315 values = meas['values']['size']
316 else:
317 raise Exception("Unknown measurement type '{}'".format(meas['type']))
318 stats['val_cls'] = val_cls
319 stats['quantity'] = val_cls.quantity
320 stats[prefix + 'sample_cnt'] = len(values)
321
322 # Record the start time for both the sysres and diskusage measurement types
323 start_time = time
324 mean_val = val_cls(mean(values))
325 min_val = val_cls(min(values))
326 max_val = val_cls(max(values))
327
328 stats[prefix + 'mean'] = mean_val
329 if len(values) > 1:
330 stats[prefix + 'stdev'] = val_cls(stdev(values))
331 stats[prefix + 'variance'] = val_cls(variance(values))
332 else:
333 stats[prefix + 'stdev'] = float('nan')
334 stats[prefix + 'variance'] = float('nan')
335 stats[prefix + 'min'] = min_val
336 stats[prefix + 'max'] = max_val
337 stats[prefix + 'minus'] = val_cls(mean_val - min_val)
338 stats[prefix + 'plus'] = val_cls(max_val - mean_val)
339 stats[prefix + 'start_time'] = start_time
340
341 return stats
342
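A sketch of the intended flow with invented numbers: aggregate the results of several test runs, then compute statistics for one measurement. The key names follow the structure built by results_xml_to_json() above:

runs = [{'sysres': {'type': 'sysres', 'name': 'sysres', 'legend': 'bb',
                    'values': {'elapsed_time': 310.0}}},
        {'sysres': {'type': 'sysres', 'name': 'sysres', 'legend': 'bb',
                    'values': {'elapsed_time': 330.0}}}]
agg = aggregate_data(runs)           # elapsed_time becomes a SampleList
stats = measurement_stats(agg['sysres'])
print(stats['mean'])                 # TimeVal(320.0), printed as '5:20.0'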
diff --git a/scripts/lib/build_perf/scrape-html-report.js b/scripts/lib/build_perf/scrape-html-report.js
deleted file mode 100644
index 05a1f57001..0000000000
--- a/scripts/lib/build_perf/scrape-html-report.js
+++ /dev/null
@@ -1,56 +0,0 @@
1var fs = require('fs');
2var system = require('system');
3var page = require('webpage').create();
4
5// Examine console log for message from chart drawing
6page.onConsoleMessage = function(msg) {
7 console.log(msg);
8 if (msg === "ALL CHARTS READY") {
9 window.charts_ready = true;
10 }
11 else if (msg.slice(0, 11) === "CHART READY") {
12 var chart_id = msg.split(" ")[2];
13 console.log('grabbing ' + chart_id);
14 var png_data = page.evaluate(function (chart_id) {
15 var chart_div = document.getElementById(chart_id + '_png');
16 return chart_div.outerHTML;
17 }, chart_id);
18 fs.write(args[2] + '/' + chart_id + '.png', png_data, 'w');
19 }
20};
21
22// Check command line arguments
23var args = system.args;
24if (args.length != 3) {
25 console.log("USAGE: " + args[0] + " REPORT_HTML OUT_DIR\n");
26 phantom.exit(1);
27}
28
29// Open the web page
30page.open(args[1], function(status) {
31 if (status == 'fail') {
32 console.log("Failed to open file '" + args[1] + "'");
33 phantom.exit(1);
34 }
35});
36
37// Check status every 100 ms
38interval = window.setInterval(function () {
39 //console.log('waiting');
40 if (window.charts_ready) {
41 clearTimeout(timer);
42 clearInterval(interval);
43
44 var fname = args[1].replace(/\/+$/, "").split("/").pop()
45 console.log("saving " + fname);
46 fs.write(args[2] + '/' + fname, page.content, 'w');
47 phantom.exit(0);
48 }
49}, 100);
50
51// Time-out after 10 seconds
52timer = window.setTimeout(function () {
53 clearInterval(interval);
54 console.log("ERROR: timeout");
55 phantom.exit(1);
56}, 10000);
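The scraper runs under PhantomJS; per the usage check above it would be invoked along these lines (file names invented):

import subprocess

subprocess.check_call(['phantomjs', 'scrape-html-report.js',
                       'report.html', 'out_dir'])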
diff --git a/scripts/lib/buildstats.py b/scripts/lib/buildstats.py
deleted file mode 100644
index 6db60d5bcf..0000000000
--- a/scripts/lib/buildstats.py
+++ /dev/null
@@ -1,368 +0,0 @@
1#
2# Copyright (c) 2017, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Functionality for analyzing buildstats"""
7import json
8import logging
9import os
10import re
11from collections import namedtuple
12from statistics import mean
13
14
15log = logging.getLogger()
16
17
18taskdiff_fields = ('pkg', 'pkg_op', 'task', 'task_op', 'value1', 'value2',
19 'absdiff', 'reldiff')
20TaskDiff = namedtuple('TaskDiff', ' '.join(taskdiff_fields))
21
22
23class BSError(Exception):
24 """Error handling of buildstats"""
25 pass
26
27
28class BSTask(dict):
29 def __init__(self, *args, **kwargs):
30 self['start_time'] = None
31 self['elapsed_time'] = None
32 self['status'] = None
33 self['iostat'] = {}
34 self['rusage'] = {}
35 self['child_rusage'] = {}
36 super(BSTask, self).__init__(*args, **kwargs)
37
38 @property
39 def cputime(self):
40 """Sum of user and system time taken by the task"""
41 rusage = self['rusage']['ru_stime'] + self['rusage']['ru_utime']
42 if self['child_rusage']:
43 # Child rusage may have been optimized out
44 return rusage + self['child_rusage']['ru_stime'] + self['child_rusage']['ru_utime']
45 else:
46 return rusage
47
48 @property
49 def walltime(self):
50 """Elapsed wall clock time"""
51 return self['elapsed_time']
52
53 @property
54 def read_bytes(self):
55 """Bytes read from the block layer"""
56 return self['iostat']['read_bytes']
57
58 @property
59 def write_bytes(self):
60 """Bytes written to the block layer"""
61 return self['iostat']['write_bytes']
62
63 @property
64 def read_ops(self):
65 """Number of read operations on the block layer"""
66 if self['child_rusage']:
67 # Child rusage may have been optimized out
68 return self['rusage']['ru_inblock'] + self['child_rusage']['ru_inblock']
69 else:
70 return self['rusage']['ru_inblock']
71
72 @property
73 def write_ops(self):
74 """Number of write operations on the block layer"""
75 if self['child_rusage']:
76 # Child rusage may have been optimized out
77 return self['rusage']['ru_oublock'] + self['child_rusage']['ru_oublock']
78 else:
79 return self['rusage']['ru_oublock']
80
81 @classmethod
82 def from_file(cls, buildstat_file, fallback_end=0):
83 """Read buildstat text file. fallback_end is an optional end time for tasks that are not recorded as finishing."""
84 bs_task = cls()
85 log.debug("Reading task buildstats from %s", buildstat_file)
86 end_time = None
87 with open(buildstat_file) as fobj:
88 for line in fobj.readlines():
89 key, val = line.split(':', 1)
90 val = val.strip()
91 if key == 'Started':
92 start_time = float(val)
93 bs_task['start_time'] = start_time
94 elif key == 'Ended':
95 end_time = float(val)
96 elif key.startswith('IO '):
97 split = key.split()
98 bs_task['iostat'][split[1]] = int(val)
99 elif key.find('rusage') >= 0:
100 split = key.split()
101 ru_key = split[-1]
102 if ru_key in ('ru_stime', 'ru_utime'):
103 val = float(val)
104 else:
105 val = int(val)
106 ru_type = 'rusage' if split[0] == 'rusage' else \
107 'child_rusage'
108 bs_task[ru_type][ru_key] = val
109 elif key == 'Status':
110 bs_task['status'] = val
111 # If the task didn't finish, fill in the fallback end time if specified
112 if start_time and not end_time and fallback_end:
113 end_time = fallback_end
114 if start_time and end_time:
115 bs_task['elapsed_time'] = end_time - start_time
116 else:
117 raise BSError("{} looks like an invalid buildstats file".format(buildstat_file))
118 return bs_task
119
120
121class BSTaskAggregate(object):
122 """Class representing multiple runs of the same task"""
123 properties = ('cputime', 'walltime', 'read_bytes', 'write_bytes',
124 'read_ops', 'write_ops')
125
126 def __init__(self, tasks=None):
127 self._tasks = tasks or []
128 self._properties = {}
129
130 def __getattr__(self, name):
131 if name in self.properties:
132 if name not in self._properties:
133 # Calculate properties on demand only. We only provide mean
134 # value, so far
135 self._properties[name] = mean([getattr(t, name) for t in self._tasks])
136 return self._properties[name]
137 else:
138 raise AttributeError("'BSTaskAggregate' has no attribute '{}'".format(name))
139
140 def append(self, task):
141 """Append new task"""
142 # Reset pre-calculated properties
143 assert isinstance(task, BSTask), "Type is '{}' instead of 'BSTask'".format(type(task))
144 self._properties = {}
145 self._tasks.append(task)
146
147
148class BSRecipe(object):
149 """Class representing buildstats of one recipe"""
150 def __init__(self, name, epoch, version, revision):
151 self.name = name
152 self.epoch = epoch
153 self.version = version
154 self.revision = revision
155 if epoch is None:
156 self.evr = "{}-{}".format(version, revision)
157 else:
158 self.evr = "{}_{}-{}".format(epoch, version, revision)
159 self.tasks = {}
160
161 def aggregate(self, bsrecipe):
162 """Aggregate data of another recipe buildstats"""
163 if self.nevr != bsrecipe.nevr:
164 raise ValueError("Refusing to aggregate buildstats, recipe version "
165 "differs: {} vs. {}".format(self.nevr, bsrecipe.nevr))
166 if set(self.tasks.keys()) != set(bsrecipe.tasks.keys()):
167 raise ValueError("Refusing to aggregate buildstats, set of tasks "
168 "in {} differ".format(self.name))
169
170 for taskname, taskdata in bsrecipe.tasks.items():
171 if not isinstance(self.tasks[taskname], BSTaskAggregate):
172 self.tasks[taskname] = BSTaskAggregate([self.tasks[taskname]])
173 self.tasks[taskname].append(taskdata)
174
175 @property
176 def nevr(self):
177 return self.name + '-' + self.evr
178
179
180class BuildStats(dict):
181 """Class representing buildstats of one build"""
182
183 @property
184 def num_tasks(self):
185 """Get number of tasks"""
186 num = 0
187 for recipe in self.values():
188 num += len(recipe.tasks)
189 return num
190
191 @classmethod
192 def from_json(cls, bs_json):
193 """Create new BuildStats object from JSON object"""
194 buildstats = cls()
195 for recipe in bs_json:
196 if recipe['name'] in buildstats:
197 raise BSError("Cannot handle multiple versions of the same "
198 "package ({})".format(recipe['name']))
199 bsrecipe = BSRecipe(recipe['name'], recipe['epoch'],
200 recipe['version'], recipe['revision'])
201 for task, data in recipe['tasks'].items():
202 bsrecipe.tasks[task] = BSTask(data)
203
204 buildstats[recipe['name']] = bsrecipe
205
206 return buildstats
207
208 @staticmethod
209 def from_file_json(path):
210 """Load buildstats from a JSON file"""
211 with open(path) as fobj:
212 bs_json = json.load(fobj)
213 return BuildStats.from_json(bs_json)
214
215
216 @staticmethod
217 def split_nevr(nevr):
218 """Split name and version information from recipe "nevr" string"""
219 n_e_v, revision = nevr.rsplit('-', 1)
220 match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[0-9]\S*)$',
221 n_e_v)
222 if not match:
223 # If we're not able to parse a version starting with a number, just
224 # take the part after last dash
225 match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[^-]+)$',
226 n_e_v)
227 name = match.group('name')
228 version = match.group('version')
229 epoch = match.group('epoch')
230 return name, epoch, version, revision
231
232 @staticmethod
233 def parse_top_build_stats(path):
234 """
235 Parse the top-level build_stats file for build-wide start and duration.
236 """
237 start = elapsed = 0
238 with open(path) as fobj:
239 for line in fobj.readlines():
240 key, val = line.split(':', 1)
241 val = val.strip()
242 if key == 'Build Started':
243 start = float(val)
244 elif key == "Elapsed time":
245 elapsed = float(val.split()[0])
246 return start, elapsed
247
248 @classmethod
249 def from_dir(cls, path):
250 """Load buildstats from a buildstats directory"""
251 top_stats = os.path.join(path, 'build_stats')
252 if not os.path.isfile(top_stats):
253 raise BSError("{} does not look like a buildstats directory".format(path))
254
255 log.debug("Reading buildstats directory %s", path)
256 buildstats = cls()
257 build_started, build_elapsed = buildstats.parse_top_build_stats(top_stats)
258 build_end = build_started + build_elapsed
259
260 subdirs = os.listdir(path)
261 for dirname in subdirs:
262 recipe_dir = os.path.join(path, dirname)
263 if dirname == "reduced_proc_pressure" or not os.path.isdir(recipe_dir):
264 continue
265 name, epoch, version, revision = cls.split_nevr(dirname)
266 bsrecipe = BSRecipe(name, epoch, version, revision)
267 for task in os.listdir(recipe_dir):
268 bsrecipe.tasks[task] = BSTask.from_file(
269 os.path.join(recipe_dir, task), build_end)
270 if name in buildstats:
271 raise BSError("Cannot handle multiple versions of the same "
272 "package ({})".format(name))
273 buildstats[name] = bsrecipe
274
275 return buildstats
276
277 def aggregate(self, buildstats):
278 """Aggregate other buildstats into this"""
279 if set(self.keys()) != set(buildstats.keys()):
280 raise ValueError("Refusing to aggregate buildstats, set of "
281 "recipes is different: %s" % (set(self.keys()) ^ set(buildstats.keys())))
282 for pkg, data in buildstats.items():
283 self[pkg].aggregate(data)
284
285
286def diff_buildstats(bs1, bs2, stat_attr, min_val=None, min_absdiff=None, only_tasks=[]):
287 """Compare the tasks of two buildstats"""
288 tasks_diff = []
289 pkgs = set(bs1.keys()).union(set(bs2.keys()))
290 for pkg in pkgs:
291 tasks1 = bs1[pkg].tasks if pkg in bs1 else {}
292 tasks2 = bs2[pkg].tasks if pkg in bs2 else {}
293 if only_tasks:
294 tasks1 = {k: v for k, v in tasks1.items() if k in only_tasks}
295 tasks2 = {k: v for k, v in tasks2.items() if k in only_tasks}
296
297 if not tasks1:
298 pkg_op = '+'
299 elif not tasks2:
300 pkg_op = '-'
301 else:
302 pkg_op = ' '
303
304 for task in set(tasks1.keys()).union(set(tasks2.keys())):
305 task_op = ' '
306 if task in tasks1:
307 val1 = getattr(bs1[pkg].tasks[task], stat_attr)
308 else:
309 task_op = '+'
310 val1 = 0
311 if task in tasks2:
312 val2 = getattr(bs2[pkg].tasks[task], stat_attr)
313 else:
314 val2 = 0
315 task_op = '-'
316
317 if val1 == 0:
318 reldiff = float('inf')
319 else:
320 reldiff = 100 * (val2 - val1) / val1
321
322 if min_val and max(val1, val2) < min_val:
323 log.debug("Filtering out %s:%s (%s)", pkg, task,
324 max(val1, val2))
325 continue
326 if min_absdiff and abs(val2 - val1) < min_absdiff:
327 log.debug("Filtering out %s:%s (difference of %s)", pkg, task,
328 val2-val1)
329 continue
330 tasks_diff.append(TaskDiff(pkg, pkg_op, task, task_op, val1, val2,
331 val2-val1, reldiff))
332 return tasks_diff
333
334
335class BSVerDiff(object):
336 """Class representing recipe version differences between two buildstats"""
337 def __init__(self, bs1, bs2):
338 RecipeVerDiff = namedtuple('RecipeVerDiff', 'left right')
339
340 recipes1 = set(bs1.keys())
341 recipes2 = set(bs2.keys())
342
343 self.new = dict([(r, bs2[r]) for r in sorted(recipes2 - recipes1)])
344 self.dropped = dict([(r, bs1[r]) for r in sorted(recipes1 - recipes2)])
345 self.echanged = {}
346 self.vchanged = {}
347 self.rchanged = {}
348 self.unchanged = {}
349 self.empty_diff = False
350
351 common = recipes2.intersection(recipes1)
352 if common:
353 for recipe in common:
354 rdiff = RecipeVerDiff(bs1[recipe], bs2[recipe])
355 if bs1[recipe].epoch != bs2[recipe].epoch:
356 self.echanged[recipe] = rdiff
357 elif bs1[recipe].version != bs2[recipe].version:
358 self.vchanged[recipe] = rdiff
359 elif bs1[recipe].revision != bs2[recipe].revision:
360 self.rchanged[recipe] = rdiff
361 else:
362 self.unchanged[recipe] = rdiff
363
364 if len(recipes1) == len(recipes2) == len(self.unchanged):
365 self.empty_diff = True
366
367 def __bool__(self):
368 return not self.empty_diff
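A short sketch of comparing two buildstats trees with the helpers above; the directory names are invented:

bs1 = BuildStats.from_dir('buildstats/20240101000000')
bs2 = BuildStats.from_dir('buildstats/20240102000000')
for d in diff_buildstats(bs1, bs2, 'cputime', min_absdiff=5):
    print('{}{} {}{}: {:+.1f} s'.format(
        d.pkg_op, d.pkg, d.task_op, d.task, d.absdiff))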
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
deleted file mode 100644
index 86aadf39a6..0000000000
--- a/scripts/lib/checklayer/__init__.py
+++ /dev/null
@@ -1,466 +0,0 @@
1# Yocto Project layer check tool
2#
3# Copyright (C) 2017 Intel Corporation
4#
5# SPDX-License-Identifier: MIT
6#
7
8import os
9import re
10import subprocess
11from enum import Enum
12
13import bb.tinfoil
14
15class LayerType(Enum):
16 BSP = 0
17 DISTRO = 1
18 SOFTWARE = 2
19 CORE = 3
20 ERROR_NO_LAYER_CONF = 98
21 ERROR_BSP_DISTRO = 99
22
23def _get_configurations(path):
24 configs = []
25
26 for f in os.listdir(path):
27 file_path = os.path.join(path, f)
28 if os.path.isfile(file_path) and f.endswith('.conf'):
29 configs.append(f[:-5]) # strip .conf
30 return configs
31
32def _get_layer_collections(layer_path, lconf=None, data=None):
33 import bb.parse
34 import bb.data
35
36 if lconf is None:
37 lconf = os.path.join(layer_path, 'conf', 'layer.conf')
38
39 if data is None:
40 ldata = bb.data.init()
41 bb.parse.init_parser(ldata)
42 else:
43 ldata = data.createCopy()
44
45 ldata.setVar('LAYERDIR', layer_path)
46 try:
47 ldata = bb.parse.handle(lconf, ldata, include=True, baseconfig=True)
48 except:
49 raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path)
50 ldata.expandVarref('LAYERDIR')
51
52 collections = (ldata.getVar('BBFILE_COLLECTIONS') or '').split()
53 if not collections:
54 name = os.path.basename(layer_path)
55 collections = [name]
56
57 collections = {c: {} for c in collections}
58 for name in collections:
59 priority = ldata.getVar('BBFILE_PRIORITY_%s' % name)
60 pattern = ldata.getVar('BBFILE_PATTERN_%s' % name)
61 depends = ldata.getVar('LAYERDEPENDS_%s' % name)
62 compat = ldata.getVar('LAYERSERIES_COMPAT_%s' % name)
63 try:
64 depDict = bb.utils.explode_dep_versions2(depends or "")
65 except bb.utils.VersionStringException as vse:
66 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (name, str(vse)))
67
68 collections[name]['priority'] = priority
69 collections[name]['pattern'] = pattern
70 collections[name]['depends'] = ' '.join(depDict.keys())
71 collections[name]['compat'] = compat
72
73 return collections
74
75def _detect_layer(layer_path):
76 """
77    Scans a layer directory to detect what type of layer
78    it is: BSP, Distro or Software.
79
80 Returns a dictionary with layer name, type and path.
81 """
82
83 layer = {}
84 layer_name = os.path.basename(layer_path)
85
86 layer['name'] = layer_name
87 layer['path'] = layer_path
88 layer['conf'] = {}
89
90 if not os.path.isfile(os.path.join(layer_path, 'conf', 'layer.conf')):
91 layer['type'] = LayerType.ERROR_NO_LAYER_CONF
92 return layer
93
94 machine_conf = os.path.join(layer_path, 'conf', 'machine')
95 distro_conf = os.path.join(layer_path, 'conf', 'distro')
96
97 is_bsp = False
98 is_distro = False
99
100 if os.path.isdir(machine_conf):
101 machines = _get_configurations(machine_conf)
102 if machines:
103 is_bsp = True
104
105 if os.path.isdir(distro_conf):
106 distros = _get_configurations(distro_conf)
107 if distros:
108 is_distro = True
109
110 layer['collections'] = _get_layer_collections(layer['path'])
111
112 if layer_name == "meta" and "core" in layer['collections']:
113 layer['type'] = LayerType.CORE
114 layer['conf']['machines'] = machines
115 layer['conf']['distros'] = distros
116 elif is_bsp and is_distro:
117 layer['type'] = LayerType.ERROR_BSP_DISTRO
118 elif is_bsp:
119 layer['type'] = LayerType.BSP
120 layer['conf']['machines'] = machines
121 elif is_distro:
122 layer['type'] = LayerType.DISTRO
123 layer['conf']['distros'] = distros
124 else:
125 layer['type'] = LayerType.SOFTWARE
126
127 return layer
128
129def detect_layers(layer_directories, no_auto):
130 layers = []
131
132 for directory in layer_directories:
133 directory = os.path.realpath(directory)
134 if directory[-1] == '/':
135 directory = directory[0:-1]
136
137 if no_auto:
138 conf_dir = os.path.join(directory, 'conf')
139 if os.path.isdir(conf_dir):
140 layer = _detect_layer(directory)
141 if layer:
142 layers.append(layer)
143 else:
144 for root, dirs, files in os.walk(directory):
145 dir_name = os.path.basename(root)
146 conf_dir = os.path.join(root, 'conf')
147 if os.path.isdir(conf_dir):
148 layer = _detect_layer(root)
149 if layer:
150 layers.append(layer)
151
152 return layers
153
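
A hedged usage sketch of detect_layers() (the layer path below is illustrative):

    # Scan a directory tree for layers; no_auto=False enables the os.walk() search above.
    layers = detect_layers(['/srv/layers/meta-example'], no_auto=False)
    for layer in layers:
        print(layer['name'], layer['type'], layer['path'])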
154def _find_layer(depend, layers):
155 for layer in layers:
156 if 'collections' not in layer:
157 continue
158
159 for collection in layer['collections']:
160 if depend == collection:
161 return layer
162 return None
163
164def sanity_check_layers(layers, logger):
165 """
166 Check that we didn't find duplicate collection names, as the layer that will
167 be used is non-deterministic. The precise check is duplicate collections
168 with different patterns, as the same pattern being repeated won't cause
169 problems.
170 """
171 import collections
172
173 passed = True
174 seen = collections.defaultdict(set)
175 for layer in layers:
176 for name, data in layer.get("collections", {}).items():
177 seen[name].add(data["pattern"])
178
179 for name, patterns in seen.items():
180 if len(patterns) > 1:
181 passed = False
182 logger.error("Collection %s found multiple times: %s" % (name, ", ".join(patterns)))
183 return passed
184
185def get_layer_dependencies(layer, layers, logger):
186 def recurse_dependencies(depends, layer, layers, logger, ret = []):
187 logger.debug('Processing dependencies %s for layer %s.' % \
188 (depends, layer['name']))
189
190 for depend in depends.split():
191        # core (oe-core) is supposed to be provided
192 if depend == 'core':
193 continue
194
195 layer_depend = _find_layer(depend, layers)
196 if not layer_depend:
197            logger.error('Layer %s depends on %s, which cannot be found.' % \
198 (layer['name'], depend))
199 ret = None
200 continue
201
202 # We keep processing, even if ret is None, this allows us to report
203 # multiple errors at once
204 if ret is not None and layer_depend not in ret:
205 ret.append(layer_depend)
206 else:
207 # we might have processed this dependency already, in which case
208 # we should not do it again (avoid recursive loop)
209 continue
210
211 # Recursively process...
212 if 'collections' not in layer_depend:
213 continue
214
215 for collection in layer_depend['collections']:
216 collect_deps = layer_depend['collections'][collection]['depends']
217 if not collect_deps:
218 continue
219 ret = recurse_dependencies(collect_deps, layer_depend, layers, logger, ret)
220
221 return ret
222
223 layer_depends = []
224 for collection in layer['collections']:
225 depends = layer['collections'][collection]['depends']
226 if not depends:
227 continue
228
229 layer_depends = recurse_dependencies(depends, layer, layers, logger, layer_depends)
230
231 # Note: [] (empty) is allowed, None is not!
232 return layer_depends
233
234def add_layer_dependencies(bblayersconf, layer, layers, logger):
235
236 layer_depends = get_layer_dependencies(layer, layers, logger)
237 if layer_depends is None:
238 return False
239 else:
240 add_layers(bblayersconf, layer_depends, logger)
241
242 return True
243
244def add_layers(bblayersconf, layers, logger):
245 # Don't add a layer that is already present.
246 added = set()
247 output = check_command('Getting existing layers failed.', 'bitbake-layers show-layers').decode('utf-8')
248 for layer, path, pri in re.findall(r'^(\S+) +([^\n]*?) +(\d+)$', output, re.MULTILINE):
249 added.add(path)
250
251 with open(bblayersconf, 'a+') as f:
252 for layer in layers:
253 logger.info('Adding layer %s' % layer['name'])
254 name = layer['name']
255 path = layer['path']
256 if path in added:
257 logger.info('%s is already in %s' % (name, bblayersconf))
258 else:
259 added.add(path)
260 f.write("\nBBLAYERS += \"%s\"\n" % path)
261 return True
262
263def check_bblayers(bblayersconf, layer_path, logger):
264 '''
265 If layer_path found in BBLAYERS return True
266 '''
267 import bb.parse
268 import bb.data
269
270 ldata = bb.parse.handle(bblayersconf, bb.data.init(), include=True)
271 for bblayer in (ldata.getVar('BBLAYERS') or '').split():
272 if os.path.normpath(bblayer) == os.path.normpath(layer_path):
273 return True
274
275 return False
276
277def check_command(error_msg, cmd, cwd=None):
278 '''
279 Run a command under a shell, capture stdout and stderr in a single stream,
280 throw an error when command returns non-zero exit code. Returns the output.
281 '''
282
283 p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=cwd)
284 output, _ = p.communicate()
285 if p.returncode:
286 msg = "%s\nCommand: %s\nOutput:\n%s" % (error_msg, cmd, output.decode('utf-8'))
287 raise RuntimeError(msg)
288 return output
289
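check_command() is used throughout this module; for example, add_layers() below calls it as:

    # Raises RuntimeError (including the command output) on a non-zero exit code.
    output = check_command('Getting existing layers failed.',
                           'bitbake-layers show-layers').decode('utf-8')
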
290def get_signatures(builddir, failsafe=False, machine=None, extravars=None):
291 import re
292
293    # some recipes need to be excluded, like meta-world-pkgdata,
294    # because a layer can add recipes to a world build, so their
295    # signatures will change
296 exclude_recipes = ('meta-world-pkgdata',)
297
298 sigs = {}
299 tune2tasks = {}
300
301 cmd = 'BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
302 if extravars:
303 cmd += extravars
304 cmd += ' '
305 if machine:
306 cmd += 'MACHINE=%s ' % machine
307 cmd += 'bitbake '
308 if failsafe:
309 cmd += '-k '
310 cmd += '-S lockedsigs world'
311 sigs_file = os.path.join(builddir, 'locked-sigs.inc')
312 if os.path.exists(sigs_file):
313 os.unlink(sigs_file)
314 try:
315 check_command('Generating signatures failed. This might be due to some parse error and/or general layer incompatibilities.',
316 cmd, builddir)
317 except RuntimeError as ex:
318 if failsafe and os.path.exists(sigs_file):
319 # Ignore the error here. Most likely some recipes active
320 # in a world build lack some dependencies. There is a
321 # separate test_machine_world_build which exposes the
322 # failure.
323 pass
324 else:
325 raise
326
327 sig_regex = re.compile(r"^(?P<task>.*:.*):(?P<hash>.*) .$")
328 tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
329 current_tune = None
330 with open(sigs_file, 'r') as f:
331 for line in f.readlines():
332 line = line.strip()
333 t = tune_regex.search(line)
334 if t:
335 current_tune = t.group('tune')
336 s = sig_regex.match(line)
337 if s:
338 exclude = False
339 for er in exclude_recipes:
340 (recipe, task) = s.group('task').split(':')
341 if er == recipe:
342 exclude = True
343 break
344 if exclude:
345 continue
346
347 sigs[s.group('task')] = s.group('hash')
348 tune2tasks.setdefault(current_tune, []).append(s.group('task'))
349
350 if not sigs:
351 raise RuntimeError('Can\'t load signatures from %s' % sigs_file)
352
353 return (sigs, tune2tasks)
354
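A hedged sketch of calling get_signatures(); the build directory and machine are illustrative, and failsafe=True tolerates world-build errors as long as locked-sigs.inc was still written:

    sigs, tune2tasks = get_signatures('/path/to/builddir', failsafe=True,
                                      machine='qemux86-64')
    # sigs maps 'recipe:task' to its signature hash; tune2tasks groups tasks by tune.
    print(len(sigs), 'signatures collected')
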
355def get_depgraph(targets=['world'], failsafe=False):
356 '''
357 Returns the dependency graph for the given target(s).
358 The dependency graph is taken directly from DepTreeEvent.
359 '''
360 depgraph = None
361 with bb.tinfoil.Tinfoil() as tinfoil:
362 tinfoil.prepare(config_only=False)
363 tinfoil.set_event_mask(['bb.event.NoProvider', 'bb.event.DepTreeGenerated', 'bb.command.CommandCompleted'])
364 if not tinfoil.run_command('generateDepTreeEvent', targets, 'do_build'):
365 raise RuntimeError('starting generateDepTreeEvent failed')
366 while True:
367 event = tinfoil.wait_event(timeout=1000)
368 if event:
369 if isinstance(event, bb.command.CommandFailed):
370 raise RuntimeError('Generating dependency information failed: %s' % event.error)
371 elif isinstance(event, bb.command.CommandCompleted):
372 break
373 elif isinstance(event, bb.event.NoProvider):
374 if failsafe:
375 # The event is informational, we will get information about the
376 # remaining dependencies eventually and thus can ignore this
377 # here like we do in get_signatures(), if desired.
378 continue
379 if event._reasons:
380 raise RuntimeError('Nothing provides %s: %s' % (event._item, event._reasons))
381 else:
382 raise RuntimeError('Nothing provides %s.' % (event._item))
383 elif isinstance(event, bb.event.DepTreeGenerated):
384 depgraph = event._depgraph
385
386 if depgraph is None:
387 raise RuntimeError('Could not retrieve the depgraph.')
388 return depgraph
389
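A short sketch of how the returned depgraph is typically consumed (compare_signatures() below does the same with the 'tdepends' key; the target name is illustrative):

    depgraph = get_depgraph(targets=['core-image-minimal'], failsafe=True)
    # 'tdepends' maps '<pn>.<taskname>' to the tasks it depends on.
    for task, deps in sorted(depgraph['tdepends'].items())[:5]:
        print(task, '->', deps)
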
390def compare_signatures(old_sigs, curr_sigs):
391 '''
392 Compares the result of two get_signatures() calls. Returns None if no
393 problems found, otherwise a string that can be used as additional
394 explanation in self.fail().
395 '''
396 # task -> (old signature, new signature)
397 sig_diff = {}
398 for task in old_sigs:
399 if task in curr_sigs and \
400 old_sigs[task] != curr_sigs[task]:
401 sig_diff[task] = (old_sigs[task], curr_sigs[task])
402
403 if not sig_diff:
404 return None
405
406 # Beware, depgraph uses task=<pn>.<taskname> whereas get_signatures()
407 # uses <pn>:<taskname>. Need to convert sometimes. The output follows
408 # the convention from get_signatures() because that seems closer to
409 # normal bitbake output.
410 def sig2graph(task):
411 pn, taskname = task.rsplit(':', 1)
412 return pn + '.' + taskname
413 def graph2sig(task):
414 pn, taskname = task.rsplit('.', 1)
415 return pn + ':' + taskname
416 depgraph = get_depgraph(failsafe=True)
417 depends = depgraph['tdepends']
418
419 # If a task A has a changed signature, but none of its
420 # dependencies, then we need to report it because it is
421 # the one which introduces a change. Any task depending on
422 # A (directly or indirectly) will also have a changed
423 # signature, but we don't need to report it. It might have
424 # its own changes, which will become apparent once the
425 # issues that we do report are fixed and the test gets run
426 # again.
427 sig_diff_filtered = []
428 for task, (old_sig, new_sig) in sig_diff.items():
429 deps_tainted = False
430 for dep in depends.get(sig2graph(task), ()):
431 if graph2sig(dep) in sig_diff:
432 deps_tainted = True
433 break
434 if not deps_tainted:
435 sig_diff_filtered.append((task, old_sig, new_sig))
436
437 msg = []
438 msg.append('%d signatures changed, initial differences (first hash before, second after):' %
439 len(sig_diff))
440 for diff in sorted(sig_diff_filtered):
441 recipe, taskname = diff[0].rsplit(':', 1)
442 cmd = 'bitbake-diffsigs --task %s %s --signature %s %s' % \
443 (recipe, taskname, diff[1], diff[2])
444 msg.append(' %s: %s -> %s' % diff)
445 msg.append(' %s' % cmd)
446 try:
447 output = check_command('Determining signature difference failed.',
448 cmd).decode('utf-8')
449 except RuntimeError as error:
450 output = str(error)
451 if output:
452 msg.extend([' ' + line for line in output.splitlines()])
453 msg.append('')
454 return '\n'.join(msg)
455
456
457def get_git_toplevel(directory):
458 """
459 Try and find the top of the git repository that directory might be in.
460 Returns the top-level directory, or None.
461 """
462 cmd = ["git", "-C", directory, "rev-parse", "--show-toplevel"]
463 try:
464 return subprocess.check_output(cmd, text=True).strip()
465    except (subprocess.CalledProcessError, OSError):
466 return None
diff --git a/scripts/lib/checklayer/case.py b/scripts/lib/checklayer/case.py
deleted file mode 100644
index fa9dee384e..0000000000
--- a/scripts/lib/checklayer/case.py
+++ /dev/null
@@ -1,9 +0,0 @@
1# Copyright (C) 2017 Intel Corporation
2#
3# SPDX-License-Identifier: MIT
4#
5
6from oeqa.core.case import OETestCase
7
8class OECheckLayerTestCase(OETestCase):
9 pass
diff --git a/scripts/lib/checklayer/cases/__init__.py b/scripts/lib/checklayer/cases/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/checklayer/cases/__init__.py
+++ /dev/null
diff --git a/scripts/lib/checklayer/cases/bsp.py b/scripts/lib/checklayer/cases/bsp.py
deleted file mode 100644
index b76163fb56..0000000000
--- a/scripts/lib/checklayer/cases/bsp.py
+++ /dev/null
@@ -1,206 +0,0 @@
1# Copyright (C) 2017 Intel Corporation
2#
3# SPDX-License-Identifier: MIT
4#
5
6import unittest
7
8from checklayer import LayerType, get_signatures, check_command, get_depgraph
9from checklayer.case import OECheckLayerTestCase
10
11class BSPCheckLayer(OECheckLayerTestCase):
12 @classmethod
13 def setUpClass(self):
14 if self.tc.layer['type'] not in (LayerType.BSP, LayerType.CORE):
15            raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't a BSP one." %\
16 self.tc.layer['name'])
17
18 def test_bsp_defines_machines(self):
19 self.assertTrue(self.tc.layer['conf']['machines'],
20            "Layer is BSP but doesn't define machines.")
21
22 def test_bsp_no_set_machine(self):
23 from oeqa.utils.commands import get_bb_var
24
25 machine = get_bb_var('MACHINE')
26 self.assertEqual(self.td['bbvars']['MACHINE'], machine,
27 msg="Layer %s modified machine %s -> %s" % \
28 (self.tc.layer['name'], self.td['bbvars']['MACHINE'], machine))
29
30
31 def test_machine_world(self):
32 '''
33 "bitbake world" is expected to work regardless which machine is selected.
34 BSP layers sometimes break that by enabling a recipe for a certain machine
35 without checking whether that recipe actually can be built in the current
36        distro configuration (for example, OpenGL might not be enabled).
37
38 This test iterates over all machines. It would be nicer to instantiate
39 it once per machine. It merely checks for errors during parse
40 time. It does not actually attempt to build anything.
41 '''
42
43 if not self.td['machines']:
44 self.skipTest('No machines set with --machines.')
45 msg = []
46 for machine in self.td['machines']:
47 # In contrast to test_machine_signatures() below, errors are fatal here.
48 try:
49 get_signatures(self.td['builddir'], failsafe=False, machine=machine)
50 except RuntimeError as ex:
51 msg.append(str(ex))
52 if msg:
53 msg.insert(0, 'The following machines broke a world build:')
54 self.fail('\n'.join(msg))
55
56 def test_machine_signatures(self):
57 '''
58 Selecting a machine may only affect the signature of tasks that are specific
59 to that machine. In other words, when MACHINE=A and MACHINE=B share a recipe
60 foo and the output of foo, then both machine configurations must build foo
61 in exactly the same way. Otherwise it is not possible to use both machines
62 in the same distribution.
63
64        This criterion can only be tested by testing different machines in combination,
65 i.e. one main layer, potentially several additional BSP layers and an explicit
66 choice of machines:
67 yocto-check-layer --additional-layers .../meta-intel --machines intel-corei7-64 imx6slevk -- .../meta-freescale
68 '''
69
70 if not self.td['machines']:
71 self.skipTest('No machines set with --machines.')
72
73 # Collect signatures for all machines that we are testing
74 # and merge that into a hash:
75 # tune -> task -> signature -> list of machines with that combination
76 #
77 # It is an error if any tune/task pair has more than one signature,
78 # because that implies that the machines that caused those different
79 # signatures do not agree on how to execute the task.
80 tunes = {}
81 # Preserve ordering of machines as chosen by the user.
82 for machine in self.td['machines']:
83 curr_sigs, tune2tasks = get_signatures(self.td['builddir'], failsafe=True, machine=machine)
84 # Invert the tune -> [tasks] mapping.
85 tasks2tune = {}
86 for tune, tasks in tune2tasks.items():
87 for task in tasks:
88 tasks2tune[task] = tune
89 for task, sighash in curr_sigs.items():
90 tunes.setdefault(tasks2tune[task], {}).setdefault(task, {}).setdefault(sighash, []).append(machine)
91
92 msg = []
93 pruned = 0
94 last_line_key = None
95 # do_fetch, do_unpack, ..., do_build
96 taskname_list = []
97 if tunes:
98 # The output below is most useful when we start with tasks that are at
99 # the bottom of the dependency chain, i.e. those that run first. If
100            # those tasks differ, the rest will differ too.
101 #
102 # To get an ordering of tasks, we do a topological sort of the entire
103 # depgraph for the base configuration, then on-the-fly flatten that list by stripping
104 # out the recipe names and removing duplicates. The base configuration
105 # is not necessarily representative, but should be close enough. Tasks
106 # that were not encountered get a default priority.
107 depgraph = get_depgraph()
108 depends = depgraph['tdepends']
109 WHITE = 1
110 GRAY = 2
111 BLACK = 3
112 color = {}
113 found = set()
114 def visit(task):
115 color[task] = GRAY
116 for dep in depends.get(task, ()):
117 if color.setdefault(dep, WHITE) == WHITE:
118 visit(dep)
119 color[task] = BLACK
120 pn, taskname = task.rsplit('.', 1)
121 if taskname not in found:
122 taskname_list.append(taskname)
123 found.add(taskname)
124 for task in depends.keys():
125 if color.setdefault(task, WHITE) == WHITE:
126 visit(task)
127
128 taskname_order = dict([(task, index) for index, task in enumerate(taskname_list) ])
129 def task_key(task):
130 pn, taskname = task.rsplit(':', 1)
131 return (pn, taskname_order.get(taskname, len(taskname_list)), taskname)
132
133 for tune in sorted(tunes.keys()):
134 tasks = tunes[tune]
135 # As for test_signatures it would be nicer to sort tasks
136 # by dependencies here, but that is harder because we have
137 # to report on tasks from different machines, which might
138 # have different dependencies. We resort to pruning the
139 # output by reporting only one task per recipe if the set
140 # of machines matches.
141 #
142 # "bitbake-diffsigs -t -s" is intelligent enough to print
143 # diffs recursively, so often it does not matter that much
144 # if we don't pick the underlying difference
145 # here. However, sometimes recursion fails
146 # (https://bugzilla.yoctoproject.org/show_bug.cgi?id=6428).
147 #
148 # To mitigate that a bit, we use a hard-coded ordering of
149 # tasks that represents how they normally run and prefer
150 # to print the ones that run first.
151 for task in sorted(tasks.keys(), key=task_key):
152 signatures = tasks[task]
153                    # do_build can be ignored: it is known to have
154 # different signatures in some cases, for example in
155 # the allarch ca-certificates due to RDEPENDS=openssl.
156 # That particular dependency is marked via
157 # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up
158 # in the sstate signature hash because filtering it
159 # out would be hard and running do_build multiple
160 # times doesn't really matter.
161 if len(signatures.keys()) > 1 and \
162 not task.endswith(':do_build'):
163 # Error!
164 #
165 # Sort signatures by machines, because the hex values don't mean anything.
166 # => all-arch adwaita-icon-theme:do_build: 1234... (beaglebone, qemux86) != abcdf... (qemux86-64)
167 #
168 # Skip the line if it is covered already by the predecessor (same pn, same sets of machines).
169 pn, taskname = task.rsplit(':', 1)
170 next_line_key = (pn, sorted(signatures.values()))
171 if next_line_key != last_line_key:
172 line = ' %s %s: ' % (tune, task)
173 line += ' != '.join(['%s (%s)' % (signature, ', '.join([m for m in signatures[signature]])) for
174 signature in sorted(signatures.keys(), key=lambda s: signatures[s])])
175 last_line_key = next_line_key
176 msg.append(line)
177 # Randomly pick two mismatched signatures and remember how to invoke
178 # bitbake-diffsigs for them.
179 iterator = iter(signatures.items())
180 a = next(iterator)
181 b = next(iterator)
182 diffsig_machines = '(%s) != (%s)' % (', '.join(a[1]), ', '.join(b[1]))
183 diffsig_params = '-t %s %s -s %s %s' % (pn, taskname, a[0], b[0])
184 else:
185 pruned += 1
186
187 if msg:
188 msg.insert(0, 'The machines have conflicting signatures for some shared tasks:')
189 if pruned > 0:
190 msg.append('')
191            msg.append('%d tasks were not listed because some other task of the recipe already differed.' % pruned)
192 msg.append('It is likely that differences from different recipes also have the same root cause.')
193 msg.append('')
194 # Explain how to investigate...
195 msg.append('To investigate, run bitbake-diffsigs -t recipename taskname -s fromsig tosig.')
196 cmd = 'bitbake-diffsigs %s' % diffsig_params
197 msg.append('Example: %s in the last line' % diffsig_machines)
198 msg.append('Command: %s' % cmd)
199 # ... and actually do it automatically for that example, but without aborting
200 # when that fails.
201 try:
202 output = check_command('Comparing signatures failed.', cmd).decode('utf-8')
203 except RuntimeError as ex:
204 output = str(ex)
205 msg.extend([' ' + line for line in output.splitlines()])
206 self.fail('\n'.join(msg))
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py
deleted file mode 100644
index ddead69a7b..0000000000
--- a/scripts/lib/checklayer/cases/common.py
+++ /dev/null
@@ -1,135 +0,0 @@
1# Copyright (C) 2017 Intel Corporation
2#
3# SPDX-License-Identifier: MIT
4#
5
6import glob
7import os
8import unittest
9import re
10from checklayer import get_signatures, LayerType, check_command, compare_signatures, get_git_toplevel
11from checklayer.case import OECheckLayerTestCase
12
13class CommonCheckLayer(OECheckLayerTestCase):
14 def test_readme(self):
15 if self.tc.layer['type'] == LayerType.CORE:
16 raise unittest.SkipTest("Core layer's README is top level")
17
18 # The top-level README file may have a suffix (like README.rst or README.txt).
19 readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*'))
20 self.assertTrue(len(readme_files) > 0,
21 msg="Layer doesn't contain a README file.")
22
23 # There might be more than one file matching the file pattern above
24        # (for example, README.rst and README-COPYING.rst). The first file in
25        # alphabetical sort order is considered the "main" one.
26 readme_file = sorted(readme_files)[0]
27 data = ''
28 with open(readme_file, 'r') as f:
29 data = f.read()
30 self.assertTrue(data,
31 msg="Layer contains a README file but it is empty.")
32
33 # If a layer's README references another README, then the checks below are not valid
34 if re.search('README', data, re.IGNORECASE):
35 return
36
37 self.assertIn('maintainer', data.lower())
38 self.assertIn('patch', data.lower())
39 # Check that there is an email address in the README
40 email_regex = re.compile(r"[^@]+@[^@]+")
41 self.assertTrue(email_regex.match(data))
42
43 def find_file_by_name(self, globs):
44 """
45 Utility function to find a file that matches the specified list of
46 globs, in either the layer directory itself or the repository top-level
47 directory.
48 """
49 directories = [self.tc.layer["path"]]
50 toplevel = get_git_toplevel(directories[0])
51 if toplevel:
52 directories.append(toplevel)
53
54 for path in directories:
55 for name in globs:
56 files = glob.glob(os.path.join(path, name))
57 if files:
58 return sorted(files)[0]
59 return None
60
61 def test_security(self):
62 """
63 Test that the layer has a SECURITY.md (or similar) file, either in the
64 layer itself or at the top of the containing git repository.
65 """
66 if self.tc.layer["type"] == LayerType.CORE:
67 raise unittest.SkipTest("Core layer's SECURITY is top level")
68
69 filename = self.find_file_by_name(("SECURITY", "SECURITY.*"))
70 self.assertTrue(filename, msg="Layer doesn't contain a SECURITY.md file.")
71
72 size = os.path.getsize(filename)
73 self.assertGreater(size, 0, msg=f"{filename} has no content.")
74
75 def test_parse(self):
76 check_command('Layer %s failed to parse.' % self.tc.layer['name'],
77 'bitbake -p')
78
79 def test_show_environment(self):
80 check_command('Layer %s failed to show environment.' % self.tc.layer['name'],
81 'bitbake -e')
82
83 def test_world(self):
84 '''
85 "bitbake world" is expected to work. test_signatures does not cover that
86 because it is more lenient and ignores recipes in a world build that
87 are not actually buildable, so here we fail when "bitbake -S none world"
88 fails.
89 '''
90 get_signatures(self.td['builddir'], failsafe=False)
91
92 def test_world_inherit_class(self):
93 '''
94 This also does "bitbake -S none world" along with inheriting "yocto-check-layer"
95 class, which can do additional per-recipe test cases.
96 '''
97 msg = []
98 try:
99 get_signatures(self.td['builddir'], failsafe=False, machine=None, extravars='BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS INHERIT" INHERIT="yocto-check-layer"')
100 except RuntimeError as ex:
101 msg.append(str(ex))
102 if msg:
103 msg.insert(0, 'Layer %s failed additional checks from yocto-check-layer.bbclass\nSee below log for specific recipe parsing errors:\n' % \
104 self.tc.layer['name'])
105 self.fail('\n'.join(msg))
106
107 def test_patches_upstream_status(self):
108 import sys
109 sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/'))
110 import oe.qa
111 patches = []
112 for dirpath, dirs, files in os.walk(self.tc.layer['path']):
113 for filename in files:
114 if filename.endswith(".patch"):
115 ppath = os.path.join(dirpath, filename)
116 if oe.qa.check_upstream_status(ppath):
117 patches.append(ppath)
118        self.assertEqual(len(patches), 0, \
119            msg="Found the following patches with malformed or missing upstream status:\n%s" % '\n'.join([str(patch) for patch in patches]))
120
121 def test_signatures(self):
122 if self.tc.layer['type'] == LayerType.SOFTWARE and \
123 not self.tc.test_software_layer_signatures:
124 raise unittest.SkipTest("Not testing for signature changes in a software layer %s." \
125 % self.tc.layer['name'])
126
127 curr_sigs, _ = get_signatures(self.td['builddir'], failsafe=True)
128 msg = compare_signatures(self.td['sigs'], curr_sigs)
129 if msg is not None:
130 self.fail('Adding layer %s changed signatures.\n%s' % (self.tc.layer['name'], msg))
131
132 def test_layerseries_compat(self):
133 for collection_name, collection_data in self.tc.layer['collections'].items():
134 self.assertTrue(collection_data['compat'], "Collection %s from layer %s does not set compatible oe-core versions via LAYERSERIES_COMPAT_collection." \
135 % (collection_name, self.tc.layer['name']))
diff --git a/scripts/lib/checklayer/cases/distro.py b/scripts/lib/checklayer/cases/distro.py
deleted file mode 100644
index a35332451c..0000000000
--- a/scripts/lib/checklayer/cases/distro.py
+++ /dev/null
@@ -1,28 +0,0 @@
1# Copyright (C) 2017 Intel Corporation
2#
3# SPDX-License-Identifier: MIT
4#
5
6import unittest
7
8from checklayer import LayerType
9from checklayer.case import OECheckLayerTestCase
10
11class DistroCheckLayer(OECheckLayerTestCase):
12 @classmethod
13 def setUpClass(self):
14 if self.tc.layer['type'] not in (LayerType.DISTRO, LayerType.CORE):
15            raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't a Distro one." %\
16 self.tc.layer['name'])
17
18 def test_distro_defines_distros(self):
19 self.assertTrue(self.tc.layer['conf']['distros'],
20            "Layer is Distro but doesn't define distros.")
21
22 def test_distro_no_set_distros(self):
23 from oeqa.utils.commands import get_bb_var
24
25 distro = get_bb_var('DISTRO')
26 self.assertEqual(self.td['bbvars']['DISTRO'], distro,
27 msg="Layer %s modified distro %s -> %s" % \
28 (self.tc.layer['name'], self.td['bbvars']['DISTRO'], distro))
diff --git a/scripts/lib/checklayer/context.py b/scripts/lib/checklayer/context.py
deleted file mode 100644
index 4de8f668fd..0000000000
--- a/scripts/lib/checklayer/context.py
+++ /dev/null
@@ -1,17 +0,0 @@
1# Copyright (C) 2017 Intel Corporation
2#
3# SPDX-License-Identifier: MIT
4#
5
6import os
7import sys
8import glob
9import re
10
11from oeqa.core.context import OETestContext
12
13class CheckLayerTestContext(OETestContext):
14 def __init__(self, td=None, logger=None, layer=None, test_software_layer_signatures=True):
15 super(CheckLayerTestContext, self).__init__(td, logger)
16 self.layer = layer
17 self.test_software_layer_signatures = test_software_layer_signatures
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
deleted file mode 100644
index fa6e1a34fd..0000000000
--- a/scripts/lib/devtool/__init__.py
+++ /dev/null
@@ -1,404 +0,0 @@
1#!/usr/bin/env python3
2
3# Development tool - utility functions for plugins
4#
5# Copyright (C) 2014 Intel Corporation
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9"""Devtool plugins module"""
10
11import os
12import sys
13import subprocess
14import logging
15import re
16import codecs
17
18logger = logging.getLogger('devtool')
19
20class DevtoolError(Exception):
21 """Exception for handling devtool errors"""
22 def __init__(self, message, exitcode=1):
23 super(DevtoolError, self).__init__(message)
24 self.exitcode = exitcode
25
26
27def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
28 """Run a program in bitbake build context"""
29 import bb
30 if not 'cwd' in options:
31 options["cwd"] = builddir
32 if init_path:
33 # As the OE init script makes use of BASH_SOURCE to determine OEROOT,
34 # and can't determine it when running under dash, we need to set
35 # the executable to bash to correctly set things up
36 if not 'executable' in options:
37 options['executable'] = 'bash'
38 logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
39 init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
40 else:
41 logger.debug('Executing command "%s"' % cmd)
42 init_prefix = ''
43 if watch:
44 if sys.stdout.isatty():
45 # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
46 cmd = 'script -e -q -c "%s" /dev/null' % cmd
47 return exec_watch('%s%s' % (init_prefix, cmd), **options)
48 else:
49 return bb.process.run('%s%s' % (init_prefix, cmd), **options)
50
51def exec_watch(cmd, **options):
52 """Run program with stdout shown on sys.stdout"""
53 import bb
54 if isinstance(cmd, str) and not "shell" in options:
55 options["shell"] = True
56
57 process = subprocess.Popen(
58 cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
59 )
60
61 reader = codecs.getreader('utf-8')(process.stdout)
62 buf = ''
63 while True:
64 out = reader.read(1, 1)
65 if out:
66 sys.stdout.write(out)
67 sys.stdout.flush()
68 buf += out
69 elif out == '' and process.poll() != None:
70 break
71
72 if process.returncode != 0:
73 raise bb.process.ExecutionError(cmd, process.returncode, buf, None)
74
75 return buf, None
76
77def exec_fakeroot(d, cmd, **kwargs):
78 """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
79 # Grab the command and check it actually exists
80 fakerootcmd = d.getVar('FAKEROOTCMD')
81 fakerootenv = d.getVar('FAKEROOTENV')
82    return exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs)
83
84def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
85 if not os.path.exists(fakerootcmd):
86        logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this, and if you have run any build then it should have been built' % fakerootcmd)
87 return 2
88 # Set up the appropriate environment
89 newenv = dict(os.environ)
90 for varvalue in fakerootenv.split():
91 if '=' in varvalue:
92 splitval = varvalue.split('=', 1)
93 newenv[splitval[0]] = splitval[1]
94 return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
95
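A hedged call sketch for exec_fakeroot_no_d(); in practice both values come from d.getVar('FAKEROOTCMD') and d.getVar('FAKEROOTENV'), so the paths and environment settings here are illustrative:

    rc = exec_fakeroot_no_d('/usr/bin/pseudo',
                            'PSEUDO_PREFIX=/tmp/pseudo PSEUDO_LOCALSTATEDIR=/tmp/pseudo/var',
                            'tar -xf image.tar -C rootfs', cwd='/tmp')
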
96def setup_tinfoil(config_only=False, basepath=None, tracking=False):
97 """Initialize tinfoil api from bitbake"""
98 import scriptpath
99 orig_cwd = os.path.abspath(os.curdir)
100 try:
101 if basepath:
102 os.chdir(basepath)
103 bitbakepath = scriptpath.add_bitbake_lib_path()
104 if not bitbakepath:
105 logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
106 sys.exit(1)
107
108 import bb.tinfoil
109 tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
110 try:
111 tinfoil.logger.setLevel(logger.getEffectiveLevel())
112 tinfoil.prepare(config_only)
113 except bb.tinfoil.TinfoilUIException:
114 tinfoil.shutdown()
115 raise DevtoolError('Failed to start bitbake environment')
116 except:
117 tinfoil.shutdown()
118 raise
119 finally:
120 os.chdir(orig_cwd)
121 return tinfoil
122
123def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
124 """Parse the specified recipe"""
125 try:
126 recipefile = tinfoil.get_recipe_file(pn)
127 except bb.providers.NoProvider as e:
128 logger.error(str(e))
129 return None
130 if appends:
131 append_files = tinfoil.get_file_appends(recipefile)
132 if filter_workspace:
133 # Filter out appends from the workspace
134 append_files = [path for path in append_files if
135 not path.startswith(config.workspace_path)]
136 else:
137 append_files = None
138 try:
139 rd = tinfoil.parse_recipe_file(recipefile, appends, append_files)
140 except Exception as e:
141 logger.error(str(e))
142 return None
143 return rd
144
145def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
146 """
147 Check that a recipe is in the workspace and (optionally) that source
148 is present.
149 """
150
151 workspacepn = pn
152
153 for recipe, value in workspace.items():
154 if recipe == pn:
155 break
156 if bbclassextend:
157 recipefile = value['recipefile']
158 if recipefile:
159 targets = get_bbclassextend_targets(recipefile, recipe)
160 if pn in targets:
161 workspacepn = recipe
162 break
163 else:
164 raise DevtoolError("No recipe named '%s' in your workspace" % pn)
165
166 if checksrc:
167 srctree = workspace[workspacepn]['srctree']
168 if not os.path.exists(srctree):
169 raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, workspacepn))
170 if not os.listdir(srctree):
171 raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, workspacepn))
172
173 return workspacepn
174
175def use_external_build(same_dir, no_same_dir, d):
176 """
177 Determine if we should use B!=S (separate build and source directories) or not
178 """
179 b_is_s = True
180 if no_same_dir:
181 logger.info('Using separate build directory since --no-same-dir specified')
182 b_is_s = False
183 elif same_dir:
184 logger.info('Using source tree as build directory since --same-dir specified')
185 elif bb.data.inherits_class('autotools-brokensep', d):
186 logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
187 elif os.path.abspath(d.getVar('B')) == os.path.abspath(d.getVar('S')):
188 logger.info('Using source tree as build directory since that would be the default for this recipe')
189 else:
190 b_is_s = False
191 return b_is_s
192
193def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
194 """
195 Set up the git repository for the source tree
196 """
197 import bb.process
198 import oe.patch
199 if not os.path.exists(os.path.join(repodir, '.git')):
200 bb.process.run('git init', cwd=repodir)
201 bb.process.run('git config --local gc.autodetach 0', cwd=repodir)
202 bb.process.run('git add -f -A .', cwd=repodir)
203 commit_cmd = ['git']
204 oe.patch.GitApplyTree.gitCommandUserOptions(commit_cmd, d=d)
205 commit_cmd += ['commit', '-q']
206 stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
207 if not stdout:
208 commit_cmd.append('--allow-empty')
209 commitmsg = "Initial empty commit with no upstream sources"
210 elif version:
211 commitmsg = "Initial commit from upstream at version %s" % version
212 else:
213 commitmsg = "Initial commit from upstream"
214 commit_cmd += ['-m', commitmsg]
215 bb.process.run(commit_cmd, cwd=repodir)
216
217 # Ensure singletask.lock (as used by externalsrc.bbclass) is ignored by git
218 gitinfodir = os.path.join(repodir, '.git', 'info')
219 try:
220 os.mkdir(gitinfodir)
221 except FileExistsError:
222 pass
223 excludes = []
224 excludefile = os.path.join(gitinfodir, 'exclude')
225 try:
226 with open(excludefile, 'r') as f:
227 excludes = f.readlines()
228 except FileNotFoundError:
229 pass
230 if 'singletask.lock\n' not in excludes:
231 excludes.append('singletask.lock\n')
232 with open(excludefile, 'w') as f:
233 for line in excludes:
234 f.write(line)
235
236 bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
237 bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir)
238
239 # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
240 # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
241 stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
242 found = False
243 for line in stdout.splitlines():
244 if line.endswith("/"):
245 new_dir = line.split()[1]
246 for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
247 if ".git" in dirs + files:
248 (stdout, _) = bb.process.run('git remote', cwd=root)
249 remote = stdout.splitlines()[0]
250 (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
251 remote_url = stdout.splitlines()[0]
252 logger.error(os.path.relpath(os.path.join(root, ".."), root))
253 bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
254 found = True
255 if found:
256 oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
257 found = False
258 if os.path.exists(os.path.join(repodir, '.gitmodules')):
259 bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir)
260
261def recipe_to_append(recipefile, config, wildcard=False):
262 """
263 Convert a recipe file to a bbappend file path within the workspace.
264 NOTE: if the bbappend already exists, you should be using
265 workspace[args.recipename]['bbappend'] instead of calling this
266 function.
267 """
268 appendname = os.path.splitext(os.path.basename(recipefile))[0]
269 if wildcard:
270 appendname = re.sub(r'_.*', '_%', appendname)
271 appendpath = os.path.join(config.workspace_path, 'appends')
272 appendfile = os.path.join(appendpath, appendname + '.bbappend')
273 return appendfile
274
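To make the wildcard behaviour above concrete, here is the version-suffix transform on its own (recipe name illustrative):

    import os, re
    appendname = os.path.splitext(os.path.basename('foo_1.0.bb'))[0]  # 'foo_1.0'
    print(re.sub(r'_.*', '_%', appendname) + '.bbappend')             # 'foo_%.bbappend'
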
275def get_bbclassextend_targets(recipefile, pn):
276 """
277 Cheap function to get BBCLASSEXTEND and then convert that to the
278 list of targets that would result.
279 """
280 import bb.utils
281
282 values = {}
283 def get_bbclassextend_varfunc(varname, origvalue, op, newlines):
284 values[varname] = origvalue
285 return origvalue, None, 0, True
286 with open(recipefile, 'r') as f:
287 bb.utils.edit_metadata(f, ['BBCLASSEXTEND'], get_bbclassextend_varfunc)
288
289 targets = []
290 bbclassextend = values.get('BBCLASSEXTEND', '').split()
291 if bbclassextend:
292 for variant in bbclassextend:
293 if variant == 'nativesdk':
294 targets.append('%s-%s' % (variant, pn))
295 elif variant in ['native', 'cross', 'crosssdk']:
296 targets.append('%s-%s' % (pn, variant))
297 return targets
298
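A worked example of the naming scheme above: for pn='zlib' with BBCLASSEXTEND = "native nativesdk" in the recipe, the returned targets are ['zlib-native', 'nativesdk-zlib'] (ordering follows BBCLASSEXTEND).
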
299def replace_from_file(path, old, new):
300 """Replace strings on a file"""
301
302 def read_file(path):
303 data = None
304 with open(path) as f:
305 data = f.read()
306 return data
307
308 def write_file(path, data):
309 if data is None:
310 return
311 wdata = data.rstrip() + "\n"
312 with open(path, "w") as f:
313 f.write(wdata)
314
315 # In case old is None, return immediately
316 if old is None:
317 return
318 try:
319 rdata = read_file(path)
320 except IOError as e:
321        # if the file does not exist, just quit, otherwise raise the exception
322        import errno
323        if e.errno != errno.ENOENT:
324            raise
325        return
326
327 old_contents = rdata.splitlines()
328 new_contents = []
329 for old_content in old_contents:
330 try:
331 new_contents.append(old_content.replace(old, new))
332 except ValueError:
333 pass
334 write_file(path, "\n".join(new_contents))
335
336
337def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None):
338 """ This function will make unlocked-sigs.inc match the recipes in the
339 workspace plus any extras we want unlocked. """
340    import bb.utils
341 if not fixed_setup:
342 # Only need to write this out within the eSDK
343 return
344
345 if not extra:
346 extra = []
347
348 confdir = os.path.join(basepath, 'conf')
349 unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')
350
351 # Get current unlocked list if any
352 values = {}
353 def get_unlockedsigs_varfunc(varname, origvalue, op, newlines):
354 values[varname] = origvalue
355 return origvalue, None, 0, True
356 if os.path.exists(unlockedsigs):
357 with open(unlockedsigs, 'r') as f:
358 bb.utils.edit_metadata(f, ['SIGGEN_UNLOCKED_RECIPES'], get_unlockedsigs_varfunc)
359 unlocked = sorted(values.get('SIGGEN_UNLOCKED_RECIPES', []))
360
361 # If the new list is different to the current list, write it out
362 newunlocked = sorted(list(workspace.keys()) + extra)
363 if unlocked != newunlocked:
364 bb.utils.mkdirhier(confdir)
365 with open(unlockedsigs, 'w') as f:
366 f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
367 "# This layer was created by the OpenEmbedded devtool" +
368 " utility in order to\n" +
369 "# contain recipes that are unlocked.\n")
370
371 f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
372 for pn in newunlocked:
373 f.write(' ' + pn)
374 f.write('"')
375
376def check_prerelease_version(ver, operation):
377 if 'pre' in ver or 'rc' in ver:
378 logger.warning('Version "%s" looks like a pre-release version. '
379 'If that is the case, in order to ensure that the '
380 'version doesn\'t appear to go backwards when you '
381 'later upgrade to the final release version, it is '
382                       'recommended that instead you use '
383 '<current version>+<pre-release version> e.g. if '
384 'upgrading from 1.9 to 2.0-rc2 use "1.9+2.0-rc2". '
385 'If you prefer not to reset and re-try, you can change '
386 'the version after %s succeeds using "devtool rename" '
387 'with -V/--version.' % (ver, operation))
388
389def check_git_repo_dirty(repodir):
390 """Check if a git repository is clean or not"""
391    import bb.process; stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
392 return stdout
393
394def check_git_repo_op(srctree, ignoredirs=None):
395 """Check if a git repository is in the middle of a rebase"""
396    import bb.process; stdout, _ = bb.process.run('git rev-parse --show-toplevel', cwd=srctree)
397 topleveldir = stdout.strip()
398 if ignoredirs and topleveldir in ignoredirs:
399 return
400 gitdir = os.path.join(topleveldir, '.git')
401 if os.path.exists(os.path.join(gitdir, 'rebase-merge')):
402 raise DevtoolError("Source tree %s appears to be in the middle of a rebase - please resolve this first" % srctree)
403 if os.path.exists(os.path.join(gitdir, 'rebase-apply')):
404 raise DevtoolError("Source tree %s appears to be in the middle of 'git am' or 'git apply' - please resolve this first" % srctree)
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
deleted file mode 100644
index 0b2c3d33dc..0000000000
--- a/scripts/lib/devtool/build.py
+++ /dev/null
@@ -1,92 +0,0 @@
1# Development tool - build command plugin
2#
3# Copyright (C) 2014-2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool build plugin"""
8
9import os
10import bb
11import logging
12import argparse
13import tempfile
14from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
15from devtool import parse_recipe
16
17logger = logging.getLogger('devtool')
18
19
20def _set_file_values(fn, values):
21 remaining = list(values.keys())
22
23 def varfunc(varname, origvalue, op, newlines):
24 newvalue = values.get(varname, origvalue)
25 remaining.remove(varname)
26 return (newvalue, '=', 0, True)
27
28 with open(fn, 'r') as f:
29 (updated, newlines) = bb.utils.edit_metadata(f, values, varfunc)
30
31 for item in remaining:
32 updated = True
33 newlines.append('%s = "%s"' % (item, values[item]))
34
35 if updated:
36 with open(fn, 'w') as f:
37 f.writelines(newlines)
38 return updated
39
40def _get_build_tasks(config):
41 tasks = config.get('Build', 'build_task', 'populate_sysroot,packagedata').split(',')
42 return ['do_%s' % task.strip() for task in tasks]
43
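As a worked example, with the default build_task value this returns ['do_populate_sysroot', 'do_packagedata']; setting build_task in the [Build] section of the devtool config overrides the list.
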
44def build(args, config, basepath, workspace):
45 """Entry point for the devtool 'build' subcommand"""
46 workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
47 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
48 try:
49 rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
50 if not rd:
51 return 1
52 deploytask = 'do_deploy' in bb.build.listtasks(rd)
53 finally:
54 tinfoil.shutdown()
55
56 if args.clean:
57 # use clean instead of cleansstate to avoid messing things up in eSDK
58 build_tasks = ['do_clean']
59 else:
60 build_tasks = _get_build_tasks(config)
61 if deploytask:
62 build_tasks.append('do_deploy')
63
64 bbappend = workspace[workspacepn]['bbappend']
65 if args.disable_parallel_make:
66 logger.info("Disabling 'make' parallelism")
67 _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
68 try:
69 bbargs = []
70 for task in build_tasks:
71 if args.recipename.endswith('-native') and 'package' in task:
72 continue
73 bbargs.append('%s:%s' % (args.recipename, task))
74 exec_build_env_command(config.init_path, basepath, 'bitbake %s' % ' '.join(bbargs), watch=True)
75 except bb.process.ExecutionError as e:
76 # We've already seen the output since watch=True, so just ensure we return something to the user
77 return e.exitcode
78 finally:
79 if args.disable_parallel_make:
80 _set_file_values(bbappend, {'PARALLEL_MAKE': None})
81
82 return 0
83
84def register_commands(subparsers, context):
85 """Register devtool subcommands from this plugin"""
86 parser_build = subparsers.add_parser('build', help='Build a recipe',
87 description='Builds the specified recipe using bitbake (up to and including %s)' % ', '.join(_get_build_tasks(context.config)),
88 group='working', order=50)
89 parser_build.add_argument('recipename', help='Recipe to build')
90 parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
91 parser_build.add_argument('-c', '--clean', action='store_true', help='clean up recipe building results')
92 parser_build.set_defaults(func=build)
diff --git a/scripts/lib/devtool/build_image.py b/scripts/lib/devtool/build_image.py
deleted file mode 100644
index 980f90ddd6..0000000000
--- a/scripts/lib/devtool/build_image.py
+++ /dev/null
@@ -1,164 +0,0 @@
1# Development tool - build-image plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool plugin containing the build-image subcommand."""
9
10import os
11import errno
12import logging
13
14from bb.process import ExecutionError
15from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
16
17logger = logging.getLogger('devtool')
18
19class TargetNotImageError(Exception):
20 pass
21
22def _get_packages(tinfoil, workspace, config):
23 """Get list of packages from recipes in the workspace."""
24 result = []
25 for recipe in workspace:
26 data = parse_recipe(config, tinfoil, recipe, True)
27 if 'class-target' in data.getVar('OVERRIDES').split(':'):
28 if recipe in data.getVar('PACKAGES').split():
29 result.append(recipe)
30 else:
31 logger.warning("Skipping recipe %s as it doesn't produce a "
32 "package with the same name", recipe)
33 return result
34
35def build_image(args, config, basepath, workspace):
36 """Entry point for the devtool 'build-image' subcommand."""
37
38 image = args.imagename
39 auto_image = False
40 if not image:
41 sdk_targets = config.get('SDK', 'sdk_targets', '').split()
42 if sdk_targets:
43 image = sdk_targets[0]
44 auto_image = True
45 if not image:
46 raise DevtoolError('Unable to determine image to build, please specify one')
47
48 try:
49 if args.add_packages:
50 add_packages = args.add_packages.split(',')
51 else:
52 add_packages = None
53 result, outputdir = build_image_task(config, basepath, workspace, image, add_packages)
54 except TargetNotImageError:
55 if auto_image:
56 raise DevtoolError('Unable to determine image to build, please specify one')
57 else:
58 raise DevtoolError('Specified recipe %s is not an image recipe' % image)
59
60 if result == 0:
61 logger.info('Successfully built %s. You can find output files in %s'
62 % (image, outputdir))
63 return result
64
65def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
66 # remove <image>.bbappend to make sure setup_tinfoil doesn't
67 # break because of it
68 target_basename = config.get('SDK', 'target_basename', '')
69 if target_basename:
70 appendfile = os.path.join(config.workspace_path, 'appends',
71 '%s.bbappend' % target_basename)
72 try:
73 os.unlink(appendfile)
74 except OSError as exc:
75 if exc.errno != errno.ENOENT:
76 raise
77
78 tinfoil = setup_tinfoil(basepath=basepath)
79 try:
80 rd = parse_recipe(config, tinfoil, image, True)
81 if not rd:
82 # Error already shown
83 return (1, None)
84 if not bb.data.inherits_class('image', rd):
85 raise TargetNotImageError()
86
87 # Get the actual filename used and strip the .bb and full path
88 target_basename = rd.getVar('FILE')
89 target_basename = os.path.splitext(os.path.basename(target_basename))[0]
90 config.set('SDK', 'target_basename', target_basename)
91 config.write()
92
93 appendfile = os.path.join(config.workspace_path, 'appends',
94 '%s.bbappend' % target_basename)
95
96 outputdir = None
97 try:
98 if workspace or add_packages:
99 if add_packages:
100 packages = add_packages
101 else:
102 packages = _get_packages(tinfoil, workspace, config)
103 else:
104 packages = None
105 if not task:
106 if not packages and not add_packages and workspace:
107 logger.warning('No recipes in workspace, building image %s unmodified', image)
108 elif not packages:
109 logger.warning('No packages to add, building image %s unmodified', image)
110
111 if packages or extra_append:
112 bb.utils.mkdirhier(os.path.dirname(appendfile))
113 with open(appendfile, 'w') as afile:
114 if packages:
115 # include packages from workspace recipes into the image
116 afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages))
117 if not task:
118 logger.info('Building image %s with the following '
119 'additional packages: %s', image, ' '.join(packages))
120 if extra_append:
121 for line in extra_append:
122 afile.write('%s\n' % line)
123
124 if task in ['populate_sdk', 'populate_sdk_ext']:
125 outputdir = rd.getVar('SDK_DEPLOY')
126 else:
127 outputdir = rd.getVar('DEPLOY_DIR_IMAGE')
128
129 tmp_tinfoil = tinfoil
130 tinfoil = None
131 tmp_tinfoil.shutdown()
132
133 options = ''
134 if task:
135 options += '-c %s' % task
136
137 # run bitbake to build image (or specified task)
138 try:
139 exec_build_env_command(config.init_path, basepath,
140 'bitbake %s %s' % (options, image), watch=True)
141 except ExecutionError as err:
142 return (err.exitcode, None)
143 finally:
144 if os.path.isfile(appendfile):
145 os.unlink(appendfile)
146 finally:
147 if tinfoil:
148 tinfoil.shutdown()
149 return (0, outputdir)
150
151
152def register_commands(subparsers, context):
153 """Register devtool subcommands from the build-image plugin"""
154 parser = subparsers.add_parser('build-image',
155 help='Build image including workspace recipe packages',
156 description='Builds an image, extending it to include '
157 'packages from recipes in the workspace',
158 group='testbuild', order=-10)
159 parser.add_argument('imagename', help='Image recipe to build', nargs='?')
160 parser.add_argument('-p', '--add-packages', help='Instead of adding packages for the '
161 'entire workspace, specify packages to be added to the image '
162 '(separate multiple packages by commas)',
163 metavar='PACKAGES')
164 parser.set_defaults(func=build_image)
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
deleted file mode 100644
index 990303982c..0000000000
--- a/scripts/lib/devtool/build_sdk.py
+++ /dev/null
@@ -1,48 +0,0 @@
1# Development tool - build-sdk command plugin
2#
3# Copyright (C) 2015-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import logging
9from devtool import DevtoolError
10from devtool import build_image
11
12logger = logging.getLogger('devtool')
13
14
15def build_sdk(args, config, basepath, workspace):
16 """Entry point for the devtool build-sdk command"""
17
18 sdk_targets = config.get('SDK', 'sdk_targets', '').split()
19 if sdk_targets:
20 image = sdk_targets[0]
21 else:
22 raise DevtoolError('Unable to determine image to build SDK for')
23
24 extra_append = ['SDK_DERIVATIVE = "1"']
25 try:
26 result, outputdir = build_image.build_image_task(config,
27 basepath,
28 workspace,
29 image,
30 task='populate_sdk_ext',
31 extra_append=extra_append)
32 except build_image.TargetNotImageError:
33 raise DevtoolError('Unable to determine image to build SDK for')
34
35 if result == 0:
36 logger.info('Successfully built SDK. You can find output files in %s'
37 % outputdir)
38 return result
39
40
41def register_commands(subparsers, context):
42 """Register devtool subcommands"""
43 if context.fixed_setup:
44 parser_build_sdk = subparsers.add_parser('build-sdk',
45 help='Build a derivative SDK of this one',
46 description='Builds an extensible SDK based upon this one and the items in your workspace',
47 group='advanced')
48 parser_build_sdk.set_defaults(func=build_sdk)
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
deleted file mode 100644
index b5ca8f2c2f..0000000000
--- a/scripts/lib/devtool/deploy.py
+++ /dev/null
@@ -1,378 +0,0 @@
1# Development tool - deploy/undeploy command plugin
2#
3# Copyright (C) 2014-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool plugin containing the deploy subcommands"""
8
9import logging
10import os
11import shutil
12import subprocess
13import tempfile
14
15import bb.utils
16import argparse_oe
17import oe.types
18
19from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
20
21logger = logging.getLogger('devtool')
22
23deploylist_path = '/.devtool'
24
25def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=False, nopreserve=False, nocheckspace=False):
26 """
27 Prepare a shell script for running on the target to
28 deploy/undeploy files. We have to be careful what we put in this
29 script - only commands that are likely to be available on the
30 target are suitable (the target might be constrained, e.g. using
31 busybox rather than bash with coreutils).
32 """
33 lines = []
34 lines.append('#!/bin/sh')
35 lines.append('set -e')
36 if undeployall:
37 # Yes, I know this is crude - but it does work
38 lines.append('for entry in %s/*.list; do' % deploylist_path)
39 lines.append('[ ! -f $entry ] && exit')
40 lines.append('set `basename $entry | sed "s/.list//"`')
41 if dryrun:
42 if not deploy:
43 lines.append('echo "Previously deployed files for $1:"')
44 lines.append('manifest="%s/$1.list"' % deploylist_path)
45 lines.append('preservedir="%s/$1.preserve"' % deploylist_path)
46 lines.append('if [ -f $manifest ] ; then')
47 # Read manifest in reverse and delete files / remove empty dirs
48 lines.append(' sed \'1!G;h;$!d\' $manifest | while read file')
49 lines.append(' do')
50 if dryrun:
51 lines.append(' if [ ! -d $file ] ; then')
52 lines.append(' echo $file')
53 lines.append(' fi')
54 else:
55 lines.append(' if [ -d $file ] ; then')
56 # Avoid deleting a preserved directory in case it has special perms
57 lines.append(' if [ ! -d $preservedir/$file ] ; then')
58 lines.append(' rmdir $file > /dev/null 2>&1 || true')
59 lines.append(' fi')
60 lines.append(' else')
61 lines.append(' rm -f $file')
62 lines.append(' fi')
63 lines.append(' done')
64 if not dryrun:
65 lines.append(' rm $manifest')
66 if not deploy and not dryrun:
67 # May as well remove all traces
68 lines.append(' rmdir `dirname $manifest` > /dev/null 2>&1 || true')
69 lines.append('fi')
70
71 if deploy:
72 if not nocheckspace:
73 # Check for available space
74 # FIXME This doesn't take into account files spread across multiple
75 # partitions, but doing that is non-trivial
76 # Find the part of the destination path that exists
77 lines.append('checkpath="$2"')
78 lines.append('while [ "$checkpath" != "/" ] && [ ! -e $checkpath ]')
79 lines.append('do')
80 lines.append(' checkpath=`dirname "$checkpath"`')
81 lines.append('done')
82 lines.append(r'freespace=$(df -P $checkpath | sed -nre "s/^(\S+\s+){3}([0-9]+).*/\2/p")')
83 # First line of the file is the total space
84 lines.append('total=`head -n1 $3`')
85 lines.append('if [ $total -gt $freespace ] ; then')
86 lines.append(' echo "ERROR: insufficient space on target (available ${freespace}, needed ${total})"')
87 lines.append(' exit 1')
88 lines.append('fi')
89 if not nopreserve:
90 # Preserve any files that exist. Note that this will add to the
91 # preserved list with successive deployments if the list of files
92 # deployed changes, but because we've deleted any previously
93 # deployed files at this point it will never preserve anything
94 # that was deployed, only files that existed prior to any deploying
95 # (which makes the most sense)
96 lines.append('cat $3 | sed "1d" | while read file fsize')
97 lines.append('do')
98 lines.append(' if [ -e $file ] ; then')
99 lines.append(' dest="$preservedir/$file"')
100 lines.append(' mkdir -p `dirname $dest`')
101 lines.append(' mv $file $dest')
102 lines.append(' fi')
103 lines.append('done')
104 lines.append('rm $3')
105 lines.append('mkdir -p `dirname $manifest`')
106 lines.append('mkdir -p $2')
107 if verbose:
108 lines.append(' tar xv -C $2 -f - | tee $manifest')
109 else:
110 lines.append(' tar xv -C $2 -f - > $manifest')
111 lines.append('sed -i "s!^./!$2!" $manifest')
112 elif not dryrun:
113 # Put any preserved files back
114 lines.append('if [ -d $preservedir ] ; then')
115 lines.append(' cd $preservedir')
116 # find from busybox might not have -exec, so we don't use that
117 lines.append(' find . -type f | while read file')
118 lines.append(' do')
119 lines.append(' mv $file /$file')
120 lines.append(' done')
121 lines.append(' cd /')
122 lines.append(' rm -rf $preservedir')
123 lines.append('fi')
124
125 if undeployall:
126 if not dryrun:
127 lines.append('echo "NOTE: Successfully undeployed $1"')
128 lines.append('done')
129
130 # Delete the script itself
131 lines.append('rm $0')
132 lines.append('')
133
134 return '\n'.join(lines)
135
136def deploy(args, config, basepath, workspace):
137 """Entry point for the devtool 'deploy' subcommand"""
138 import oe.utils
139
140 check_workspace_recipe(workspace, args.recipename, checksrc=False)
141
142 tinfoil = setup_tinfoil(basepath=basepath)
143 try:
144 try:
145 rd = tinfoil.parse_recipe(args.recipename)
146 except Exception as e:
147 raise DevtoolError('Exception parsing recipe %s: %s' %
148 (args.recipename, e))
149
150 srcdir = rd.getVar('D')
151 workdir = rd.getVar('WORKDIR')
152 path = rd.getVar('PATH')
153 strip_cmd = rd.getVar('STRIP')
154 libdir = rd.getVar('libdir')
155 base_libdir = rd.getVar('base_libdir')
156 max_process = oe.utils.get_bb_number_threads(rd)
157 fakerootcmd = rd.getVar('FAKEROOTCMD')
158 fakerootenv = rd.getVar('FAKEROOTENV')
159 finally:
160 tinfoil.shutdown()
161
162 return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args)
163
164def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
165 import math
166 import oe.package
167
168 try:
169 host, destdir = args.target.split(':')
170 except ValueError:
171 destdir = '/'
172 else:
173 args.target = host
174 if not destdir.endswith('/'):
175 destdir += '/'
176
177 recipe_outdir = srcdir
178 if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
179 raise DevtoolError('No files to deploy - have you built the %s '
180 'recipe? If so, the install step has not installed '
181 'any files.' % args.recipename)
182
183 if args.strip and not args.dry_run:
184 # Fakeroot copy to new destination
185 srcdir = recipe_outdir
186 recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
187 if os.path.isdir(recipe_outdir):
188 exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
189 exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
190 os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
191 oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)
192
193 filelist = []
194 inodes = set()
195 ftotalsize = 0
196 for root, _, files in os.walk(recipe_outdir):
197 for fn in files:
198 fstat = os.lstat(os.path.join(root, fn))
199 # Get the size in kiB (since we'll be comparing it to the output of du -k)
200 # MUST use lstat() here not stat() or getfilesize() since we don't want to
201 # dereference symlinks
202 if fstat.st_ino in inodes:
203 fsize = 0
204 else:
205 fsize = int(math.ceil(float(fstat.st_size)/1024))
206 inodes.add(fstat.st_ino)
207 ftotalsize += fsize
208 # The path as it would appear on the target
209 fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
210 filelist.append((fpath, fsize))
211
212 if args.dry_run:
213 print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
214 for item, _ in filelist:
215 print(' %s' % item)
216 return 0
217
218 extraoptions = ''
219 if args.no_host_check:
220 extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
221 if not args.show_status:
222 extraoptions += ' -q'
223
224 scp_sshexec = ''
225 ssh_sshexec = 'ssh'
226 if args.ssh_exec:
227 scp_sshexec = "-S %s" % args.ssh_exec
228 ssh_sshexec = args.ssh_exec
229 scp_port = ''
230 ssh_port = ''
231 if args.port:
232 scp_port = "-P %s" % args.port
233 ssh_port = "-p %s" % args.port
234
235 if args.key:
236 extraoptions += ' -i %s' % args.key
237
238 # In order to delete previously deployed files and have the manifest file on
239 # the target, we write out a shell script and then copy it to the target
240 # so we can then run it (piping tar output to it).
241 # (We cannot use scp here, because it doesn't preserve symlinks.)
242 tmpdir = tempfile.mkdtemp(prefix='devtool')
243 try:
244 tmpscript = '/tmp/devtool_deploy.sh'
245 tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
246 shellscript = _prepare_remote_script(deploy=True,
247 verbose=args.show_status,
248 nopreserve=args.no_preserve,
249 nocheckspace=args.no_check_space)
250 # Write out the script to a file
251 with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
252 f.write(shellscript)
253 # Write out the file list
254 with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
255 f.write('%d\n' % ftotalsize)
256 for fpath, fsize in filelist:
257 f.write('%s %d\n' % (fpath, fsize))
258 # Copy them to the target
259 ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
260 if ret != 0:
261 raise DevtoolError('Failed to copy script to %s - rerun with -s to '
262 'get a complete error message' % args.target)
263 finally:
264 shutil.rmtree(tmpdir)
265
266 # Now run the script
267 ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
268 if ret != 0:
269 raise DevtoolError('Deploy failed - rerun with -s to get a complete '
270 'error message')
271
272 logger.info('Successfully deployed %s' % recipe_outdir)
273
274 files_list = []
275 for root, _, files in os.walk(recipe_outdir):
276 for filename in files:
277 filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
278 files_list.append(os.path.join(destdir, filename))
279
280 return 0
281
282def undeploy(args, config, basepath, workspace):
283 """Entry point for the devtool 'undeploy' subcommand"""
284 if args.all and args.recipename:
285 raise argparse_oe.ArgumentUsageError('Cannot specify -a/--all with a recipe name', 'undeploy-target')
286 elif not args.recipename and not args.all:
287 raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target')
288
289 extraoptions = ''
290 if args.no_host_check:
291 extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
292 if not args.show_status:
293 extraoptions += ' -q'
294
295 scp_sshexec = ''
296 ssh_sshexec = 'ssh'
297 if args.ssh_exec:
298 scp_sshexec = "-S %s" % args.ssh_exec
299 ssh_sshexec = args.ssh_exec
300 scp_port = ''
301 ssh_port = ''
302 if args.port:
303 scp_port = "-P %s" % args.port
304 ssh_port = "-p %s" % args.port
305
306 args.target = args.target.split(':')[0]
307
308 tmpdir = tempfile.mkdtemp(prefix='devtool')
309 try:
310 tmpscript = '/tmp/devtool_undeploy.sh'
311 shellscript = _prepare_remote_script(deploy=False, dryrun=args.dry_run, undeployall=args.all)
312 # Write out the script to a file
313 with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
314 f.write(shellscript)
315 # Copy it to the target
316 ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
317 if ret != 0:
318 raise DevtoolError('Failed to copy script to %s - rerun with -s to '
319 'get a complete error message' % args.target)
320 finally:
321 shutil.rmtree(tmpdir)
322
323 # Now run the script
324 ret = subprocess.call('%s %s %s %s \'sh %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
325 if ret != 0:
326 raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
327 'error message')
328
329 if not args.all and not args.dry_run:
330 logger.info('Successfully undeployed %s' % args.recipename)
331 return 0
332
333
334def register_commands(subparsers, context):
335 """Register devtool subcommands from the deploy plugin"""
336
337 parser_deploy = subparsers.add_parser('deploy-target',
338 help='Deploy recipe output files to live target machine',
339 description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
340 group='testbuild')
341 parser_deploy.add_argument('recipename', help='Recipe to deploy')
342 parser_deploy.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
343 parser_deploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
344 parser_deploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
345 parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
346 parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
347 parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
348 parser_deploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
349 parser_deploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
350 parser_deploy.add_argument('-I', '--key',
351 help='Specify ssh private key for connection to the target')
352
353 strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
354 strip_opts.add_argument('-S', '--strip',
355 help='Strip executables prior to deploying (default: %(default)s). '
356 'The default value of this option can be controlled by setting the strip option in the [Deploy] section to True or False.',
357 default=oe.types.boolean(context.config.get('Deploy', 'strip', default='0')),
358 action='store_true')
359 strip_opts.add_argument('--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
360
361 parser_deploy.set_defaults(func=deploy)
362
363 parser_undeploy = subparsers.add_parser('undeploy-target',
364 help='Undeploy recipe output files in live target machine',
365 description='Un-deploys recipe output files previously deployed to a live target machine by devtool deploy-target.',
366 group='testbuild')
367 parser_undeploy.add_argument('recipename', help='Recipe to undeploy (if not using -a/--all)', nargs='?')
368 parser_undeploy.add_argument('target', help='Live target machine running an ssh server: user@hostname')
369 parser_undeploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
370 parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
371 parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
372 parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
373 parser_undeploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
374 parser_undeploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
375 parser_undeploy.add_argument('-I', '--key',
376 help='Specify ssh private key for connection to the target')
377
378 parser_undeploy.set_defaults(func=undeploy)
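The file list that deploy-target copies to the target is a small text manifest: its first line is the total size in kiB and every following line is a "path size" pair, with hard links counted only once by tracking inodes through lstat() (see deploy_no_d() above). A standalone sketch of that accounting, with hypothetical directory arguments:

import math
import os

def build_file_list(recipe_outdir, destdir='/'):
    filelist = []
    inodes = set()
    ftotalsize = 0
    for root, _, files in os.walk(recipe_outdir):
        for fn in files:
            # lstat() so symlinks are not dereferenced
            fstat = os.lstat(os.path.join(root, fn))
            if fstat.st_ino in inodes:
                fsize = 0  # hard link already counted
            else:
                fsize = int(math.ceil(float(fstat.st_size) / 1024))
                inodes.add(fstat.st_ino)
            ftotalsize += fsize
            # the path as it would appear on the target
            fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
            filelist.append((fpath, fsize))
    return ftotalsize, filelist

if __name__ == '__main__':
    total, files = build_file_list('.')
    print('%d' % total)                # first manifest line: total kiB
    for fpath, fsize in files:
        print('%s %d' % (fpath, fsize))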
diff --git a/scripts/lib/devtool/export.py b/scripts/lib/devtool/export.py
deleted file mode 100644
index 01174edae5..0000000000
--- a/scripts/lib/devtool/export.py
+++ /dev/null
@@ -1,109 +0,0 @@
1# Development tool - export command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool export plugin"""
8
9import os
10import argparse
11import tarfile
12import logging
13import datetime
14import json
15
16logger = logging.getLogger('devtool')
17
18# output files
19default_arcname_prefix = "workspace-export"
20metadata = '.export_metadata'
21
22def export(args, config, basepath, workspace):
23 """Entry point for the devtool 'export' subcommand"""
24
25 def add_metadata(tar):
26 """Archive the workspace object"""
27 # finally store the workspace metadata
28 with open(metadata, 'w') as fd:
29 fd.write(json.dumps((config.workspace_path, workspace)))
30 tar.add(metadata)
31 os.unlink(metadata)
32
33 def add_recipe(tar, recipe, data):
34 """Archive recipe with proper arcname"""
35 # Create a map of name/arcnames
36 arcnames = []
37 for key, name in data.items():
38 if name:
39 if key == 'srctree':
40 # all sources, no matter where they are located, go into the sources directory
41 arcname = 'sources/%s' % recipe
42 else:
43 arcname = name.replace(config.workspace_path, '')
44 arcnames.append((name, arcname))
45
46 for name, arcname in arcnames:
47 tar.add(name, arcname=arcname)
48
49
50 # Make sure the workspace is non-empty and that any recipes listed via --include/--exclude are in the workspace
51 if not workspace:
52 logger.info('Workspace contains no recipes, nothing to export')
53 return 0
54 else:
55 for param, recipes in {'include':args.include,'exclude':args.exclude}.items():
56 for recipe in recipes:
57 if recipe not in workspace:
58 logger.error('Recipe (%s) on %s argument not in the current workspace' % (recipe, param))
59 return 1
60
61 name = args.file
62
63 default_name = "%s-%s.tar.gz" % (default_arcname_prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
64 if not name:
65 name = default_name
66 else:
67 # if name is a directory, append the default name
68 if os.path.isdir(name):
69 name = os.path.join(name, default_name)
70
71 if os.path.exists(name) and not args.overwrite:
72 logger.error('Tar archive %s exists. Use --overwrite/-o to overwrite it' % name)
73 return 1
74
75 # if the whole workspace is excluded, quit
76 if not set(workspace.keys()) - set(args.exclude):
77 logger.warning('All recipes in workspace excluded, nothing to export')
78 return 0
79
80 exported = []
81 with tarfile.open(name, 'w:gz') as tar:
82 if args.include:
83 for recipe in args.include:
84 add_recipe(tar, recipe, workspace[recipe])
85 exported.append(recipe)
86 else:
87 for recipe, data in workspace.items():
88 if recipe not in args.exclude:
89 add_recipe(tar, recipe, data)
90 exported.append(recipe)
91
92 add_metadata(tar)
93
94 logger.info('Tar archive created at %s with the following recipes: %s' % (name, ', '.join(exported)))
95 return 0
96
97def register_commands(subparsers, context):
98 """Register devtool export subcommands"""
99 parser = subparsers.add_parser('export',
100 help='Export workspace into a tar archive',
101 description='Export one or more recipes from current workspace into a tar archive',
102 group='advanced')
103
104 parser.add_argument('--file', '-f', help='Output archive file name')
105 parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite previous export tar archive')
106 group = parser.add_mutually_exclusive_group()
107 group.add_argument('--include', '-i', nargs='+', default=[], help='Include recipes in the tar archive')
108 group.add_argument('--exclude', '-e', nargs='+', default=[], help='Exclude recipes from the tar archive')
109 parser.set_defaults(func=export)
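The arcname mapping in add_recipe() above is what keeps exported archives relocatable: source trees always land under sources/<recipe> regardless of where they live on disk, while workspace files keep their workspace-relative path. A standalone sketch of the mapping, with a hypothetical workspace layout:

# Standalone sketch of the arcname mapping used by devtool export;
# the paths below are hypothetical.
def arcnames_for_recipe(recipe, data, workspace_path):
    arcnames = []
    for key, name in data.items():
        if name:
            if key == 'srctree':
                # sources always land under sources/<recipe>
                arcname = 'sources/%s' % recipe
            else:
                # workspace files keep their path relative to the workspace
                arcname = name.replace(workspace_path, '')
            arcnames.append((name, arcname))
    return arcnames

print(arcnames_for_recipe(
    'hello',
    {'srctree': '/home/user/src/hello',
     'bbappend': '/home/user/workspace/appends/hello.bbappend'},
    '/home/user/workspace'))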
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py
deleted file mode 100644
index 19c2f61c5f..0000000000
--- a/scripts/lib/devtool/ide_plugins/__init__.py
+++ /dev/null
@@ -1,282 +0,0 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
7
8import errno
9import json
10import logging
11import os
12import stat
13from enum import Enum, auto
14from devtool import DevtoolError
15from bb.utils import mkdirhier
16
17logger = logging.getLogger('devtool')
18
19
20class BuildTool(Enum):
21 UNDEFINED = auto()
22 CMAKE = auto()
23 MESON = auto()
24
25 @property
26 def is_c_ccp(self):
27 if self is BuildTool.CMAKE:
28 return True
29 if self is BuildTool.MESON:
30 return True
31 return False
32
33
34class GdbCrossConfig:
35 """Base class defining the GDB configuration generator interface
36
37 Generate a GDB configuration for a binary on the target device.
38 Only one instance per binary is allowed. This makes it possible to assign
39 unique port numbers to all gdbserver instances.
40 """
41 _gdbserver_port_next = 1234
42 _binaries = []
43
44 def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
45 self.image_recipe = image_recipe
46 self.modified_recipe = modified_recipe
47 self.gdb_cross = modified_recipe.gdb_cross
48 self.binary = binary
49 if binary in GdbCrossConfig._binaries:
50 raise DevtoolError(
51 "gdbserver config for binary %s is already generated" % binary)
52 GdbCrossConfig._binaries.append(binary)
53 self.script_dir = modified_recipe.ide_sdk_scripts_dir
54 self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
55 self.gdbserver_multi = gdbserver_multi
56 self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
57 self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
58 GdbCrossConfig._gdbserver_port_next += 1
59 self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
60 # gdbserver start script
61 gdbserver_script_file = 'gdbserver_' + self.id_pretty
62 if self.gdbserver_multi:
63 gdbserver_script_file += "_m"
64 self.gdbserver_script = os.path.join(
65 self.script_dir, gdbserver_script_file)
66 # gdbinit file
67 self.gdbinit = os.path.join(
68 self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
69 # gdb start script
70 self.gdb_script = os.path.join(
71 self.script_dir, 'gdb_' + self.id_pretty)
72
73 def _gen_gdbserver_start_script(self):
74 """Generate a shell command starting the gdbserver on the remote device via ssh
75
76 GDB supports two modes:
77 multi: gdbserver remains running over several debug sessions
78 once: gdbserver terminates after the debugged process terminates
79 """
80 cmd_lines = ['#!/bin/sh']
81 if self.gdbserver_multi:
82 temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
83 gdbserver_cmd_start = temp_dir
84 gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
85 gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
86 gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
87 self.gdb_cross.gdbserver_path, self.gdbserver_port)
88 gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"
89
90 gdbserver_cmd_stop = temp_dir
91 gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
92 gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "
93
94 gdbserver_cmd_l = []
95 gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
96 gdbserver_cmd_l.append(' shift')
97 gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
98 self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
99 gdbserver_cmd_l.append('else')
100 gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
101 self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
102 gdbserver_cmd_l.append('fi')
103 gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
104 else:
105 gdbserver_cmd_start = "%s --once :%s %s" % (
106 self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
107 gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
108 self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
109 cmd_lines.append(gdbserver_cmd)
110 GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)
111
112 def _gen_gdbinit_config(self):
113 """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
114 gdbinit_lines = ['# This file is generated by devtool ide-sdk']
115 if self.gdbserver_multi:
116 target_help = '# gdbserver --multi :%d' % self.gdbserver_port
117 remote_cmd = 'target extended-remote'
118 else:
119 target_help = '# gdbserver :%d %s' % (
120 self.gdbserver_port, self.binary)
121 remote_cmd = 'target remote'
122 gdbinit_lines.append('# On the remote target:')
123 gdbinit_lines.append(target_help)
124 gdbinit_lines.append('# On the build machine:')
125 gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree)
126 gdbinit_lines.append(
127 '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)
128
129 gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
130 gdbinit_lines.append('set substitute-path "/usr/include" "' +
131 os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
132 # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
133 gdbinit_lines.append('set debuginfod enabled off')
134 if self.image_recipe.rootfs_dbg:
135 gdbinit_lines.append(
136 'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
137 # First: Search for sources of this recipe in the workspace folder
138 if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
139 gdbinit_lines.append('set substitute-path "%s" "%s"' %
140 (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
141 else:
142 logger.error(
143 "TARGET_DBGSRC_DIR must contain the recipe name PN.")
144 # Second: Search for sources of other recipes in the rootfs-dbg
145 if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
146 gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
147 self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
148 else:
149 logger.error(
150 "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
151 else:
152 logger.warning(
153 "Cannot set up debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
154 gdbinit_lines.append(
155 '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
156 gdbinit_lines.append('set remote exec-file ' + self.binary)
157 gdbinit_lines.append(
158 'run ' + os.path.join(self.modified_recipe.d, self.binary))
159
160 GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)
161
162 def _gen_gdb_start_script(self):
163 """Generate a script starting GDB with the corresponding gdbinit configuration."""
164 cmd_lines = ['#!/bin/sh']
165 cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
166 cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
167 self.gdbinit + ' "$@"')
168 GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)
169
170 def initialize(self):
171 self._gen_gdbserver_start_script()
172 self._gen_gdbinit_config()
173 self._gen_gdb_start_script()
174
175 @staticmethod
176 def write_file(script_file, cmd_lines, executable=False):
177 script_dir = os.path.dirname(script_file)
178 mkdirhier(script_dir)
179 with open(script_file, 'w') as script_f:
180 script_f.write(os.linesep.join(cmd_lines))
181 script_f.write(os.linesep)
182 if executable:
183 st = os.stat(script_file)
184 os.chmod(script_file, st.st_mode | stat.S_IEXEC)
185 logger.info("Created: %s" % script_file)
186
187
188class IdeBase:
189 """Base class defining the interface for IDE plugins"""
190
191 def __init__(self):
192 self.ide_name = 'undefined'
193 self.gdb_cross_configs = []
194
195 @classmethod
196 def ide_plugin_priority(cls):
197 """Used to find the default ide handler if --ide is not passed"""
198 return 10
199
200 def setup_shared_sysroots(self, shared_env):
201 logger.warning("Shared sysroot mode is not supported for IDE %s" %
202 self.ide_name)
203
204 def setup_modified_recipe(self, args, image_recipe, modified_recipe):
205 logger.warning("Modified recipe mode is not supported for IDE %s" %
206 self.ide_name)
207
208 def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
209 binaries = modified_recipe.find_installed_binaries()
210 for binary in binaries:
211 gdb_cross_config = gdb_cross_config_class(
212 image_recipe, modified_recipe, binary)
213 gdb_cross_config.initialize()
214 self.gdb_cross_configs.append(gdb_cross_config)
215
216 @staticmethod
217 def gen_oe_scripts_sym_link(modified_recipe):
218 # create a sym-link from sources to the scripts directory
219 if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
220 IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
221 os.path.join(modified_recipe.real_srctree, 'oe-scripts'))
222
223 @staticmethod
224 def update_json_file(json_dir, json_file, update_dict):
225 """Update a json file
226
227 By default it uses the dict.update function, i.e. the file's current
228 contents are merged with update_dict via a shallow merge.
229 """
230 json_path = os.path.join(json_dir, json_file)
231 logger.info("Updating IDE config file: %s (%s)" %
232 (json_file, json_path))
233 if not os.path.exists(json_dir):
234 os.makedirs(json_dir)
235 try:
236 with open(json_path) as f:
237 orig_dict = json.load(f)
238 except json.decoder.JSONDecodeError:
239 logger.info(
240 "Decoding %s failed. Probably because of comments in the json file" % json_path)
241 orig_dict = {}
242 except FileNotFoundError:
243 orig_dict = {}
244 orig_dict.update(update_dict)
245 with open(json_path, 'w') as f:
246 json.dump(orig_dict, f, indent=4)
247
248 @staticmethod
249 def symlink_force(tgt, dst):
250 try:
251 os.symlink(tgt, dst)
252 except OSError as err:
253 if err.errno == errno.EEXIST:
254 if os.readlink(dst) != tgt:
255 os.remove(dst)
256 os.symlink(tgt, dst)
257 else:
258 raise err
259
260
261def get_devtool_deploy_opts(args):
262 """Filter args for devtool deploy-target args"""
263 if not args.target:
264 return None
265 devtool_deploy_opts = [args.target]
266 if args.no_host_check:
267 devtool_deploy_opts += ["-c"]
268 if args.show_status:
269 devtool_deploy_opts += ["-s"]
270 if args.no_preserve:
271 devtool_deploy_opts += ["-p"]
272 if args.no_check_space:
273 devtool_deploy_opts += ["--no-check-space"]
274 if args.ssh_exec:
275 devtool_deploy_opts += ["-e", args.ssh_exec]
276 if args.port:
277 devtool_deploy_opts += ["-P", args.port]
278 if args.key:
279 devtool_deploy_opts += ["-I", args.key]
280 if args.strip is False:
281 devtool_deploy_opts += ["--no-strip"]
282 return devtool_deploy_opts
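update_json_file() above is deliberately forgiving: a missing settings file, or one that json cannot parse (VSCode allows comments in its JSON), is treated as empty, and the update is a shallow dict.update, so callers must pass complete top-level values since an existing nested object is replaced, not extended. A standalone sketch of that behaviour, with hypothetical file names:

import json
import os

def update_json_file(json_dir, json_file, update_dict):
    json_path = os.path.join(json_dir, json_file)
    os.makedirs(json_dir, exist_ok=True)
    try:
        with open(json_path) as f:
            orig_dict = json.load(f)
    except (json.decoder.JSONDecodeError, FileNotFoundError):
        # e.g. a settings.json with comments, or a first run
        orig_dict = {}
    orig_dict.update(update_dict)  # shallow: nested dicts are replaced wholesale
    with open(json_path, 'w') as f:
        json.dump(orig_dict, f, indent=4)

update_json_file('/tmp/ide-sdk-demo', 'settings.json',
                 {'cmake.configureOnOpen': True})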
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
deleted file mode 100644
index a62b93224e..0000000000
--- a/scripts/lib/devtool/ide_plugins/ide_code.py
+++ /dev/null
@@ -1,463 +0,0 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
7
8import json
9import logging
10import os
11import shutil
12from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
13
14logger = logging.getLogger('devtool')
15
16
17class GdbCrossConfigVSCode(GdbCrossConfig):
18 def __init__(self, image_recipe, modified_recipe, binary):
19 super().__init__(image_recipe, modified_recipe, binary, False)
20
21 def initialize(self):
22 self._gen_gdbserver_start_script()
23
24
25class IdeVSCode(IdeBase):
26 """Manage IDE configurations for VSCode
27
28 Modified recipe mode:
29 - cmake: use the cmake-preset generated by devtool ide-sdk
30 - meson: meson is called via a wrapper script generated by devtool ide-sdk
31
32 Shared sysroot mode:
33 In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
34 A workspace cannot be created because there is no recipe that defines how a workspace could
35 be set up.
36 - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
37 The cmake-kit uses the environment script and the tool-chain file
38 generated by meta-ide-support.
39 - meson: Meson needs manual workspace configuration.
40 """
41
42 @classmethod
43 def ide_plugin_priority(cls):
44 """If --ide is not passed this is the default plugin"""
45 if shutil.which('code'):
46 return 100
47 return 0
48
49 def setup_shared_sysroots(self, shared_env):
50 """Expose the toolchain of the shared sysroots SDK"""
51 datadir = shared_env.ide_support.datadir
52 deploy_dir_image = shared_env.ide_support.deploy_dir_image
53 real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
54 standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
55 vscode_ws_path = os.path.join(
56 os.environ['HOME'], '.local', 'share', 'CMakeTools')
57 cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
58 oecmake_generator = "Ninja"
59 env_script = os.path.join(
60 deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
61
62 if not os.path.isdir(vscode_ws_path):
63 os.makedirs(vscode_ws_path)
64 cmake_kits_old = []
65 if os.path.exists(cmake_kits_path):
66 with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
67 cmake_kits_old = json.load(cmake_kits_file)
68 cmake_kits = cmake_kits_old.copy()
69
70 cmake_kit_new = {
71 "name": "OE " + real_multimach_target_sys,
72 "environmentSetupScript": env_script,
73 "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
74 "preferredGenerator": {
75 "name": oecmake_generator
76 }
77 }
78
79 def merge_kit(cmake_kits, cmake_kit_new):
80 i = 0
81 while i < len(cmake_kits):
82 if 'environmentSetupScript' in cmake_kits[i] and \
83 cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
84 cmake_kits[i] = cmake_kit_new
85 return
86 i += 1
87 cmake_kits.append(cmake_kit_new)
88 merge_kit(cmake_kits, cmake_kit_new)
89
90 if cmake_kits != cmake_kits_old:
91 logger.info("Updating: %s" % cmake_kits_path)
92 with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
93 json.dump(cmake_kits, cmake_kits_file, indent=4)
94 else:
95 logger.info("Already up to date: %s" % cmake_kits_path)
96
97 cmake_native = os.path.join(
98 shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
99 if os.path.isfile(cmake_native):
100 logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to the ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
101 else:
102 logger.error("Cannot find cmake native at: %s" % cmake_native)
103
104 def dot_code_dir(self, modified_recipe):
105 return os.path.join(modified_recipe.srctree, '.vscode')
106
107 def __vscode_settings_meson(self, settings_dict, modified_recipe):
108 if modified_recipe.build_tool is not BuildTool.MESON:
109 return
110 settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper
111
112 confopts = modified_recipe.mesonopts.split()
113 confopts += modified_recipe.meson_cross_file.split()
114 confopts += modified_recipe.extra_oemeson.split()
115 settings_dict["mesonbuild.configureOptions"] = confopts
116 settings_dict["mesonbuild.buildFolder"] = modified_recipe.b
117
118 def __vscode_settings_cmake(self, settings_dict, modified_recipe):
119 """Add cmake specific settings to settings.json.
120
121 Note: most settings are passed to the cmake preset.
122 """
123 if modified_recipe.build_tool is not BuildTool.CMAKE:
124 return
125 settings_dict["cmake.configureOnOpen"] = True
126 settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree
127
128 def vscode_settings(self, modified_recipe, image_recipe):
129 files_excludes = {
130 "**/.git/**": True,
131 "**/oe-logs/**": True,
132 "**/oe-workdir/**": True,
133 "**/source-date-epoch/**": True
134 }
135 python_exclude = [
136 "**/.git/**",
137 "**/oe-logs/**",
138 "**/oe-workdir/**",
139 "**/source-date-epoch/**"
140 ]
141 files_readonly = {
142 modified_recipe.recipe_sysroot + '/**': True,
143 modified_recipe.recipe_sysroot_native + '/**': True,
144 }
145 if image_recipe.rootfs_dbg is not None:
146 files_readonly[image_recipe.rootfs_dbg + '/**'] = True
147 settings_dict = {
148 "files.watcherExclude": files_excludes,
149 "files.exclude": files_excludes,
150 "files.readonlyInclude": files_readonly,
151 "python.analysis.exclude": python_exclude
152 }
153 self.__vscode_settings_cmake(settings_dict, modified_recipe)
154 self.__vscode_settings_meson(settings_dict, modified_recipe)
155
156 settings_file = 'settings.json'
157 IdeBase.update_json_file(
158 self.dot_code_dir(modified_recipe), settings_file, settings_dict)
159
160 def __vscode_extensions_cmake(self, modified_recipe, recommendations):
161 if modified_recipe.build_tool is not BuildTool.CMAKE:
162 return
163 recommendations += [
164 "twxs.cmake",
165 "ms-vscode.cmake-tools",
166 "ms-vscode.cpptools",
167 "ms-vscode.cpptools-extension-pack",
168 "ms-vscode.cpptools-themes"
169 ]
170
171 def __vscode_extensions_meson(self, modified_recipe, recommendations):
172 if modified_recipe.build_tool is not BuildTool.MESON:
173 return
174 recommendations += [
175 'mesonbuild.mesonbuild',
176 "ms-vscode.cpptools",
177 "ms-vscode.cpptools-extension-pack",
178 "ms-vscode.cpptools-themes"
179 ]
180
181 def vscode_extensions(self, modified_recipe):
182 recommendations = []
183 self.__vscode_extensions_cmake(modified_recipe, recommendations)
184 self.__vscode_extensions_meson(modified_recipe, recommendations)
185 extensions_file = 'extensions.json'
186 IdeBase.update_json_file(
187 self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})
188
189 def vscode_c_cpp_properties(self, modified_recipe):
190 properties_dict = {
191 "name": modified_recipe.recipe_id_pretty,
192 }
193 if modified_recipe.build_tool is BuildTool.CMAKE:
194 properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
195 elif modified_recipe.build_tool is BuildTool.MESON:
196 properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
197 properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
198 else: # no C/C++ build
199 return
200
201 properties_dicts = {
202 "configurations": [
203 properties_dict
204 ],
205 "version": 4
206 }
207 prop_file = 'c_cpp_properties.json'
208 IdeBase.update_json_file(
209 self.dot_code_dir(modified_recipe), prop_file, properties_dicts)
210
211 def vscode_launch_bin_dbg(self, gdb_cross_config):
212 modified_recipe = gdb_cross_config.modified_recipe
213
214 launch_config = {
215 "name": gdb_cross_config.id_pretty,
216 "type": "cppdbg",
217 "request": "launch",
218 "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
219 "stopAtEntry": True,
220 "cwd": "${workspaceFolder}",
221 "environment": [],
222 "externalConsole": False,
223 "MIMode": "gdb",
224 "preLaunchTask": gdb_cross_config.id_pretty,
225 "miDebuggerPath": modified_recipe.gdb_cross.gdb,
226 "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
227 }
228
229 # Search for header files in recipe-sysroot.
230 src_file_map = {
231 "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
232 }
233 # First, search for unstripped binaries in the image folder.
234 # These binaries are copied (and optionally stripped) by deploy-target.
235 setup_commands = [
236 {
237 "description": "sysroot",
238 "text": "set sysroot " + modified_recipe.d
239 }
240 ]
241
242 if gdb_cross_config.image_recipe.rootfs_dbg:
243 launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
244 gdb_cross_config.image_recipe)
245 # First: Search for sources of this recipe in the workspace folder
246 if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
247 src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
248 else:
249 logger.error(
250 "TARGET_DBGSRC_DIR must contain the recipe name PN.")
251 # Second: Search for sources of other recipes in the rootfs-dbg
252 if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
253 src_file_map["/usr/src/debug"] = os.path.join(
254 gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
255 else:
256 logger.error(
257 "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
258 else:
259 logger.warning(
260 "Cannot set up debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
261
262 launch_config['sourceFileMap'] = src_file_map
263 launch_config['setupCommands'] = setup_commands
264 return launch_config
265
266 def vscode_launch(self, modified_recipe):
267 """GDB Launch configuration for binaries (elf files)"""
268
269 configurations = []
270 for gdb_cross_config in self.gdb_cross_configs:
271 if gdb_cross_config.modified_recipe is modified_recipe:
272 configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
273 launch_dict = {
274 "version": "0.2.0",
275 "configurations": configurations
276 }
277 launch_file = 'launch.json'
278 IdeBase.update_json_file(
279 self.dot_code_dir(modified_recipe), launch_file, launch_dict)
280
281 def vscode_tasks_cpp(self, args, modified_recipe):
282 run_install_deploy = modified_recipe.gen_install_deploy_script(args)
283 install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
284 tasks_dict = {
285 "version": "2.0.0",
286 "tasks": [
287 {
288 "label": install_task_name,
289 "type": "shell",
290 "command": run_install_deploy,
291 "problemMatcher": []
292 }
293 ]
294 }
295 for gdb_cross_config in self.gdb_cross_configs:
296 if gdb_cross_config.modified_recipe is not modified_recipe:
297 continue
298 tasks_dict['tasks'].append(
299 {
300 "label": gdb_cross_config.id_pretty,
301 "type": "shell",
302 "isBackground": True,
303 "dependsOn": [
304 install_task_name
305 ],
306 "command": gdb_cross_config.gdbserver_script,
307 "problemMatcher": [
308 {
309 "pattern": [
310 {
311 "regexp": ".",
312 "file": 1,
313 "location": 2,
314 "message": 3
315 }
316 ],
317 "background": {
318 "activeOnStart": True,
319 "beginsPattern": ".",
320 "endsPattern": ".",
321 }
322 }
323 ]
324 })
325 tasks_file = 'tasks.json'
326 IdeBase.update_json_file(
327 self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
328
329 def vscode_tasks_fallback(self, args, modified_recipe):
330 oe_init_dir = modified_recipe.oe_init_dir
331 oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
332 dt_build = "devtool build "
333 dt_build_label = dt_build + modified_recipe.recipe_id_pretty
334 dt_build_cmd = dt_build + modified_recipe.bpn
335 clean_opt = " --clean"
336 dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
337 dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
338 dt_deploy = "devtool deploy-target "
339 dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
340 dt_deploy_cmd = dt_deploy + modified_recipe.bpn
341 dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
342 deploy_opts = ' '.join(get_devtool_deploy_opts(args))
343 tasks_dict = {
344 "version": "2.0.0",
345 "tasks": [
346 {
347 "label": dt_build_label,
348 "type": "shell",
349 "command": "bash",
350 "linux": {
351 "options": {
352 "cwd": oe_init_dir
353 }
354 },
355 "args": [
356 "--login",
357 "-c",
358 "%s%s" % (oe_init, dt_build_cmd)
359 ],
360 "problemMatcher": []
361 },
362 {
363 "label": dt_deploy_label,
364 "type": "shell",
365 "command": "bash",
366 "linux": {
367 "options": {
368 "cwd": oe_init_dir
369 }
370 },
371 "args": [
372 "--login",
373 "-c",
374 "%s%s %s" % (
375 oe_init, dt_deploy_cmd, deploy_opts)
376 ],
377 "problemMatcher": []
378 },
379 {
380 "label": dt_build_deploy_label,
381 "dependsOrder": "sequence",
382 "dependsOn": [
383 dt_build_label,
384 dt_deploy_label
385 ],
386 "problemMatcher": [],
387 "group": {
388 "kind": "build",
389 "isDefault": True
390 }
391 },
392 {
393 "label": dt_build_clean_label,
394 "type": "shell",
395 "command": "bash",
396 "linux": {
397 "options": {
398 "cwd": oe_init_dir
399 }
400 },
401 "args": [
402 "--login",
403 "-c",
404 "%s%s" % (oe_init, dt_build_clean_cmd)
405 ],
406 "problemMatcher": []
407 }
408 ]
409 }
410 if modified_recipe.gdb_cross:
411 for gdb_cross_config in self.gdb_cross_configs:
412 if gdb_cross_config.modified_recipe is not modified_recipe:
413 continue
414 tasks_dict['tasks'].append(
415 {
416 "label": gdb_cross_config.id_pretty,
417 "type": "shell",
418 "isBackground": True,
419 "dependsOn": [
420 dt_build_deploy_label
421 ],
422 "command": gdb_cross_config.gdbserver_script,
423 "problemMatcher": [
424 {
425 "pattern": [
426 {
427 "regexp": ".",
428 "file": 1,
429 "location": 2,
430 "message": 3
431 }
432 ],
433 "background": {
434 "activeOnStart": True,
435 "beginsPattern": ".",
436 "endsPattern": ".",
437 }
438 }
439 ]
440 })
441 tasks_file = 'tasks.json'
442 IdeBase.update_json_file(
443 self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
444
445 def vscode_tasks(self, args, modified_recipe):
446 if modified_recipe.build_tool.is_c_ccp:
447 self.vscode_tasks_cpp(args, modified_recipe)
448 else:
449 self.vscode_tasks_fallback(args, modified_recipe)
450
451 def setup_modified_recipe(self, args, image_recipe, modified_recipe):
452 self.vscode_settings(modified_recipe, image_recipe)
453 self.vscode_extensions(modified_recipe)
454 self.vscode_c_cpp_properties(modified_recipe)
455 if args.target:
456 self.initialize_gdb_cross_configs(
457 image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
458 self.vscode_launch(modified_recipe)
459 self.vscode_tasks(args, modified_recipe)
460
461
462def register_ide_plugin(ide_plugins):
463 ide_plugins['code'] = IdeVSCode
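merge_kit() inside setup_shared_sysroots() above treats environmentSetupScript as the identity of a cmake-kit: an existing kit with the same script is replaced in place, anything else is appended, so re-running ide-sdk updates rather than duplicates the kit. A standalone sketch of that rule (the kit contents are hypothetical):

def merge_kit(cmake_kits, cmake_kit_new):
    for i, kit in enumerate(cmake_kits):
        if kit.get('environmentSetupScript') == cmake_kit_new['environmentSetupScript']:
            cmake_kits[i] = cmake_kit_new  # same env script: update in place
            return
    cmake_kits.append(cmake_kit_new)       # otherwise add a new kit

kits = [{'name': 'OE old', 'environmentSetupScript': '/deploy/environment-setup-demo'}]
merge_kit(kits, {'name': 'OE new', 'environmentSetupScript': '/deploy/environment-setup-demo'})
print(kits)  # the old entry is replaced, not duplicated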
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py
deleted file mode 100644
index f106c5a026..0000000000
--- a/scripts/lib/devtool/ide_plugins/ide_none.py
+++ /dev/null
@@ -1,53 +0,0 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk generic IDE plugin"""
7
8import os
9import logging
10from devtool.ide_plugins import IdeBase, GdbCrossConfig
11
12logger = logging.getLogger('devtool')
13
14
15class IdeNone(IdeBase):
16 """Generate some generic helpers for other IDEs
17
18 Modified recipe mode:
19 Generate some helper scripts for remote debugging with GDB
20
21 Shared sysroot mode:
22 A wrapper for bitbake meta-ide-support and bitbake build-sysroots
23 """
24
25 def __init__(self):
26 super().__init__()
27
28 def setup_shared_sysroots(self, shared_env):
29 real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
30 deploy_dir_image = shared_env.ide_support.deploy_dir_image
31 env_script = os.path.join(
32 deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
33 logger.info(
34 "To use this SDK please source this: %s" % env_script)
35
36 def setup_modified_recipe(self, args, image_recipe, modified_recipe):
37 """Generate some helper scripts and config files
38
39 - Execute the do_install task
40 - Execute devtool deploy-target
41 - Generate a gdbinit file per executable
42 - Generate the oe-scripts sym-link
43 """
44 script_path = modified_recipe.gen_install_deploy_script(args)
45 logger.info("Created: %s" % script_path)
46
47 self.initialize_gdb_cross_configs(image_recipe, modified_recipe)
48
49 IdeBase.gen_oe_scripts_sym_link(modified_recipe)
50
51
52def register_ide_plugin(ide_plugins):
53 ide_plugins['none'] = IdeNone
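Both plugin files end by registering a class in the shared ide_plugins dict. The docstrings indicate that, when --ide is not passed, the default handler is chosen by ide_plugin_priority(): IdeVSCode returns 100 when the code binary is on PATH (and 0 otherwise), while IdeBase's default of 10 makes IdeNone the fallback. A standalone sketch of one way that selection can work, using minimal stand-in classes:

class IdeNone:
    @classmethod
    def ide_plugin_priority(cls):
        return 10

class IdeVSCode:
    @classmethod
    def ide_plugin_priority(cls):
        return 100

ide_plugins = {}
ide_plugins['none'] = IdeNone
ide_plugins['code'] = IdeVSCode

# with no --ide argument, pick the plugin class with the highest priority
default = max(ide_plugins.values(), key=lambda p: p.ide_plugin_priority())
print(default.__name__)  # IdeVSCode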
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
deleted file mode 100755
index f8cf65f4a8..0000000000
--- a/scripts/lib/devtool/ide_sdk.py
+++ /dev/null
@@ -1,1009 +0,0 @@
1# Development tool - ide-sdk command plugin
2#
3# Copyright (C) 2023-2024 Siemens AG
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool ide-sdk plugin"""
8
9import json
10import logging
11import os
12import re
13import shutil
14import stat
15import subprocess
16import sys
17from argparse import RawTextHelpFormatter
18from enum import Enum
19
20import scriptutils
21import bb
22from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe
23from devtool.standard import get_real_srctree
24from devtool.ide_plugins import BuildTool
25
26
27logger = logging.getLogger('devtool')
28
29# dict of classes derived from IdeBase
30ide_plugins = {}
31
32
33class DevtoolIdeMode(Enum):
34 """Different modes are supported by the ide-sdk plugin.
35
36 The enum might be extended by more advanced modes in the future. Some ideas:
37 - auto: modified if all recipes are modified, shared if none of the recipes is modified.
38 - mixed: modified mode for modified recipes, shared mode for all other recipes.
39 """
40
41 modified = 'modified'
42 shared = 'shared'
43
44
45class TargetDevice:
46 """SSH remote login parameters"""
47
48 def __init__(self, args):
49 self.extraoptions = ''
50 if args.no_host_check:
51 self.extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
52 self.ssh_sshexec = 'ssh'
53 if args.ssh_exec:
54 self.ssh_sshexec = args.ssh_exec
55 self.ssh_port = ''
56 if args.port:
57 self.ssh_port = "-p %s" % args.port
58 if args.key:
59 self.extraoptions += ' -i %s' % args.key
60
61 self.target = args.target
62 target_sp = args.target.split('@')
63 if len(target_sp) == 1:
64 self.login = ""
65 self.host = target_sp[0]
66 elif len(target_sp) == 2:
67 self.login = target_sp[0]
68 self.host = target_sp[1]
69 else:
70 logger.error("Invalid target argument: %s" % args.target)
71
72
73class RecipeNative:
74 """Base class for calling bitbake to provide a -native recipe"""
75
76 def __init__(self, name, target_arch=None):
77 self.name = name
78 self.target_arch = target_arch
79 self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot']
80 self.staging_bindir_native = None
81 self.target_sys = None
82 self.__native_bin = None
83
84 def _initialize(self, config, workspace, tinfoil):
85 """Get the parsed recipe"""
86 recipe_d = parse_recipe(
87 config, tinfoil, self.name, appends=True, filter_workspace=False)
88 if not recipe_d:
89 raise DevtoolError("Parsing %s recipe failed" % self.name)
90 self.staging_bindir_native = os.path.realpath(
91 recipe_d.getVar('STAGING_BINDIR_NATIVE'))
92 self.target_sys = recipe_d.getVar('TARGET_SYS')
93 return recipe_d
94
95 def initialize(self, config, workspace, tinfoil):
96 """Basic initialization that can be overridden by a derived class"""
97 self._initialize(config, workspace, tinfoil)
98
99 @property
100 def native_bin(self):
101 if not self.__native_bin:
102 raise DevtoolError("native binary name is not defined.")
103 return self.__native_bin
104
105
106class RecipeGdbCross(RecipeNative):
107 """Handle gdb-cross on the host and the gdbserver on the target device"""
108
109 def __init__(self, args, target_arch, target_device):
110 super().__init__('gdb-cross-' + target_arch, target_arch)
111 self.target_device = target_device
112 self.gdb = None
113 self.gdbserver_port_next = int(args.gdbserver_port_start)
114 self.config_db = {}
115
116 def __find_gdbserver(self, config, tinfoil):
117 """Absolute path of the gdbserver"""
118 recipe_d_gdb = parse_recipe(
119 config, tinfoil, 'gdb', appends=True, filter_workspace=False)
120 if not recipe_d_gdb:
121 raise DevtoolError("Parsing gdb recipe failed")
122 return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver')
123
124 def initialize(self, config, workspace, tinfoil):
125 super()._initialize(config, workspace, tinfoil)
126 gdb_bin = self.target_sys + '-gdb'
127 gdb_path = os.path.join(
128 self.staging_bindir_native, self.target_sys, gdb_bin)
129 self.gdb = gdb_path
130 self.gdbserver_path = self.__find_gdbserver(config, tinfoil)
131
132 @property
133 def host(self):
134 return self.target_device.host
135
136
137class RecipeImage:
138 """Handle some image recipe related properties
139
140 Most workflows require firmware that runs on the target device.
141 This firmware must be consistent with the setup of the host system.
142 In particular, the debug symbols must be compatible. For this, the
143 rootfs must be created as part of the SDK.
144 """
145
146 def __init__(self, name):
147 self.combine_dbg_image = False
148 self.gdbserver_missing = False
149 self.name = name
150 self.rootfs = None
151 self.__rootfs_dbg = None
152 self.bootstrap_tasks = [self.name + ':do_build']
153
154 def initialize(self, config, tinfoil):
155 image_d = parse_recipe(
156 config, tinfoil, self.name, appends=True, filter_workspace=False)
157 if not image_d:
158 raise DevtoolError(
159 "Parsing image recipe %s failed" % self.name)
160
161 self.combine_dbg_image = bb.data.inherits_class(
162 'image-combined-dbg', image_d)
163
164 workdir = image_d.getVar('WORKDIR')
165 self.rootfs = os.path.join(workdir, 'rootfs')
166 if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1":
167 self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')
168
169 self.gdbserver_missing = 'gdbserver' not in image_d.getVar(
170 'IMAGE_INSTALL') and 'tools-debug' not in image_d.getVar('IMAGE_FEATURES')
171
172 @property
173 def debug_support(self):
174 return bool(self.rootfs_dbg)
175
176 @property
177 def rootfs_dbg(self):
178 if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg):
179 return self.__rootfs_dbg
180 return None
181
182
183class RecipeMetaIdeSupport:
184 """For the shared sysroots mode meta-ide-support is needed
185
186 For use cases where just a cross tool-chain is required but
187 no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support
188 and bitbake build-sysroots. This also makes it possible to expose the
189 cross-toolchains to IDEs. For example, VSCode supports different tool-chains via cmake-kits.
190 """
191
192 def __init__(self):
193 self.bootstrap_tasks = ['meta-ide-support:do_build']
194 self.topdir = None
195 self.datadir = None
196 self.deploy_dir_image = None
197 self.build_sys = None
198 # From toolchain-scripts
199 self.real_multimach_target_sys = None
200
201 def initialize(self, config, tinfoil):
202 meta_ide_support_d = parse_recipe(
203 config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False)
204 if not meta_ide_support_d:
205 raise DevtoolError("Parsing meta-ide-support recipe failed")
206
207 self.topdir = meta_ide_support_d.getVar('TOPDIR')
208 self.datadir = meta_ide_support_d.getVar('datadir')
209 self.deploy_dir_image = meta_ide_support_d.getVar(
210 'DEPLOY_DIR_IMAGE')
211 self.build_sys = meta_ide_support_d.getVar('BUILD_SYS')
212 self.real_multimach_target_sys = meta_ide_support_d.getVar(
213 'REAL_MULTIMACH_TARGET_SYS')
214
215
216class RecipeBuildSysroots:
217 """For the shared sysroots mode build-sysroots is needed"""
218
219 def __init__(self):
220 self.standalone_sysroot = None
221 self.standalone_sysroot_native = None
222 self.bootstrap_tasks = [
223 'build-sysroots:do_build_target_sysroot',
224 'build-sysroots:do_build_native_sysroot'
225 ]
226
227 def initialize(self, config, tinfoil):
228 build_sysroots_d = parse_recipe(
229 config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False)
230 if not build_sysroots_d:
231 raise DevtoolError("Parsing build-sysroots recipe failed")
232 self.standalone_sysroot = build_sysroots_d.getVar(
233 'STANDALONE_SYSROOT')
234 self.standalone_sysroot_native = build_sysroots_d.getVar(
235 'STANDALONE_SYSROOT_NATIVE')
236
237
238class SharedSysrootsEnv:
239 """Handle the shared sysroots based workflow
240
241 Support the workflow with just a tool-chain without a recipe.
242 It's basically like:
243 bitbake some-dependencies
244 bitbake meta-ide-support
245 bitbake build-sysroots
246 Use the environment-* file found in the deploy folder
247 """
248
249 def __init__(self):
250 self.ide_support = None
251 self.build_sysroots = None
252
253 def initialize(self, ide_support, build_sysroots):
254 self.ide_support = ide_support
255 self.build_sysroots = build_sysroots
256
257 def setup_ide(self, ide):
258 ide.setup(self)
259
260
261class RecipeNotModified:
262 """Handling of recipes added to the Direct SDK shared sysroots."""
263
264 def __init__(self, name):
265 self.name = name
266 self.bootstrap_tasks = [name + ':do_populate_sysroot']
267
268
269class RecipeModified:
270 """Handling of recipes in the workspace created by devtool modify"""
271 OE_INIT_BUILD_ENV = 'oe-init-build-env'
272
273 VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$")
274
275 def __init__(self, name):
276 self.name = name
277 self.bootstrap_tasks = [name + ':do_install']
278 self.gdb_cross = None
279 # workspace
280 self.real_srctree = None
281 self.srctree = None
282 self.ide_sdk_dir = None
283 self.ide_sdk_scripts_dir = None
284 self.bbappend = None
285 # recipe variables from d.getVar
286 self.b = None
287 self.base_libdir = None
288 self.bblayers = None
289 self.bpn = None
290 self.d = None
291 self.debug_build = None
292 self.fakerootcmd = None
293 self.fakerootenv = None
294 self.libdir = None
295 self.max_process = None
296 self.package_arch = None
297 self.package_debug_split_style = None
298 self.path = None
299 self.pn = None
300 self.recipe_sysroot = None
301 self.recipe_sysroot_native = None
302 self.staging_incdir = None
303 self.strip_cmd = None
304 self.target_arch = None
305 self.target_dbgsrc_dir = None
306 self.topdir = None
307 self.workdir = None
308 self.recipe_id = None
309 # replicate bitbake build environment
310 self.exported_vars = None
311 self.cmd_compile = None
312 self.__oe_init_dir = None
313 # main build tool used by this recipe
314 self.build_tool = BuildTool.UNDEFINED
315 # build_tool = cmake
316 self.oecmake_generator = None
317 self.cmake_cache_vars = None
318 # build_tool = meson
319 self.meson_buildtype = None
320 self.meson_wrapper = None
321 self.mesonopts = None
322 self.extra_oemeson = None
323 self.meson_cross_file = None
324
325 def initialize(self, config, workspace, tinfoil):
326 recipe_d = parse_recipe(
327 config, tinfoil, self.name, appends=True, filter_workspace=False)
328 if not recipe_d:
329 raise DevtoolError("Parsing %s recipe failed" % self.name)
330
331 # Verify this recipe is built as externalsrc setup by devtool modify
332 workspacepn = check_workspace_recipe(
333 workspace, self.name, bbclassextend=True)
334 self.srctree = workspace[workspacepn]['srctree']
335 # Need to grab this here in case the source is within a subdirectory
336 self.real_srctree = get_real_srctree(
337 self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR'))
338 self.bbappend = workspace[workspacepn]['bbappend']
339
340 self.ide_sdk_dir = os.path.join(
341 config.workspace_path, 'ide-sdk', self.name)
342 if os.path.exists(self.ide_sdk_dir):
343 shutil.rmtree(self.ide_sdk_dir)
344 self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts')
345
346 self.b = recipe_d.getVar('B')
347 self.base_libdir = recipe_d.getVar('base_libdir')
348 self.bblayers = recipe_d.getVar('BBLAYERS').split()
349 self.bpn = recipe_d.getVar('BPN')
350 self.cxx = recipe_d.getVar('CXX')
351 self.d = recipe_d.getVar('D')
352 self.debug_build = recipe_d.getVar('DEBUG_BUILD')
353 self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
354 self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
355 self.libdir = recipe_d.getVar('libdir')
356 self.max_process = int(recipe_d.getVar(
357 "BB_NUMBER_THREADS") or os.cpu_count() or 1)
358 self.package_arch = recipe_d.getVar('PACKAGE_ARCH')
359 self.package_debug_split_style = recipe_d.getVar(
360 'PACKAGE_DEBUG_SPLIT_STYLE')
361 self.path = recipe_d.getVar('PATH')
362 self.pn = recipe_d.getVar('PN')
363 self.recipe_sysroot = os.path.realpath(
364 recipe_d.getVar('RECIPE_SYSROOT'))
365 self.recipe_sysroot_native = os.path.realpath(
366 recipe_d.getVar('RECIPE_SYSROOT_NATIVE'))
367 self.staging_bindir_toolchain = os.path.realpath(
368 recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN'))
369 self.staging_incdir = os.path.realpath(
370 recipe_d.getVar('STAGING_INCDIR'))
371 self.strip_cmd = recipe_d.getVar('STRIP')
372 self.target_arch = recipe_d.getVar('TARGET_ARCH')
373 self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR')
374 self.topdir = recipe_d.getVar('TOPDIR')
375 self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR'))
376
377 self.__init_exported_variables(recipe_d)
378
379 if bb.data.inherits_class('cmake', recipe_d):
380 self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR')
381 self.__init_cmake_preset_cache(recipe_d)
382 self.build_tool = BuildTool.CMAKE
383 elif bb.data.inherits_class('meson', recipe_d):
384 self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE')
385 self.mesonopts = recipe_d.getVar('MESONOPTS')
386 self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON')
387 self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE')
388 self.build_tool = BuildTool.MESON
389
390 # Recipe ID is the identifier for IDE config sections
391 self.recipe_id = self.bpn + "-" + self.package_arch
392 self.recipe_id_pretty = self.bpn + ": " + self.package_arch
393
394 @staticmethod
395 def is_valid_shell_variable(var):
396         """Skip shell variables with names that are not valid in bash
397
398         Prevents hard-to-debug issues caused by variables (e.g. set by
399         systemd) which are not used in this context but break various tools.
400 """
401 if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var):
402             bb.debug(1, "valid shell variable: %s" % var)
403 return True
404 return False
405
406 def solib_search_path(self, image):
407 """Search for debug symbols in the rootfs and rootfs-dbg
408
409 The debug symbols of shared libraries which are provided by other packages
410 are grabbed from the -dbg packages in the rootfs-dbg.
411
412         But most cross-debugging tools like gdb, perf, and systemtap need to find
413         the executable/library first and then locate the corresponding symbols file
414         through its debuglink note. Therefore the library paths from the rootfs are added as well.
415
416 Note: For the devtool modified recipe compiled from the IDE, the debug
417 symbols are taken from the unstripped binaries in the image folder.
418 Also, devtool deploy-target takes the files from the image folder.
419 debug symbols in the image folder refer to the corresponding source files
420 with absolute paths of the build machine. Debug symbols found in the
421 rootfs-dbg are relocated and contain paths which refer to the source files
422 installed on the target device e.g. /usr/src/...
423 """
424 base_libdir = self.base_libdir.lstrip('/')
425 libdir = self.libdir.lstrip('/')
426 so_paths = [
427 # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug
428 os.path.join(image.rootfs_dbg, base_libdir, ".debug"),
429 os.path.join(image.rootfs_dbg, libdir, ".debug"),
430 # debug symbols for package_debug_split_style: debug-file-directory
431 os.path.join(image.rootfs_dbg, "usr", "lib", "debug"),
432
433 # The binaries are required as well, the debug packages are not enough
434 # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg
435 os.path.join(image.rootfs_dbg, base_libdir),
436 os.path.join(image.rootfs_dbg, libdir),
437 # Without image-combined-dbg.bbclass the binaries are only in rootfs.
438 # Note: Stepping into source files located in rootfs-dbg does not
439 # work without image-combined-dbg.bbclass yet.
440 os.path.join(image.rootfs, base_libdir),
441 os.path.join(image.rootfs, libdir)
442 ]
443 return so_paths
444
445 def solib_search_path_str(self, image):
446 """Return a : separated list of paths usable by GDB's set solib-search-path"""
447 return ':'.join(self.solib_search_path(image))
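    # For illustration, the returned string is typically consumed in a GDB
    # session; the paths shown are placeholders, not literal values:
    #
    #   (gdb) set solib-search-path <rootfs-dbg>/lib/.debug:<rootfs-dbg>/usr/lib/.debug:...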
448
449 def __init_exported_variables(self, d):
450 """Find all variables with export flag set.
451
452         This allows generating IDE configurations which compile with the same
453         environment as bitbake does. That's a reasonable default behavior.
454 """
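        # For example, a recipe statement such as 'export PKG_CONFIG_PATH'
        # sets the "export" flag on that variable; such variables end up as
        # key/value pairs in self.exported_vars (variable name illustrative).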
455 exported_vars = {}
456
457 vars = (key for key in d.keys() if not key.startswith(
458 "__") and not d.getVarFlag(key, "func", False))
459 for var in sorted(vars):
460 func = d.getVarFlag(var, "func", False)
461 if d.getVarFlag(var, 'python', False) and func:
462 continue
463 export = d.getVarFlag(var, "export", False)
464 unexport = d.getVarFlag(var, "unexport", False)
465 if not export and not unexport and not func:
466 continue
467 if unexport:
468 continue
469
470 val = d.getVar(var)
471 if val is None:
472 continue
473 if set(var) & set("-.{}+"):
474             logger.warning(
475                 "Found invalid character in variable name %s", str(var))
476 continue
477 varExpanded = d.expand(var)
478 val = str(val)
479
480 if not RecipeModified.is_valid_shell_variable(varExpanded):
481 continue
482
483 if func:
484 code_line = "line: {0}, file: {1}\n".format(
485 d.getVarFlag(var, "lineno", False),
486 d.getVarFlag(var, "filename", False))
487 val = val.rstrip('\n')
488                 logger.warning("Exported shell function %s() cannot be exported (%s)" %
489                                (varExpanded, code_line))
490 continue
491
492 if export:
493 exported_vars[varExpanded] = val.strip()
494 continue
495
496 self.exported_vars = exported_vars
497
498 def __init_cmake_preset_cache(self, d):
499 """Get the arguments passed to cmake
500
501         Replicate the cmake configure arguments with all details to
502         share the build folder between bitbake and the SDK.
503 """
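        # Sketch of the mapping performed below (values are made-up examples):
        #   EXTRA_OECMAKE = "-DFOO=ON -DBAR:PATH=/tmp"
        # becomes
        #   {"FOO": "ON", "BAR": {"type": "PATH", "value": "/tmp"}}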
504 site_file = os.path.join(self.workdir, 'site-file.cmake')
505 if os.path.exists(site_file):
506             logger.warning("site-file.cmake is not supported")
507
508 cache_vars = {}
509 oecmake_args = d.getVar('OECMAKE_ARGS').split()
510 extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
511 for param in sorted(oecmake_args + extra_oecmake):
512 d_pref = "-D"
513 if param.startswith(d_pref):
514 param = param[len(d_pref):]
515 else:
516                 logger.error("Expected a -D prefix, got: %s" % param)
517 param_s = param.split('=', 1)
518 param_nt = param_s[0].split(':', 1)
519
520 def handle_undefined_variable(var):
521 if var.startswith('${') and var.endswith('}'):
522 return ''
523 else:
524 return var
525 # Example: FOO=ON
526 if len(param_nt) == 1:
527 cache_vars[param_s[0]] = handle_undefined_variable(param_s[1])
528 # Example: FOO:PATH=/tmp
529 elif len(param_nt) == 2:
530 cache_vars[param_nt[0]] = {
531 "type": param_nt[1],
532 "value": handle_undefined_variable(param_s[1]),
533 }
534 else:
535                 logger.error("Cannot parse %s" % param)
536 self.cmake_cache_vars = cache_vars
537
538 def cmake_preset(self):
539 """Create a preset for cmake that mimics how bitbake calls cmake"""
540 toolchain_file = os.path.join(self.workdir, 'toolchain.cmake')
541 cmake_executable = os.path.join(
542 self.recipe_sysroot_native, 'usr', 'bin', 'cmake')
543 self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id
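        # e.g. (assumed example):
        #   .../recipe-sysroot-native/usr/bin/cmake --build --preset cmake-example-core2-64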
544
545 preset_dict_configure = {
546 "name": self.recipe_id,
547 "displayName": self.recipe_id_pretty,
548 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
549 "binaryDir": self.b,
550 "generator": self.oecmake_generator,
551 "toolchainFile": toolchain_file,
552 "cacheVariables": self.cmake_cache_vars,
553 "environment": self.exported_vars,
554 "cmakeExecutable": cmake_executable
555 }
556
557 preset_dict_build = {
558 "name": self.recipe_id,
559 "displayName": self.recipe_id_pretty,
560 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
561 "configurePreset": self.recipe_id,
562 "inheritConfigureEnvironment": True
563 }
564
565 preset_dict_test = {
566 "name": self.recipe_id,
567 "displayName": self.recipe_id_pretty,
568 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
569 "configurePreset": self.recipe_id,
570 "inheritConfigureEnvironment": True
571 }
572
573 preset_dict = {
574 "version": 3, # cmake 3.21, backward compatible with kirkstone
575 "configurePresets": [preset_dict_configure],
576 "buildPresets": [preset_dict_build],
577 "testPresets": [preset_dict_test]
578 }
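        # Schematically, the written CMakeUserPresets.json therefore contains
        # (preset names depend on the recipe and architecture):
        #   {"version": 3,
        #    "configurePresets": [{"name": "<bpn>-<arch>", ...}],
        #    "buildPresets":     [{"name": "<bpn>-<arch>", ...}],
        #    "testPresets":      [{"name": "<bpn>-<arch>", ...}]}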
579
580 # Finally write the json file
581 json_file = 'CMakeUserPresets.json'
582 json_path = os.path.join(self.real_srctree, json_file)
583 logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path))
584 if not os.path.exists(self.real_srctree):
585 os.makedirs(self.real_srctree)
586 try:
587 with open(json_path) as f:
588 orig_dict = json.load(f)
589 except json.decoder.JSONDecodeError:
590 logger.info(
591                 "Decoding %s failed, probably because the json file contains comments" % json_path)
592 orig_dict = {}
593 except FileNotFoundError:
594 orig_dict = {}
595
596 # Add or update the presets for the recipe and keep other presets
597 for k, v in preset_dict.items():
598 if isinstance(v, list):
599 update_preset = v[0]
600 preset_added = False
601 if k in orig_dict:
602 for index, orig_preset in enumerate(orig_dict[k]):
603 if 'name' in orig_preset:
604 if orig_preset['name'] == update_preset['name']:
605 logger.debug("Updating preset: %s" %
606 orig_preset['name'])
607 orig_dict[k][index] = update_preset
608 preset_added = True
609 break
610 else:
611 logger.debug("keeping preset: %s" %
612 orig_preset['name'])
613 else:
614                             logger.warning("Preset without a name found")
615 if not preset_added:
616                     if k not in orig_dict:
617 orig_dict[k] = []
618 orig_dict[k].append(update_preset)
619 logger.debug("Added preset: %s" %
620 update_preset['name'])
621 else:
622 orig_dict[k] = v
623
624 with open(json_path, 'w') as f:
625 json.dump(orig_dict, f, indent=4)
626
627 def gen_meson_wrapper(self):
628 """Generate a wrapper script to call meson with the cross environment"""
629 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
630 meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson')
631 meson_real = os.path.join(
632 self.recipe_sysroot_native, 'usr', 'bin', 'meson.real')
633 with open(meson_wrapper, 'w') as mwrap:
634 mwrap.write("#!/bin/sh" + os.linesep)
635 for var, val in self.exported_vars.items():
636 mwrap.write('export %s="%s"' % (var, val) + os.linesep)
637 mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep)
638 private_temp = os.path.join(self.b, "meson-private", "tmp")
639 mwrap.write('mkdir -p "%s"' % private_temp + os.linesep)
640 mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep)
641 mwrap.write('exec "%s" "$@"' % meson_real + os.linesep)
642 st = os.stat(meson_wrapper)
643 os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC)
644 self.meson_wrapper = meson_wrapper
645 self.cmd_compile = meson_wrapper + " compile -C " + self.b
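    # A generated wrapper looks roughly like this (paths are illustrative):
    #
    #   #!/bin/sh
    #   export CFLAGS="..."            # one line per exported variable
    #   unset CC CXX CPP LD AR NM STRIP
    #   mkdir -p "<B>/meson-private/tmp"
    #   export TMPDIR="<B>/meson-private/tmp"
    #   exec "<recipe-sysroot-native>/usr/bin/meson.real" "$@"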
646
647 def which(self, executable):
648 bin_path = shutil.which(executable, path=self.path)
649 if not bin_path:
650 raise DevtoolError(
651 'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn))
652 return bin_path
653
654 @staticmethod
655 def is_elf_file(file_path):
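        # The first four bytes of every ELF file are the fixed magic sequence
        # 0x7f 'E' 'L' 'F'; reading just those bytes is enough to identify one.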
656 with open(file_path, "rb") as f:
657 data = f.read(4)
658 if data == b'\x7fELF':
659 return True
660 return False
661
662 def find_installed_binaries(self):
663         """Find all executable ELF files in the image directory"""
664 binaries = []
665 d_len = len(self.d)
666 re_so = re.compile(r'.*\.so[.0-9]*$')
667 for root, _, files in os.walk(self.d, followlinks=False):
668 for file in files:
669                 if os.path.islink(os.path.join(root, file)):
670 continue
671 if re_so.match(file):
672 continue
673 abs_name = os.path.join(root, file)
674 if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name):
675 binaries.append(abs_name[d_len:])
676 return sorted(binaries)
677
678 def gen_deploy_target_script(self, args):
679 """Generate a script which does what devtool deploy-target does
680
681         This script is much quicker than devtool deploy-target because it
682         does not need to start a bitbake server. All information from tinfoil
683         is hard-coded in the generated script.
684 """
685 cmd_lines = ['#!%s' % str(sys.executable)]
686 cmd_lines.append('import sys')
687 cmd_lines.append('devtool_sys_path = %s' % str(sys.path))
688 cmd_lines.append('devtool_sys_path.reverse()')
689 cmd_lines.append('for p in devtool_sys_path:')
690 cmd_lines.append(' if p not in sys.path:')
691 cmd_lines.append(' sys.path.insert(0, p)')
692 cmd_lines.append('from devtool.deploy import deploy_no_d')
693 args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check',
694 'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target']
695 filtered_args_dict = {key: value for key, value in vars(
696 args).items() if key in args_filter}
697 cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict))
698 cmd_lines.append('class Dict2Class(object):')
699 cmd_lines.append(' def __init__(self, my_dict):')
700 cmd_lines.append(' for key in my_dict:')
701 cmd_lines.append(' setattr(self, key, my_dict[key])')
702 cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)')
703 cmd_lines.append(
704 'setattr(filtered_args, "recipename", "%s")' % self.bpn)
705 cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' %
706 (self.d, self.workdir, self.path, self.strip_cmd,
707 self.libdir, self.base_libdir, self.max_process,
708 self.fakerootcmd, self.fakerootenv))
709 return self.write_script(cmd_lines, 'deploy_target')
710
711 def gen_install_deploy_script(self, args):
712 """Generate a script which does install and deploy"""
713 cmd_lines = ['#!/bin/bash']
714
715 # . oe-init-build-env $BUILDDIR
716 # Note: Sourcing scripts with arguments requires bash
717 cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
718 self.oe_init_dir, self.oe_init_dir))
719 cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % (
720 self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir))
721
722 # bitbake -c install
723 cmd_lines.append(
724 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn))
725
726 # Self contained devtool deploy-target
727 cmd_lines.append(self.gen_deploy_target_script(args))
728
729 return self.write_script(cmd_lines, 'install_and_deploy')
730
731 def write_script(self, cmd_lines, script_name):
732 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
733 script_name_arch = script_name + '_' + self.recipe_id
734 script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch)
735 with open(script_file, 'w') as script_f:
736 script_f.write(os.linesep.join(cmd_lines))
737 st = os.stat(script_file)
738 os.chmod(script_file, st.st_mode | stat.S_IEXEC)
739 return script_file
740
741 @property
742 def oe_init_build_env(self):
743 """Find the oe-init-build-env used for this setup"""
744 oe_init_dir = self.oe_init_dir
745 if oe_init_dir:
746 return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
747 return None
748
749 @property
750 def oe_init_dir(self):
751 """Find the directory where the oe-init-build-env is located
752
753 Assumption: There might be a layer with higher priority than poky
754         which provides the oe-init-build-env in the layer's toplevel folder.
755 """
756 if not self.__oe_init_dir:
757 for layer in reversed(self.bblayers):
758 result = subprocess.run(
759 ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True)
760 if result.returncode == 0:
761 oe_init_dir = result.stdout.decode('utf-8').strip()
762 oe_init_path = os.path.join(
763 oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
764 if os.path.exists(oe_init_path):
765 logger.debug("Using %s from: %s" % (
766 RecipeModified.OE_INIT_BUILD_ENV, oe_init_path))
767 self.__oe_init_dir = oe_init_dir
768 break
769 if not self.__oe_init_dir:
770 logger.error("Cannot find the bitbake top level folder")
771 return self.__oe_init_dir
772
773
774def ide_setup(args, config, basepath, workspace):
775 """Generate the IDE configuration for the workspace"""
776
777     # Explicitly passing some special recipes does not make sense
778 for recipe in args.recipenames:
779 if recipe in ['meta-ide-support', 'build-sysroots']:
780 raise DevtoolError("Invalid recipe: %s." % recipe)
781
782 # Collect information about tasks which need to be bitbaked
783 bootstrap_tasks = []
784 bootstrap_tasks_late = []
785 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
786 try:
787 # define mode depending on recipes which need to be processed
788 recipes_image_names = []
789 recipes_modified_names = []
790 recipes_other_names = []
791 for recipe in args.recipenames:
792 try:
793 check_workspace_recipe(
794 workspace, recipe, bbclassextend=True)
795 recipes_modified_names.append(recipe)
796 except DevtoolError:
797 recipe_d = parse_recipe(
798 config, tinfoil, recipe, appends=True, filter_workspace=False)
799 if not recipe_d:
800 raise DevtoolError("Parsing recipe %s failed" % recipe)
801 if bb.data.inherits_class('image', recipe_d):
802 recipes_image_names.append(recipe)
803 else:
804 recipes_other_names.append(recipe)
805
806 invalid_params = False
807 if args.mode == DevtoolIdeMode.shared:
808             if recipes_modified_names:
809                 logger.error("In shared sysroots mode, modified recipes %s cannot be handled." % str(
810                     recipes_modified_names))
811 invalid_params = True
812 if args.mode == DevtoolIdeMode.modified:
813             if recipes_other_names:
814                 logger.error("Recipes %s which are not modified can only be handled in shared sysroots mode." % str(
815                     recipes_other_names))
816 invalid_params = True
817 if len(recipes_image_names) != 1:
818 logger.error(
819 "One image recipe is required as the rootfs for the remote development.")
820 invalid_params = True
821 for modified_recipe_name in recipes_modified_names:
822 if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'):
823 logger.error(
824                     "Only cross-compiled recipes are supported. %s is not cross-compiled." % modified_recipe_name)
825 invalid_params = True
826
827 if invalid_params:
828             raise DevtoolError("Invalid parameters were passed.")
829
830 # For the shared sysroots mode, add all dependencies of all the images to the sysroots
831 # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg
832 recipes_images = []
833 for recipes_image_name in recipes_image_names:
834 logger.info("Using image: %s" % recipes_image_name)
835 recipe_image = RecipeImage(recipes_image_name)
836 recipe_image.initialize(config, tinfoil)
837 bootstrap_tasks += recipe_image.bootstrap_tasks
838 recipes_images.append(recipe_image)
839
840 # Provide a Direct SDK with shared sysroots
841 recipes_not_modified = []
842 if args.mode == DevtoolIdeMode.shared:
843 ide_support = RecipeMetaIdeSupport()
844 ide_support.initialize(config, tinfoil)
845 bootstrap_tasks += ide_support.bootstrap_tasks
846
847 logger.info("Adding %s to the Direct SDK sysroots." %
848 str(recipes_other_names))
849 for recipe_name in recipes_other_names:
850 recipe_not_modified = RecipeNotModified(recipe_name)
851 bootstrap_tasks += recipe_not_modified.bootstrap_tasks
852 recipes_not_modified.append(recipe_not_modified)
853
854 build_sysroots = RecipeBuildSysroots()
855 build_sysroots.initialize(config, tinfoil)
856 bootstrap_tasks_late += build_sysroots.bootstrap_tasks
857 shared_env = SharedSysrootsEnv()
858 shared_env.initialize(ide_support, build_sysroots)
859
860 recipes_modified = []
861 if args.mode == DevtoolIdeMode.modified:
862             logger.info("Setting up workspaces for modified recipes: %s" %
863 str(recipes_modified_names))
864 gdbs_cross = {}
865 for recipe_name in recipes_modified_names:
866 recipe_modified = RecipeModified(recipe_name)
867 recipe_modified.initialize(config, workspace, tinfoil)
868 bootstrap_tasks += recipe_modified.bootstrap_tasks
869 recipes_modified.append(recipe_modified)
870
871 if recipe_modified.target_arch not in gdbs_cross:
872 target_device = TargetDevice(args)
873 gdb_cross = RecipeGdbCross(
874 args, recipe_modified.target_arch, target_device)
875 gdb_cross.initialize(config, workspace, tinfoil)
876 bootstrap_tasks += gdb_cross.bootstrap_tasks
877 gdbs_cross[recipe_modified.target_arch] = gdb_cross
878 recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch]
879
880 finally:
881 tinfoil.shutdown()
882
883 if not args.skip_bitbake:
884 bb_cmd = 'bitbake '
885 if args.bitbake_k:
886 bb_cmd += "-k "
887 bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks)
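        # e.g. (assumed example):
        #   bitbake core-image-minimal cmake-example:do_install gdb-cross-x86_64:do_populate_sysroot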
888 exec_build_env_command(
889 config.init_path, basepath, bb_cmd_early, watch=True)
890 if bootstrap_tasks_late:
891 bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late)
892 exec_build_env_command(
893 config.init_path, basepath, bb_cmd_late, watch=True)
894
895 for recipe_image in recipes_images:
896         if recipe_image.gdbserver_missing:
897 logger.warning(
898 "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image)
899
900 if recipe_image.combine_dbg_image is False:
901 logger.warning(
902 'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image)
903
904 # Instantiate the active IDE plugin
905 ide = ide_plugins[args.ide]()
906 if args.mode == DevtoolIdeMode.shared:
907 ide.setup_shared_sysroots(shared_env)
908 elif args.mode == DevtoolIdeMode.modified:
909 for recipe_modified in recipes_modified:
910 if recipe_modified.build_tool is BuildTool.CMAKE:
911 recipe_modified.cmake_preset()
912 if recipe_modified.build_tool is BuildTool.MESON:
913 recipe_modified.gen_meson_wrapper()
914 ide.setup_modified_recipe(
915 args, recipe_image, recipe_modified)
916
917 if recipe_modified.debug_build != '1':
918                 logger.warning(
919 'Recipe %s is compiled with release build configuration. '
920 'You might want to add DEBUG_BUILD = "1" to %s. '
921 'Note that devtool modify --debug-build can do this automatically.',
922 recipe_modified.name, recipe_modified.bbappend)
923 else:
924 raise DevtoolError("Must not end up here.")
925
926
927def register_commands(subparsers, context):
928 """Register devtool subcommands from this plugin"""
929
930 # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE
931 # configuration is generated. In the case of the eSDK, the bootstrapping is performed
932 # during the installation of the eSDK installer. Running the ide-sdk plugin from an
933 # eSDK installer-based setup would require skipping the bootstrapping and probably
934 # taking some other differences into account when generating the IDE configurations.
935     # This would be possible, but it is not implemented.
936 if context.fixed_setup:
937 return
938
939 global ide_plugins
940
941     # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins.
942 pluginpaths = [os.path.join(path, 'ide_plugins')
943 for path in context.pluginpaths]
944 ide_plugin_modules = []
945 for pluginpath in pluginpaths:
946 scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath)
947
948 for ide_plugin_module in ide_plugin_modules:
949 if hasattr(ide_plugin_module, 'register_ide_plugin'):
950 ide_plugin_module.register_ide_plugin(ide_plugins)
951 # Sort plugins according to their priority. The first entry is the default IDE plugin.
952 ide_plugins = dict(sorted(ide_plugins.items(),
953 key=lambda p: p[1].ide_plugin_priority(), reverse=True))
954
955 parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter,
956 help='Setup the SDK and configure the IDE')
957 parser_ide_sdk.add_argument(
958 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
959 'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.')
960 parser_ide_sdk.add_argument(
961 '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
962 help='Different SDK types are supported:\n'
963 '- "' + DevtoolIdeMode.modified.name + '" (default):\n'
964 ' devtool modify creates a workspace to work on the source code of a recipe.\n'
965         ' devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directories.\n'
966 ' Usage example:\n'
967 ' devtool modify cmake-example\n'
968 ' devtool ide-sdk cmake-example core-image-minimal\n'
969 ' Start the IDE in the workspace folder\n'
970 ' At least one devtool modified recipe plus one image recipe are required:\n'
971 ' The image recipe is used to generate the target image and the remote debug configuration.\n'
972 '- "' + DevtoolIdeMode.shared.name + '":\n'
973 ' Usage example:\n'
974 ' devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n'
975 ' This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n'
976         ' To use this tool-chain the environment-* file found in the deploy image folder (DEPLOY_DIR_IMAGE) needs to be sourced into a shell.\n'
977 ' In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit')
978 default_ide = list(ide_plugins.keys())[0]
979 parser_ide_sdk.add_argument(
980 '-i', '--ide', choices=ide_plugins.keys(), default=default_ide,
981 help='Setup the configuration for this IDE (default: %s)' % default_ide)
982 parser_ide_sdk.add_argument(
983 '-t', '--target', default='root@192.168.7.2',
984 help='Live target machine running an ssh server: user@hostname.')
985 parser_ide_sdk.add_argument(
986         '-G', '--gdbserver-port-start', default="1234", help='Port where gdbserver is listening.')
987 parser_ide_sdk.add_argument(
988 '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
989 parser_ide_sdk.add_argument(
990 '-e', '--ssh-exec', help='Executable to use in place of ssh')
991 parser_ide_sdk.add_argument(
992 '-P', '--port', help='Specify ssh port to use for connection to the target')
993 parser_ide_sdk.add_argument(
994 '-I', '--key', help='Specify ssh private key for connection to the target')
995 parser_ide_sdk.add_argument(
996 '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK', action='store_true')
997 parser_ide_sdk.add_argument(
998 '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
999 parser_ide_sdk.add_argument(
1000 '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
1001 parser_ide_sdk.add_argument(
1002 '-n', '--dry-run', help='List files to be undeployed only', action='store_true')
1003 parser_ide_sdk.add_argument(
1004 '-s', '--show-status', help='Show progress/status output', action='store_true')
1005 parser_ide_sdk.add_argument(
1006 '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
1007 parser_ide_sdk.add_argument(
1008 '--no-check-space', help='Do not check for available space before deploying', action='store_true')
1009 parser_ide_sdk.set_defaults(func=ide_setup)
diff --git a/scripts/lib/devtool/import.py b/scripts/lib/devtool/import.py
deleted file mode 100644
index 6829851669..0000000000
--- a/scripts/lib/devtool/import.py
+++ /dev/null
@@ -1,134 +0,0 @@
1# Development tool - import command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool import plugin"""
8
9import os
10import tarfile
11import logging
12import collections
13import json
14import fnmatch
15
16from devtool import standard, setup_tinfoil, replace_from_file, DevtoolError
17from devtool import export
18
19logger = logging.getLogger('devtool')
20
21def devimport(args, config, basepath, workspace):
22 """Entry point for the devtool 'import' subcommand"""
23
24 def get_pn(name):
25 """ Returns the filename of a workspace recipe/append"""
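        # e.g. (illustrative): 'appends/zlib_1.2.11.bbappend' -> 'zlib_1.2.11'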
26 metadata = name.split('/')[-1]
27 fn, _ = os.path.splitext(metadata)
28 return fn
29
30 if not os.path.exists(args.file):
31 raise DevtoolError('Tar archive %s does not exist. Export your workspace using "devtool export"' % args.file)
32
33 with tarfile.open(args.file) as tar:
34 # Get exported metadata
35 export_workspace_path = export_workspace = None
36 try:
37 metadata = tar.getmember(export.metadata)
38 except KeyError as ke:
39 raise DevtoolError('The export metadata file created by "devtool export" was not found. "devtool import" can only be used to import tar archives created by "devtool export".')
40
41 tar.extract(metadata)
42 with open(metadata.name) as fdm:
43 export_workspace_path, export_workspace = json.load(fdm)
44 os.unlink(metadata.name)
45
46 members = tar.getmembers()
47
48 # Get appends and recipes from the exported archive, these
49 # will be needed to find out those appends without corresponding
50 # recipe pair
51 append_fns, recipe_fns = set(), set()
52 for member in members:
53 if member.name.startswith('appends'):
54 append_fns.add(get_pn(member.name))
55 elif member.name.startswith('recipes'):
56 recipe_fns.add(get_pn(member.name))
57
58 # Setup tinfoil, get required data and shutdown
59 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
60 try:
61 current_fns = [os.path.basename(recipe[0]) for recipe in tinfoil.cooker.recipecaches[''].pkg_fn.items()]
62 finally:
63 tinfoil.shutdown()
64
65 # Find those appends that do not have recipes in current metadata
66 non_importables = []
67 for fn in append_fns - recipe_fns:
68 # Check on current metadata (covering those layers indicated in bblayers.conf)
69 for current_fn in current_fns:
70 if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
71 break
72 else:
73 non_importables.append(fn)
74             logger.warning('No recipe to append %s.bbappend, skipping' % fn)
75
76 # Extract
77 imported = []
78 for member in members:
79 if member.name == export.metadata:
80 continue
81
82 for nonimp in non_importables:
83 pn = nonimp.split('_')[0]
84 # do not extract data from non-importable recipes or metadata
85 if member.name.startswith('appends/%s' % nonimp) or \
86 member.name.startswith('recipes/%s' % nonimp) or \
87 member.name.startswith('sources/%s' % pn):
88 break
89 else:
90 path = os.path.join(config.workspace_path, member.name)
91 if os.path.exists(path):
92 # by default, no file overwrite is done unless -o is given by the user
93 if args.overwrite:
94 try:
95 tar.extract(member, path=config.workspace_path)
96 except PermissionError as pe:
97 logger.warning(pe)
98 else:
99 logger.warning('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
100 continue
101 else:
102 tar.extract(member, path=config.workspace_path)
103
104 # Update EXTERNALSRC and the devtool md5 file
105 if member.name.startswith('appends'):
106 if export_workspace_path:
107 # appends created by 'devtool modify' just need to update the workspace
108 replace_from_file(path, export_workspace_path, config.workspace_path)
109
110 # appends created by 'devtool add' need replacement of exported source tree
111 pn = get_pn(member.name).split('_')[0]
112 exported_srctree = export_workspace[pn]['srctree']
113 if exported_srctree:
114 replace_from_file(path, exported_srctree, os.path.join(config.workspace_path, 'sources', pn))
115
116 standard._add_md5(config, pn, path)
117 imported.append(pn)
118
119 if imported:
120 logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
121 else:
122 logger.warning('No recipes imported into the workspace')
123
124 return 0
125
126def register_commands(subparsers, context):
127 """Register devtool import subcommands"""
128 parser = subparsers.add_parser('import',
129 help='Import exported tar archive into workspace',
130 description='Import tar archive previously created by "devtool export" into workspace',
131 group='advanced')
132 parser.add_argument('file', metavar='FILE', help='Name of the tar archive to import')
133 parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite files when extracting')
134 parser.set_defaults(func=devimport)
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
deleted file mode 100644
index 1054960551..0000000000
--- a/scripts/lib/devtool/menuconfig.py
+++ /dev/null
@@ -1,76 +0,0 @@
1# OpenEmbedded Development tool - menuconfig command plugin
2#
3# Copyright (C) 2018 Xilinx
4# Written by: Chandana Kalluri <ckalluri@xilinx.com>
5#
6# SPDX-License-Identifier: MIT
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21"""Devtool menuconfig plugin"""
22
23import os
24import bb
25import logging
26from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
27from devtool import check_workspace_recipe
28logger = logging.getLogger('devtool')
29
30def menuconfig(args, config, basepath, workspace):
31 """Entry point for the devtool 'menuconfig' subcommand"""
32
33 rd = ""
34 pn_src = ""
35 localfilesdir = ""
36 workspace_dir = ""
37 tinfoil = setup_tinfoil(basepath=basepath)
38 try:
39 rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
40 if not rd:
41 return 1
42
43 check_workspace_recipe(workspace, args.component)
44 pn = rd.getVar('PN')
45
46 if not rd.getVarFlag('do_menuconfig','task'):
47             raise DevtoolError("This recipe does not support the menuconfig option")
48
49 workspace_dir = os.path.join(config.workspace_path,'sources')
50 pn_src = os.path.join(workspace_dir,pn)
51
52 # add check to see if oe_local_files exists or not
53 localfilesdir = os.path.join(pn_src,'oe-local-files')
54 if not os.path.exists(localfilesdir):
55 bb.utils.mkdirhier(localfilesdir)
56 # Add gitignore to ensure source tree is clean
57 gitignorefile = os.path.join(localfilesdir,'.gitignore')
58 with open(gitignorefile, 'w') as f:
59 f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
60 f.write('*\n')
61
62 finally:
63 tinfoil.shutdown()
64
65 logger.info('Launching menuconfig')
66 exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
67 fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
68 standard._create_kconfig_diff(pn_src,rd,fragment)
69
70 return 0
71
72def register_commands(subparsers, context):
73 """register devtool subcommands from this plugin"""
74 parser_menuconfig = subparsers.add_parser('menuconfig',help='Alter build-time configuration for a recipe', description='Launches the make menuconfig command (for recipes where do_menuconfig is available), allowing users to make changes to the build-time configuration. Creates a config fragment corresponding to changes made.', group='advanced')
75     parser_menuconfig.add_argument('component', help='component to alter config')
76 parser_menuconfig.set_defaults(func=menuconfig,fixed_setup=context.fixed_setup)
diff --git a/scripts/lib/devtool/package.py b/scripts/lib/devtool/package.py
deleted file mode 100644
index c2367342c3..0000000000
--- a/scripts/lib/devtool/package.py
+++ /dev/null
@@ -1,50 +0,0 @@
1# Development tool - package command plugin
2#
3# Copyright (C) 2014-2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool plugin containing the package subcommands"""
8
9import os
10import subprocess
11import logging
12from bb.process import ExecutionError
13from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
14
15logger = logging.getLogger('devtool')
16
17def package(args, config, basepath, workspace):
18 """Entry point for the devtool 'package' subcommand"""
19 check_workspace_recipe(workspace, args.recipename)
20
21 tinfoil = setup_tinfoil(basepath=basepath, config_only=True)
22 try:
23 image_pkgtype = config.get('Package', 'image_pkgtype', '')
24 if not image_pkgtype:
25 image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')
26
27 deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper())
28 finally:
29 tinfoil.shutdown()
30
31 package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
32 try:
33 exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s' % (package_task, args.recipename), watch=True)
34     except ExecutionError as e:
35 # We've already seen the output since watch=True, so just ensure we return something to the user
36 return e.exitcode
37
38 logger.info('Your packages are in %s' % deploy_dir_pkg)
39
40 return 0
41
42def register_commands(subparsers, context):
43 """Register devtool subcommands from the package plugin"""
44 if context.fixed_setup:
45 parser_package = subparsers.add_parser('package',
46 help='Build packages for a recipe',
47 description='Builds packages for a recipe\'s output files',
48 group='testbuild', order=-5)
49 parser_package.add_argument('recipename', help='Recipe to package')
50 parser_package.set_defaults(func=package)
diff --git a/scripts/lib/devtool/runqemu.py b/scripts/lib/devtool/runqemu.py
deleted file mode 100644
index ead978aabc..0000000000
--- a/scripts/lib/devtool/runqemu.py
+++ /dev/null
@@ -1,64 +0,0 @@
1# Development tool - runqemu command plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool runqemu plugin"""
9
10import os
11import bb
12import logging
13import argparse
14import glob
15from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
16
17logger = logging.getLogger('devtool')
18
19def runqemu(args, config, basepath, workspace):
20 """Entry point for the devtool 'runqemu' subcommand"""
21
22 tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
23 try:
24 machine = tinfoil.config_data.getVar('MACHINE')
25 bindir_native = os.path.join(tinfoil.config_data.getVar('STAGING_DIR'),
26 tinfoil.config_data.getVar('BUILD_ARCH'),
27 tinfoil.config_data.getVar('bindir_native').lstrip(os.path.sep))
28 finally:
29 tinfoil.shutdown()
30
31 if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
32 raise DevtoolError('QEMU is not available within this SDK')
33
34 imagename = args.imagename
35 if not imagename:
36 sdk_targets = config.get('SDK', 'sdk_targets', '').split()
37 if sdk_targets:
38 imagename = sdk_targets[0]
39 if not imagename:
40 raise DevtoolError('Unable to determine image name to run, please specify one')
41
42 try:
43 # FIXME runqemu assumes that if OECORE_NATIVE_SYSROOT is set then it shouldn't
44 # run bitbake to find out the values of various environment variables, which
45 # isn't the case for the extensible SDK. Work around it for now.
46 newenv = dict(os.environ)
47 newenv.pop('OECORE_NATIVE_SYSROOT', '')
48 exec_build_env_command(config.init_path, basepath, 'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)), watch=True, env=newenv)
49 except bb.process.ExecutionError as e:
50 # We've already seen the output since watch=True, so just ensure we return something to the user
51 return e.exitcode
52
53 return 0
54
55def register_commands(subparsers, context):
56 """Register devtool subcommands from this plugin"""
57 if context.fixed_setup:
58 parser_runqemu = subparsers.add_parser('runqemu', help='Run QEMU on the specified image',
59 description='Runs QEMU to boot the specified image',
60 group='testbuild', order=-20)
61 parser_runqemu.add_argument('imagename', help='Name of built image to boot within QEMU', nargs='?')
62 parser_runqemu.add_argument('args', help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)',
63 nargs=argparse.REMAINDER)
64 parser_runqemu.set_defaults(func=runqemu)
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
deleted file mode 100644
index 9aefd7e354..0000000000
--- a/scripts/lib/devtool/sdk.py
+++ /dev/null
@@ -1,330 +0,0 @@
1# Development tool - sdk-update command plugin
2#
3# Copyright (C) 2015-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import subprocess
10import logging
11import glob
12import shutil
13import errno
14import sys
15import tempfile
16import re
17from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
18
19logger = logging.getLogger('devtool')
20
21def parse_locked_sigs(sigfile_path):
22 """Return <pn:task>:<hash> dictionary"""
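    # Lines in locked-sigs.inc are assumed to look roughly like
    #   zlib:do_populate_sysroot:0123abc... \
    # which this parses into {"zlib:do_populate_sysroot": "0123abc..."}.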
23 sig_dict = {}
24 with open(sigfile_path) as f:
25 lines = f.readlines()
26 for line in lines:
27 if ':' in line:
28 taskkey, _, hashval = line.rpartition(':')
29 sig_dict[taskkey.strip()] = hashval.split()[0]
30 return sig_dict
31
32def generate_update_dict(sigfile_new, sigfile_old):
33     """Return a dict containing <pn:task>:<hash> which indicates what needs to be updated"""
34 update_dict = {}
35 sigdict_new = parse_locked_sigs(sigfile_new)
36 sigdict_old = parse_locked_sigs(sigfile_old)
37 for k in sigdict_new:
38 if k not in sigdict_old:
39 update_dict[k] = sigdict_new[k]
40 continue
41 if sigdict_new[k] != sigdict_old[k]:
42 update_dict[k] = sigdict_new[k]
43 continue
44 return update_dict
45
46def get_sstate_objects(update_dict, sstate_dir):
47 """Return a list containing sstate objects which are to be installed"""
48 sstate_objects = []
49 for k in update_dict:
50 files = set()
51 hashval = update_dict[k]
52 p = sstate_dir + '/' + hashval[:2] + '/*' + hashval + '*.tgz'
53 files |= set(glob.glob(p))
54 p = sstate_dir + '/*/' + hashval[:2] + '/*' + hashval + '*.tgz'
55 files |= set(glob.glob(p))
56 files = list(files)
57 if len(files) == 1:
58 sstate_objects.extend(files)
59 elif len(files) > 1:
60 logger.error("More than one matching sstate object found for %s" % hashval)
61
62 return sstate_objects
63
64def mkdir(d):
65 try:
66 os.makedirs(d)
67 except OSError as e:
68 if e.errno != errno.EEXIST:
69 raise e
70
71def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
72 """Install sstate objects into destination SDK"""
73 sstate_dir = os.path.join(dest_sdk, 'sstate-cache')
74 if not os.path.exists(sstate_dir):
75 logger.error("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)
76         raise DevtoolError("Missing sstate-cache directory in %s" % dest_sdk)
77 for sb in sstate_objects:
78 dst = sb.replace(src_sdk, dest_sdk)
79 destdir = os.path.dirname(dst)
80 mkdir(destdir)
81 logger.debug("Copying %s to %s" % (sb, dst))
82 shutil.copy(sb, dst)
83
84def check_manifest(fn, basepath):
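    # The sdk-conf-manifest is assumed to contain lines of the form
    # "<sha256> <relative path>", e.g. "ab12... conf/local.conf" (illustrative).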
85 import bb.utils
86 changedfiles = []
87 with open(fn, 'r') as f:
88 for line in f:
89 splitline = line.split()
90 if len(splitline) > 1:
91 chksum = splitline[0]
92 fpath = splitline[1]
93 curr_chksum = bb.utils.sha256_file(os.path.join(basepath, fpath))
94 if chksum != curr_chksum:
95 logger.debug('File %s changed: old csum = %s, new = %s' % (os.path.join(basepath, fpath), curr_chksum, chksum))
96 changedfiles.append(fpath)
97 return changedfiles
98
99def sdk_update(args, config, basepath, workspace):
100 """Entry point for devtool sdk-update command"""
101 updateserver = args.updateserver
102 if not updateserver:
103 updateserver = config.get('SDK', 'updateserver', '')
104 logger.debug("updateserver: %s" % updateserver)
105
106 # Make sure we are using sdk-update from within SDK
107 logger.debug("basepath = %s" % basepath)
108 old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
109 if not os.path.exists(old_locked_sig_file_path):
110 logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
111 return -1
112 else:
113 logger.debug("Found conf/locked-sigs.inc in %s" % basepath)
114
115     if '://' not in updateserver:
116 logger.error("Update server must be a URL")
117 return -1
118
119 layers_dir = os.path.join(basepath, 'layers')
120 conf_dir = os.path.join(basepath, 'conf')
121
122 # Grab variable values
123 tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
124 try:
125 stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
126 sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
127 site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
128 finally:
129 tinfoil.shutdown()
130
131 tmpsdk_dir = tempfile.mkdtemp()
132 try:
133 os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
134 new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf', 'locked-sigs.inc')
135 # Fetch manifest from server
136 tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
137 ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
138 if ret != 0:
139             logger.error("Cannot download files from %s" % updateserver)
140 return ret
141 changedfiles = check_manifest(tmpmanifest, basepath)
142 if not changedfiles:
143 logger.info("Already up-to-date")
144 return 0
145 # Update metadata
146 logger.debug("Updating metadata via git ...")
147 #Check for the status before doing a fetch and reset
148 if os.path.exists(os.path.join(basepath, 'layers/.git')):
149 out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
150 if not out:
151 ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True, cwd=layers_dir)
152 else:
153                 logger.error("Failed to update metadata as there have been changes made to it. Aborting.")
154                 logger.error("Changed files:\n%s" % out)
155 return -1
156 else:
157 ret = -1
158 if ret != 0:
159 ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
160 if ret != 0:
161 logger.error("Updating metadata via git failed")
162 return ret
163 logger.debug("Updating conf files ...")
164 for changedfile in changedfiles:
165 ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
166 if ret != 0:
167 logger.error("Updating %s failed" % changedfile)
168 return ret
169
170 # Check if UNINATIVE_CHECKSUM changed
171 uninative = False
172 if 'conf/local.conf' in changedfiles:
173 def read_uninative_checksums(fn):
174 chksumitems = []
175 with open(fn, 'r') as f:
176 for line in f:
177 if line.startswith('UNINATIVE_CHECKSUM'):
178 splitline = re.split(r'[\[\]"\']', line)
179 if len(splitline) > 3:
180 chksumitems.append((splitline[1], splitline[3]))
181 return chksumitems
182
183 oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
184 newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
185 if oldsums != newsums:
186 uninative = True
187 for buildarch, chksum in newsums:
188 uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
189 mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
190 ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)
191
192 # Ok, all is well at this point - move everything over
193 tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
194 if os.path.exists(tmplayers_dir):
195 shutil.rmtree(layers_dir)
196 shutil.move(tmplayers_dir, layers_dir)
197 for changedfile in changedfiles:
198 destfile = os.path.join(basepath, changedfile)
199 os.remove(destfile)
200 shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
201 os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
202 shutil.move(tmpmanifest, conf_dir)
203 if uninative:
204 shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
205 shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))
206
207 if not sstate_mirrors:
208 with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
209 f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
210 f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
211 finally:
212 shutil.rmtree(tmpsdk_dir)
213
214 if not args.skip_prepare:
215 # Find all potentially updateable tasks
216 sdk_update_targets = []
217 tasks = ['do_populate_sysroot', 'do_packagedata']
218 for root, _, files in os.walk(stamps_dir):
219 for fn in files:
220                 if '.sigdata.' not in fn:
221 for task in tasks:
222 if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
223 sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
224 # Run bitbake command for the whole SDK
225 logger.info("Preparing build system... (This may take some time.)")
226 try:
227 exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
228 output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
229 runlines = []
230 for line in output.splitlines():
231 if 'Running task ' in line:
232 runlines.append(line)
233 if runlines:
234 logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
235 return -1
236 except bb.process.ExecutionError as e:
237 logger.error('Preparation failed:\n%s' % e.stdout)
238 return -1
239 return 0
240
241def sdk_install(args, config, basepath, workspace):
242 """Entry point for the devtool sdk-install command"""
243
244 import oe.recipeutils
245 import bb.process
246
247 for recipe in args.recipename:
248 if recipe in workspace:
249 raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)
250
251 tasks = ['do_populate_sysroot', 'do_packagedata']
252 stampprefixes = {}
253 def checkstamp(recipe):
254 stampprefix = stampprefixes[recipe]
255 stamps = glob.glob(stampprefix + '*')
256 for stamp in stamps:
257 if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
258 return True
259 else:
260 return False
261
262 install_recipes = []
263 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
264 try:
265 for recipe in args.recipename:
266 rd = parse_recipe(config, tinfoil, recipe, True)
267 if not rd:
268 return 1
269 stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0])
270 if checkstamp(recipe):
271 logger.info('%s is already installed' % recipe)
272 else:
273 install_recipes.append(recipe)
274 finally:
275 tinfoil.shutdown()
276
277 if install_recipes:
278 logger.info('Installing %s...' % ', '.join(install_recipes))
279 install_tasks = []
280 for recipe in install_recipes:
281 for task in tasks:
282 if recipe.endswith('-native') and 'package' in task:
283 continue
284 install_tasks.append('%s:%s' % (recipe, task))
285 options = ''
286 if not args.allow_build:
287 options += ' --setscene-only'
288 try:
289 exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, ' '.join(install_tasks)), watch=True)
290 except bb.process.ExecutionError as e:
291             raise DevtoolError('Failed to install %s:\n%s' % (', '.join(install_recipes), str(e)))
292 failed = False
293 for recipe in install_recipes:
294 if checkstamp(recipe):
295 logger.info('Successfully installed %s' % recipe)
296 else:
297                 logger.error('Failed to install %s - unavailable' % recipe)
298                 failed = True
299 if failed:
300 return 2
301
302 try:
303 exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True)
304 exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True)
305 except bb.process.ExecutionError as e:
306 raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
307
308
309def register_commands(subparsers, context):
310 """Register devtool subcommands from the sdk plugin"""
311 if context.fixed_setup:
312 parser_sdk = subparsers.add_parser('sdk-update',
313 help='Update SDK components',
314 description='Updates installed SDK components from a remote server',
315 group='sdk')
316 updateserver = context.config.get('SDK', 'updateserver', '')
317 if updateserver:
318 parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from (default %s)' % updateserver, nargs='?')
319 else:
320 parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from')
321 parser_sdk.add_argument('--skip-prepare', action="store_true", help='Skip re-preparing the build system after updating (for debugging only)')
322 parser_sdk.set_defaults(func=sdk_update)
323
324 parser_sdk_install = subparsers.add_parser('sdk-install',
325 help='Install additional SDK components',
326 description='Installs additional recipe development files into the SDK. (You can use "devtool search" to find available recipes.)',
327 group='sdk')
328 parser_sdk_install.add_argument('recipename', help='Name of the recipe to install the development artifacts for', nargs='+')
329 parser_sdk_install.add_argument('-s', '--allow-build', help='Allow building requested item(s) from source', action='store_true')
330 parser_sdk_install.set_defaults(func=sdk_install)
diff --git a/scripts/lib/devtool/search.py b/scripts/lib/devtool/search.py
deleted file mode 100644
index 70b81cac5e..0000000000
--- a/scripts/lib/devtool/search.py
+++ /dev/null
@@ -1,109 +0,0 @@
1# Development tool - search command plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool search plugin"""
9
10import os
11import bb
12import logging
13import argparse
14import re
15from devtool import setup_tinfoil, parse_recipe, DevtoolError
16
17logger = logging.getLogger('devtool')
18
19def search(args, config, basepath, workspace):
20 """Entry point for the devtool 'search' subcommand"""
21
22 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
23 try:
24 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
25 defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''
26
27 keyword_rc = re.compile(args.keyword)
28
29 def print_match(pn):
30 rd = parse_recipe(config, tinfoil, pn, True)
31 if not rd:
32 return
33 summary = rd.getVar('SUMMARY')
34 if summary == rd.expand(defsummary):
35 summary = ''
36 print("%s %s" % (pn.ljust(20), summary))
37
38
39 matches = []
40 if os.path.exists(pkgdata_dir):
41 for fn in os.listdir(pkgdata_dir):
42 pfn = os.path.join(pkgdata_dir, fn)
43 if not os.path.isfile(pfn):
44 continue
45
46 packages = []
47 match = False
48 if keyword_rc.search(fn):
49 match = True
50
51 if not match:
52 with open(pfn, 'r') as f:
53 for line in f:
54 if line.startswith('PACKAGES:'):
55 packages = line.split(':', 1)[1].strip().split()
56
57 for pkg in packages:
58 if keyword_rc.search(pkg):
59 match = True
60 break
61 if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
62 with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
63 for line in f:
64 if ': ' in line:
65 splitline = line.split(': ', 1)
66 key = splitline[0]
67 value = splitline[1].strip()
68 key = key.replace(":" + pkg, "")
69 if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']:
70 if keyword_rc.search(value):
71 match = True
72 break
73 if match:
74 print_match(fn)
75 matches.append(fn)
76 else:
77 logger.warning('Package data is not available, results may be limited')
78
79 for recipe in tinfoil.all_recipes():
80 if args.fixed_setup and 'nativesdk' in recipe.inherits():
81 continue
82
83 match = False
84 if keyword_rc.search(recipe.pn):
85 match = True
86 else:
87 for prov in recipe.provides:
88 if keyword_rc.search(prov):
89 match = True
90 break
91 if not match:
92 for rprov in recipe.rprovides:
93 if keyword_rc.search(rprov):
94 match = True
95 break
96 if match and recipe.pn not in matches:
97 print_match(recipe.pn)
98 finally:
99 tinfoil.shutdown()
100
101 return 0
102
103def register_commands(subparsers, context):
104 """Register devtool subcommands from this plugin"""
105 parser_search = subparsers.add_parser('search', help='Search available recipes',
106 description='Searches for available recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name and summary on match.',
107 group='info')
108 parser_search.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed, use quotes to avoid shell expansion)')
109 parser_search.set_defaults(func=search, no_workspace=True, fixed_setup=context.fixed_setup)
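The search logic above reduces to one compiled regular expression applied across several namespaces: the pkgdata file name, the PACKAGES list, selected per-package fields, and finally recipe names and their provides. A self-contained sketch of that matching core, with hypothetical in-memory data standing in for pkgdata and tinfoil:

import re

def matches_keyword(keyword, name, provides=(), fields=()):
    """True if the regex hits the name, any provider, or any field value."""
    keyword_rc = re.compile(keyword)
    if keyword_rc.search(name):
        return True
    if any(keyword_rc.search(prov) for prov in provides):
        return True
    return any(keyword_rc.search(value) for value in fields)

# Hypothetical sample data in place of pkgdata/tinfoil
recipes = {
    'libpng': {'provides': ['libpng16'], 'fields': ['PNG reference library']},
    'zlib': {'provides': [], 'fields': ['compression library']},
}
for pn, info in sorted(recipes.items()):
    if matches_keyword('compress', pn, info['provides'], info['fields']):
        print(pn)  # prints: zlib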
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
deleted file mode 100644
index cdfdba43ee..0000000000
--- a/scripts/lib/devtool/standard.py
+++ /dev/null
@@ -1,2396 +0,0 @@
1# Development tool - standard commands plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool standard plugins"""
8
9import os
10import sys
11import re
12import shutil
13import subprocess
14import tempfile
15import logging
16import argparse
17import argparse_oe
18import scriptutils
19import errno
20import glob
21from collections import OrderedDict
22
23from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError
24from devtool import parse_recipe
25
26import bb.utils
27
28logger = logging.getLogger('devtool')
29
30override_branch_prefix = 'devtool-override-'
31
32
33def add(args, config, basepath, workspace):
34 """Entry point for the devtool 'add' subcommand"""
35 import bb.data
36 import bb.process
37 import oe.recipeutils
38
39 if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
40 raise argparse_oe.ArgumentUsageError('At least one of recipename, srctree, fetchuri or -f/--fetch must be specified', 'add')
41
42 # These are positional arguments, but because we're nice, allow
43 # specifying e.g. source tree without name, or fetch URI without name or
44 # source tree (if we can detect that that is what the user meant)
45 if scriptutils.is_src_url(args.recipename):
46 if not args.fetchuri:
47 if args.fetch:
48 raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
49 args.fetchuri = args.recipename
50 args.recipename = ''
51 elif scriptutils.is_src_url(args.srctree):
52 if not args.fetchuri:
53 if args.fetch:
54 raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
55 args.fetchuri = args.srctree
56 args.srctree = ''
57 elif args.recipename and not args.srctree:
58 if os.sep in args.recipename:
59 args.srctree = args.recipename
60 args.recipename = None
61 elif os.path.isdir(args.recipename):
62 logger.warning('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
63
64 if not args.fetchuri:
65 if args.srcrev:
66 raise DevtoolError('The -S/--srcrev option is only valid when fetching from an SCM repository')
67 if args.srcbranch:
68 raise DevtoolError('The -B/--srcbranch option is only valid when fetching from an SCM repository')
69
70 if args.srctree and os.path.isfile(args.srctree):
71 args.fetchuri = 'file://' + os.path.abspath(args.srctree)
72 args.srctree = ''
73
74 if args.fetch:
75 if args.fetchuri:
76 raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
77 else:
78 logger.warning('-f/--fetch option is deprecated - you can now simply specify the URL to fetch as a positional argument instead')
79 args.fetchuri = args.fetch
80
81 if args.recipename:
82 if args.recipename in workspace:
83 raise DevtoolError("recipe %s is already in your workspace" %
84 args.recipename)
85 reason = oe.recipeutils.validate_pn(args.recipename)
86 if reason:
87 raise DevtoolError(reason)
88
89 if args.srctree:
90 srctree = os.path.abspath(args.srctree)
91 srctreeparent = None
92 tmpsrcdir = None
93 else:
94 srctree = None
95 srctreeparent = get_default_srctree(config)
96 bb.utils.mkdirhier(srctreeparent)
97 tmpsrcdir = tempfile.mkdtemp(prefix='devtoolsrc', dir=srctreeparent)
98
99 if srctree and os.path.exists(srctree):
100 if args.fetchuri:
101 if not os.path.isdir(srctree):
102 raise DevtoolError("Cannot fetch into source tree path %s as "
103 "it exists and is not a directory" %
104 srctree)
105 elif os.listdir(srctree):
106 raise DevtoolError("Cannot fetch into source tree path %s as "
107 "it already exists and is non-empty" %
108 srctree)
109 elif not args.fetchuri:
110 if args.srctree:
111 raise DevtoolError("Specified source tree %s could not be found" %
112 args.srctree)
113 elif srctree:
114 raise DevtoolError("No source tree exists at default path %s - "
115 "either create and populate this directory, "
116 "or specify a path to a source tree, or a "
117 "URI to fetch source from" % srctree)
118 else:
119 raise DevtoolError("You must either specify a source tree "
120 "or a URI to fetch source from")
121
122 if args.version:
123 if '_' in args.version or ' ' in args.version:
124 raise DevtoolError('Invalid version string "%s"' % args.version)
125
126 if args.color == 'auto' and sys.stdout.isatty():
127 color = 'always'
128 else:
129 color = args.color
130 extracmdopts = ''
131 if args.fetchuri:
132 source = args.fetchuri
133 if srctree:
134 extracmdopts += ' -x %s' % srctree
135 else:
136 extracmdopts += ' -x %s' % tmpsrcdir
137 else:
138 source = srctree
139 if args.recipename:
140 extracmdopts += ' -N %s' % args.recipename
141 if args.version:
142 extracmdopts += ' -V %s' % args.version
143 if args.binary:
144 extracmdopts += ' -b'
145 if args.also_native:
146 extracmdopts += ' --also-native'
147 if args.src_subdir:
148 extracmdopts += ' --src-subdir "%s"' % args.src_subdir
149 if args.autorev:
150 extracmdopts += ' -a'
151 if args.npm_dev:
152 extracmdopts += ' --npm-dev'
153 if args.no_pypi:
154 extracmdopts += ' --no-pypi'
155 if args.mirrors:
156 extracmdopts += ' --mirrors'
157 if args.srcrev:
158 extracmdopts += ' --srcrev %s' % args.srcrev
159 if args.srcbranch:
160 extracmdopts += ' --srcbranch %s' % args.srcbranch
161 if args.provides:
162 extracmdopts += ' --provides %s' % args.provides
163
164 tempdir = tempfile.mkdtemp(prefix='devtool')
165 try:
166 try:
167 stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create --devtool -o %s \'%s\' %s' % (color, tempdir, source, extracmdopts), watch=True)
168 except bb.process.ExecutionError as e:
169 if e.exitcode == 15:
170 raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
171 else:
172 raise DevtoolError('Command \'%s\' failed' % e.command)
173
174 recipes = glob.glob(os.path.join(tempdir, '*.bb'))
175 if recipes:
176 recipename = os.path.splitext(os.path.basename(recipes[0]))[0].split('_')[0]
177 if recipename in workspace:
178 raise DevtoolError('A recipe with the same name as the one being created (%s) already exists in your workspace' % recipename)
179 recipedir = os.path.join(config.workspace_path, 'recipes', recipename)
180 bb.utils.mkdirhier(recipedir)
181 recipefile = os.path.join(recipedir, os.path.basename(recipes[0]))
182 appendfile = recipe_to_append(recipefile, config)
183 if os.path.exists(appendfile):
184 # This shouldn't be possible, but just in case
185 raise DevtoolError('A recipe with the same name as the one being created already exists in your workspace')
186 if os.path.exists(recipefile):
187 raise DevtoolError('A recipe file %s already exists in your workspace; this shouldn\'t be there - please delete it before continuing' % recipefile)
188 if tmpsrcdir:
189 srctree = os.path.join(srctreeparent, recipename)
190 if os.path.exists(tmpsrcdir):
191 if os.path.exists(srctree):
192 if os.path.isdir(srctree):
193 try:
194 os.rmdir(srctree)
195 except OSError as e:
196 if e.errno == errno.ENOTEMPTY:
197 raise DevtoolError('Source tree path %s already exists and is not empty' % srctree)
198 else:
199 raise
200 else:
201 raise DevtoolError('Source tree path %s already exists and is not a directory' % srctree)
202 logger.info('Using default source tree path %s' % srctree)
203 shutil.move(tmpsrcdir, srctree)
204 else:
205 raise DevtoolError('Couldn\'t find source tree created by recipetool')
206 bb.utils.mkdirhier(recipedir)
207 shutil.move(recipes[0], recipefile)
208 # Move any additional files created by recipetool
209 for fn in os.listdir(tempdir):
210 shutil.move(os.path.join(tempdir, fn), recipedir)
211 else:
212 raise DevtoolError(f'Failed to create a recipe file for source {source}')
213 attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
214 if os.path.exists(attic_recipe):
215 logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
216 finally:
217 if tmpsrcdir and os.path.exists(tmpsrcdir):
218 shutil.rmtree(tmpsrcdir)
219 shutil.rmtree(tempdir)
220
221 for fn in os.listdir(recipedir):
222 _add_md5(config, recipename, os.path.join(recipedir, fn))
223
224 tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
225 try:
226 try:
227 rd = tinfoil.parse_recipe_file(recipefile, False)
228 except Exception as e:
229 logger.error(str(e))
230 rd = None
231 if not rd:
232 # Parsing failed. We just created this recipe and we shouldn't
233 # leave it in the workdir or it'll prevent bitbake from starting
234 movefn = '%s.parsefailed' % recipefile
235 logger.error('Parsing newly created recipe failed, moving recipe to %s for reference. If this looks to be caused by the recipe itself, please report this error.' % movefn)
236 shutil.move(recipefile, movefn)
237 return 1
238
239 if args.fetchuri and not args.no_git:
240 setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data)
241
242 initial_rev = {}
243 if os.path.exists(os.path.join(srctree, '.git')):
244 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
245 initial_rev["."] = stdout.rstrip()
246 (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree)
247 for line in stdout.splitlines():
248 (rev, submodule) = line.split()
249 initial_rev[os.path.relpath(submodule, srctree)] = rev
250
251 if args.src_subdir:
252 srctree = os.path.join(srctree, args.src_subdir)
253
254 bb.utils.mkdirhier(os.path.dirname(appendfile))
255 with open(appendfile, 'w') as f:
256 f.write('inherit externalsrc\n')
257 f.write('EXTERNALSRC = "%s"\n' % srctree)
258
259 b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
260 if b_is_s:
261 f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
262 if initial_rev:
263 for key, value in initial_rev.items():
264 f.write('\n# initial_rev %s: %s\n' % (key, value))
265
266 if args.binary:
267 f.write('do_install:append() {\n')
268 f.write(' rm -rf ${D}/.git\n')
269 f.write(' rm -f ${D}/singletask.lock\n')
270 f.write('}\n')
271
272 if bb.data.inherits_class('npm', rd):
273 f.write('python do_configure:append() {\n')
274 f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n')
275 f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n')
276 f.write(' bb.utils.remove(lockfile)\n')
277 f.write('}\n')
278
279 # Check if the new layer provides recipes whose priorities have been
280 # overridden by PREFERRED_PROVIDER.
281 recipe_name = rd.getVar('PN')
282 provides = rd.getVar('PROVIDES')
283 # Search every item defined in PROVIDES
284 for recipe_provided in provides.split():
285 preferred_provider = 'PREFERRED_PROVIDER_' + recipe_provided
286 current_pprovider = rd.getVar(preferred_provider)
287 if current_pprovider and current_pprovider != recipe_name:
288 if args.fixed_setup:
289 # If we are inside the eSDK, add the new PREFERRED_PROVIDER to the workspace layer.conf
290 layerconf_file = os.path.join(config.workspace_path, "conf", "layer.conf")
291 with open(layerconf_file, 'a') as f:
292 f.write('%s = "%s"\n' % (preferred_provider, recipe_name))
293 else:
294 logger.warning('Set \'%s\' in order to use the recipe' % preferred_provider)
295 break
296
297 _add_md5(config, recipename, appendfile)
298
299 check_prerelease_version(rd.getVar('PV'), 'devtool add')
300
301 logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
302
303 finally:
304 tinfoil.shutdown()
305
306 return 0
307
308
309def _check_compatible_recipe(pn, d):
310 """Check if the recipe is supported by devtool"""
311 import bb.data
312 if pn == 'perf':
313 raise DevtoolError("The perf recipe does not actually check out "
314 "source and thus cannot be supported by this tool",
315 4)
316
317 if pn in ['kernel-devsrc', 'package-index'] or pn.startswith('gcc-source'):
318 raise DevtoolError("The %s recipe is not supported by this tool" % pn, 4)
319
320 if bb.data.inherits_class('image', d):
321 raise DevtoolError("The %s recipe is an image, and therefore is not "
322 "supported by this tool" % pn, 4)
323
324 if bb.data.inherits_class('populate_sdk', d):
325 raise DevtoolError("The %s recipe is an SDK, and therefore is not "
326 "supported by this tool" % pn, 4)
327
328 if bb.data.inherits_class('packagegroup', d):
329 raise DevtoolError("The %s recipe is a packagegroup, and therefore is "
330 "not supported by this tool" % pn, 4)
331
332 if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'):
333 # Not an incompatibility error per se, so we don't pass the error code
334 raise DevtoolError("externalsrc is currently enabled for the %s "
335 "recipe. This prevents the normal do_patch task "
336 "from working. You will need to disable this "
337 "first." % pn)
338
339def _dry_run_copy(src, dst, dry_run_outdir, base_outdir):
340 """Common function for copying a file to the dry run output directory"""
341 relpath = os.path.relpath(dst, base_outdir)
342 if relpath.startswith('..'):
343 raise Exception('Incorrect base path %s for path %s' % (base_outdir, dst))
344 dst = os.path.join(dry_run_outdir, relpath)
345 dst_d = os.path.dirname(dst)
346 if dst_d:
347 bb.utils.mkdirhier(dst_d)
348 # Don't overwrite existing files, otherwise in the case of an upgrade
349 # the dry-run written out recipe will be overwritten with an unmodified
350 # version
351 if not os.path.exists(dst):
352 shutil.copy(src, dst)
353
354def _move_file(src, dst, dry_run_outdir=None, base_outdir=None):
355 """Move a file. Creates all the directory components of destination path."""
356 dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
357 logger.debug('Moving %s to %s%s' % (src, dst, dry_run_suffix))
358 if dry_run_outdir:
359 # We want to copy here, not move
360 _dry_run_copy(src, dst, dry_run_outdir, base_outdir)
361 else:
362 dst_d = os.path.dirname(dst)
363 if dst_d:
364 bb.utils.mkdirhier(dst_d)
365 shutil.move(src, dst)
366
367def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
368 """Copy a file. Creates all the directory components of destination path."""
369 dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
370 logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix))
371 if dry_run_outdir:
372 _dry_run_copy(src, dst, dry_run_outdir, base_outdir)
373 else:
374 dst_d = os.path.dirname(dst)
375 if dst_d:
376 bb.utils.mkdirhier(dst_d)
377 shutil.copy(src, dst)
378
379def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
380 """List contents of a git treeish"""
381 import bb.process
382 cmd = ['git', 'ls-tree', '-z', treeish]
383 if recursive:
384 cmd.append('-r')
385 out, _ = bb.process.run(cmd, cwd=repodir)
386 ret = {}
387 if out:
388 for line in out.split('\0'):
389 if line:
390 split = line.split(None, 4)
391 ret[split[3]] = split[0:3]
392 return ret
393
394def _git_modified(repodir):
395 """List the difference between HEAD and the index"""
396 import bb.process
397 cmd = ['git', 'status', '--porcelain']
398 out, _ = bb.process.run(cmd, cwd=repodir)
399 ret = []
400 if out:
401 for line in out.split("\n"):
402 if line and not line.startswith('??'):
403 ret.append(line[3:])
404 return ret
405
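_git_modified relies on the stable --porcelain format: two status columns, a space, then the path, so line[3:] recovers the path and a leading '??' marks untracked files. The same parse as a standalone sketch using subprocess instead of bb.process:

import subprocess

def git_modified(repodir):
    # Paths with staged or unstaged changes, excluding untracked files
    out = subprocess.check_output(['git', 'status', '--porcelain'],
                                  cwd=repodir, text=True)
    return [line[3:] for line in out.splitlines()
            if line and not line.startswith('??')]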
406
407def _git_exclude_path(srctree, path):
408 """Return pathspec (list of paths) that excludes certain path"""
409 # NOTE: "Filtering out" files/paths in this way is not entirely reliable -
410 # we don't catch files that are deleted, for example. A more reliable way
411 # to implement this would be to use "negative pathspecs" which were
412 # introduced in Git v1.9.0. Revisit this when/if the required Git version
413 # becomes greater than that.
414 path = os.path.normpath(path)
415 recurse = len(path.split(os.path.sep)) > 1
416 git_files = list(_git_ls_tree(srctree, 'HEAD', recurse).keys())
417 if path in git_files:
418 git_files.remove(path)
419 return git_files
420 else:
421 return ['.']
422
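Both git helpers above consume plumbing output in which each NUL-terminated record looks like '<mode> <type> <sha>\t<path>'. A standalone sketch that splits on the tab first, so paths containing spaces survive intact:

import subprocess

def git_ls_tree(repodir, treeish='HEAD', recursive=False):
    cmd = ['git', 'ls-tree', '-z', treeish]
    if recursive:
        cmd.append('-r')
    out = subprocess.check_output(cmd, cwd=repodir, text=True)
    ret = {}
    for record in out.split('\0'):
        if record:
            meta, path = record.split('\t', 1)
            ret[path] = meta.split()  # [mode, type, sha]
    return ret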
423def _ls_tree(directory):
424 """Recursive listing of files in a directory"""
425 ret = []
426 for root, dirs, files in os.walk(directory):
427 ret.extend([os.path.relpath(os.path.join(root, fname), directory) for
428 fname in files])
429 return ret
430
431
432def extract(args, config, basepath, workspace):
433 """Entry point for the devtool 'extract' subcommand"""
434 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
435 if not tinfoil:
436 # Error already shown
437 return 1
438 try:
439 rd = parse_recipe(config, tinfoil, args.recipename, True)
440 if not rd:
441 return 1
442
443 srctree = os.path.abspath(args.srctree)
444 initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
445 logger.info('Source tree extracted to %s' % srctree)
446
447 if initial_rev:
448 return 0
449 else:
450 return 1
451 finally:
452 tinfoil.shutdown()
453
454def sync(args, config, basepath, workspace):
455 """Entry point for the devtool 'sync' subcommand"""
456 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
457 if not tinfoil:
458 # Error already shown
459 return 1
460 try:
461 rd = parse_recipe(config, tinfoil, args.recipename, True)
462 if not rd:
463 return 1
464
465 srctree = os.path.abspath(args.srctree)
466 initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, True, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=True)
467 logger.info('Source tree %s synchronized' % srctree)
468
469 if initial_rev:
470 return 0
471 else:
472 return 1
473 finally:
474 tinfoil.shutdown()
475
476def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
477 """Extract sources of a recipe"""
478 import oe.path
479 import bb.data
480 import bb.process
481
482 pn = d.getVar('PN')
483
484 _check_compatible_recipe(pn, d)
485
486 if sync:
487 if not os.path.exists(srctree):
488 raise DevtoolError("output path %s does not exist" % srctree)
489 else:
490 if os.path.exists(srctree):
491 if not os.path.isdir(srctree):
492 raise DevtoolError("output path %s exists and is not a directory" %
493 srctree)
494 elif os.listdir(srctree):
495 raise DevtoolError("output path %s already exists and is "
496 "non-empty" % srctree)
497
498 if 'noexec' in (d.getVarFlags('do_unpack', False) or []):
499 raise DevtoolError("The %s recipe has do_unpack disabled, unable to "
500 "extract source" % pn, 4)
501
502 if not sync:
503 # Prepare for shutil.move later on
504 bb.utils.mkdirhier(srctree)
505 os.rmdir(srctree)
506
507 extra_overrides = []
508 if not no_overrides:
509 history = d.varhistory.variable('SRC_URI')
510 for event in history:
511 if 'flag' not in event:
512 if event['op'].startswith((':append[', ':prepend[')):
513 override = event['op'].split('[')[1].split(']')[0]
514 if not override.startswith('pn-'):
515 extra_overrides.append(override)
516 # We want to remove duplicate overrides. If a recipe had multiple
517 # SRC_URI_override += values it would cause multiple instances of
518 # overrides. This doesn't play nicely with things like creating a
519 # branch for every instance of DEVTOOL_EXTRA_OVERRIDES.
520 extra_overrides = list(set(extra_overrides))
521 if extra_overrides:
522 logger.info('SRC_URI contains some conditional appends/prepends - will create branches to represent these')
523
524 initial_rev = None
525
526 recipefile = d.getVar('FILE')
527 appendfile = recipe_to_append(recipefile, config)
528 is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
529
530 # We need to redirect WORKDIR, STAMPS_DIR etc. under a temporary
531 # directory so that:
532 # (a) we pick up all files that get unpacked to the WORKDIR, and
533 # (b) we don't disturb the existing build
534 # However, with recipe-specific sysroots the sysroots for the recipe
535 # will be prepared under WORKDIR, and if we used the system temporary
536 # directory (i.e. usually /tmp) as used by mkdtemp by default, then
537 # our attempts to hardlink files into the recipe-specific sysroots
538 # will fail on systems where /tmp is a different filesystem, and it
539 # would have to fall back to copying the files which is a waste of
540 # time. Put the temp directory under the WORKDIR to prevent that from
541 # being a problem.
542 tempbasedir = d.getVar('WORKDIR')
543 bb.utils.mkdirhier(tempbasedir)
544 tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir)
545 appendbackup = None
546 try:
547 tinfoil.logger.setLevel(logging.WARNING)
548
549 # FIXME this results in a cache reload under control of tinfoil, which is fine
550 # except we don't get the knotty progress bar
551
552 if os.path.exists(appendfile):
553 appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
554 shutil.copyfile(appendfile, appendbackup)
555 else:
556 bb.utils.mkdirhier(os.path.dirname(appendfile))
557 logger.debug('writing append file %s' % appendfile)
558 with open(appendfile, 'a') as f:
559 f.write('###--- _extract_source\n')
560 f.write('deltask do_recipe_qa\n')
561 f.write('deltask do_recipe_qa_setscene\n')
562 f.write('ERROR_QA:remove = "patch-fuzz"\n')
563 f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
564 f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
565 if not is_kernel_yocto:
566 f.write('PATCHTOOL = "git"\n')
567 f.write('PATCH_COMMIT_FUNCTIONS = "1"\n')
568 if extra_overrides:
569 f.write('DEVTOOL_EXTRA_OVERRIDES = "%s"\n' % ':'.join(extra_overrides))
570 f.write('inherit devtool-source\n')
571 f.write('###--- _extract_source\n')
572
573 update_unlockedsigs(basepath, workspace, fixed_setup, [pn])
574
575 sstate_manifests = d.getVar('SSTATE_MANIFESTS')
576 bb.utils.mkdirhier(sstate_manifests)
577 preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
578 with open(preservestampfile, 'w') as f:
579 f.write(d.getVar('STAMP'))
580 tinfoil.modified_files()
581 try:
582 if is_kernel_yocto:
583 # We need to generate the kernel config
584 task = 'do_configure'
585 else:
586 task = 'do_patch'
587
588 if 'noexec' in (d.getVarFlags(task, False) or []) or 'task' not in (d.getVarFlags(task, False) or []):
589 logger.info('The %s recipe has %s disabled. Running only '
590 'do_configure task dependencies' % (pn, task))
591
592 if 'depends' in d.getVarFlags('do_configure', False):
593 pn = d.getVarFlags('do_configure', False)['depends']
594 pn = pn.replace('${PV}', d.getVar('PV'))
595 pn = pn.replace('${COMPILERDEP}', d.getVar('COMPILERDEP'))
596 task = None
597
598 # Run the fetch + unpack tasks
599 res = tinfoil.build_targets(pn,
600 task,
601 handle_events=True)
602 finally:
603 if os.path.exists(preservestampfile):
604 os.remove(preservestampfile)
605
606 if not res:
607 raise DevtoolError('Extracting source for %s failed' % pn)
608
609 if not is_kernel_yocto and ('noexec' in (d.getVarFlags('do_patch', False) or []) or 'task' not in (d.getVarFlags('do_patch', False) or [])):
610 workshareddir = d.getVar('S')
611 if os.path.islink(srctree):
612 os.unlink(srctree)
613
614 os.symlink(workshareddir, srctree)
615
616 # The initial_rev file is created by the devtool_post_unpack function, which will not be executed
617 # if the do_unpack/do_patch tasks are disabled, so we have to report directly that source extraction succeeded
618 return True, True
619
620 try:
621 with open(os.path.join(tempdir, 'initial_rev'), 'r') as f:
622 initial_rev = f.read()
623
624 with open(os.path.join(tempdir, 'srcsubdir'), 'r') as f:
625 srcsubdir = f.read()
626 except FileNotFoundError as e:
627 raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
628 srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))
629
630 # Check if work-shared is empty; if so,
631 # find the source and copy it to work-shared
632 if is_kernel_yocto:
633 workshareddir = d.getVar('STAGING_KERNEL_DIR')
634 staging_kerVer = get_staging_kver(workshareddir)
635 kernelVersion = d.getVar('LINUX_VERSION')
636
637 # handle dangling symbolic link in work-shared:
638 if os.path.islink(workshareddir):
639 os.unlink(workshareddir)
640
641 if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
642 shutil.rmtree(workshareddir)
643 oe.path.copyhardlinktree(srcsubdir, workshareddir)
644 elif not os.path.exists(workshareddir):
645 oe.path.copyhardlinktree(srcsubdir, workshareddir)
646
647 if sync:
648 try:
649 logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
650 bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree)
651
652 # Use git fetch to update the source with the current recipe
653 # To be able to update the currently checked out branch with
654 # possibly new history (no fast-forward) git needs to be told
655 # that's ok
656 logger.info('Syncing source files including patches to git branch: %s' % devbranch)
657 bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
658 except bb.process.ExecutionError as e:
659 raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))
660
661 else:
662 shutil.move(srcsubdir, srctree)
663
664 if is_kernel_yocto:
665 logger.info('Copying kernel config to srctree')
666 shutil.copy2(os.path.join(tempdir, '.config'), srctree)
667
668 finally:
669 if appendbackup:
670 shutil.copyfile(appendbackup, appendfile)
671 elif os.path.exists(appendfile):
672 os.remove(appendfile)
673 if keep_temp:
674 logger.info('Preserving temporary directory %s' % tempdir)
675 else:
676 shutil.rmtree(tempdir)
677 return initial_rev, srcsubdir_rel
678
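The long comment in _extract_source about keeping the temporary directory under WORKDIR exists because os.link() fails with EXDEV across filesystems. A minimal sketch of the hardlink-with-copy-fallback pattern that placement keeps fast (the 'workdir-example' path is illustrative only):

import errno
import os
import shutil
import tempfile

def link_or_copy(src, dst):
    """Hardlink src to dst; fall back to copying across filesystems."""
    try:
        os.link(src, dst)
    except OSError as e:
        if e.errno != errno.EXDEV:
            raise
        shutil.copy2(src, dst)  # slow path: different filesystem

# Keeping the temp dir on the same filesystem as the work area
# preserves the fast hardlink path.
workdir = os.path.abspath('workdir-example')
os.makedirs(workdir, exist_ok=True)
tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=workdir)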
679def _add_md5(config, recipename, filename):
680 """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace"""
681 def addfile(fn):
682 md5 = bb.utils.md5_file(fn)
683 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f:
684 md5_str = '%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), md5)
685 f.seek(0, os.SEEK_SET)
686 if md5_str not in f.read():
687 f.write(md5_str)
688
689 if os.path.isdir(filename):
690 for root, _, files in os.walk(filename):
691 for f in files:
692 addfile(os.path.join(root, f))
693 else:
694 addfile(filename)
695
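The tracking file written here is plain text, one 'recipename|relative/path|md5' entry per line; _check_preserve below re-reads it to decide whether a workspace file can be deleted or must be preserved in 'attic'. A sketch of reading it back, assuming that same three-field layout:

import hashlib

def parse_md5_file(path):
    """Map relative path -> (recipename, md5) from a .devtool_md5-style file."""
    entries = {}
    with open(path) as f:
        for line in f:
            recipename, relpath, md5 = line.rstrip('\n').split('|')
            entries[relpath] = (recipename, md5)
    return entries

def file_md5(path):
    with open(path, 'rb') as f:
        return hashlib.md5(f.read()).hexdigest()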
696def _check_preserve(config, recipename):
697 """Check if a file was manually changed and needs to be saved in 'attic'
698 directory"""
699 origfile = os.path.join(config.workspace_path, '.devtool_md5')
700 newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
701 preservepath = os.path.join(config.workspace_path, 'attic', recipename)
702 with open(origfile, 'r') as f:
703 with open(newfile, 'w') as tf:
704 for line in f.readlines():
705 splitline = line.rstrip().split('|')
706 if splitline[0] == recipename:
707 removefile = os.path.join(config.workspace_path, splitline[1])
708 try:
709 md5 = bb.utils.md5_file(removefile)
710 except IOError as err:
711 if err.errno == 2:
712 # File no longer exists, skip it
713 continue
714 else:
715 raise
716 if splitline[2] != md5:
717 bb.utils.mkdirhier(preservepath)
718 preservefile = os.path.basename(removefile)
719 logger.warning('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
720 shutil.move(removefile, os.path.join(preservepath, preservefile))
721 else:
722 os.remove(removefile)
723 else:
724 tf.write(line)
725 bb.utils.rename(newfile, origfile)
726
727def get_staging_kver(srcdir):
728 # Kernel version from work-shared
729 import itertools
730 try:
731 with open(os.path.join(srcdir, "Makefile")) as f:
732 # Take VERSION, PATCHLEVEL, SUBLEVEL from lines 2-4 (islice skips the first line)
733 return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4))
734 except FileNotFoundError:
735 return ""
736
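get_staging_kver() leans on the fixed layout of the kernel's top-level Makefile. A slightly more defensive sketch parses the three keys by name instead of by line position:

import os

def staging_kver(srcdir):
    """Read VERSION.PATCHLEVEL.SUBLEVEL from a kernel Makefile, or ''."""
    fields = {}
    try:
        with open(os.path.join(srcdir, 'Makefile')) as f:
            for line in f:
                key, sep, value = line.partition('=')
                if sep and key.strip() in ('VERSION', 'PATCHLEVEL', 'SUBLEVEL'):
                    fields[key.strip()] = value.strip()
                if len(fields) == 3:
                    break
    except FileNotFoundError:
        return ''
    try:
        return '{VERSION}.{PATCHLEVEL}.{SUBLEVEL}'.format(**fields)
    except KeyError:
        return ''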
737def get_staging_kbranch(srcdir):
738 import bb.process
739 staging_kbranch = ""
740 if os.path.exists(srcdir) and os.listdir(srcdir):
741 (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
742 staging_kbranch = "".join(branch.split('\n')[0])
743 return staging_kbranch
744
745def get_real_srctree(srctree, s, workdir):
746 # Check that recipe isn't using a shared workdir
747 s = os.path.abspath(s)
748 workdir = os.path.abspath(workdir)
749 if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
750 # Handle if S is set to a subdirectory of the source
751 srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
752 srctree = os.path.join(srctree, srcsubdir)
753 return srctree
754
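A quick worked example of the path arithmetic in get_real_srctree(): with WORKDIR at /w and S at /w/build/pkg-1.0, relpath gives 'build/pkg-1.0', the split drops the first component, and 'pkg-1.0' is appended to the external source tree (the paths here are made up for illustration):

import os

def real_srctree(srctree, s, workdir):
    s = os.path.abspath(s)
    workdir = os.path.abspath(workdir)
    if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
        # Drop the first path component below WORKDIR, keep the rest
        srctree = os.path.join(srctree,
                               os.path.relpath(s, workdir).split(os.sep, 1)[1])
    return srctree

assert real_srctree('/src/pkg', '/w/build/pkg-1.0', '/w') == '/src/pkg/pkg-1.0'
assert real_srctree('/src/pkg', '/w', '/w') == '/src/pkg'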
755def modify(args, config, basepath, workspace):
756 """Entry point for the devtool 'modify' subcommand"""
757 import bb.data
758 import bb.process
759 import oe.recipeutils
760 import oe.patch
761 import oe.path
762
763 if args.recipename in workspace:
764 raise DevtoolError("recipe %s is already in your workspace" %
765 args.recipename)
766
767 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
768 try:
769 rd = parse_recipe(config, tinfoil, args.recipename, True)
770 if not rd:
771 return 1
772
773 pn = rd.getVar('PN')
774 if pn != args.recipename:
775 logger.info('Mapping %s to %s' % (args.recipename, pn))
776 if pn in workspace:
777 raise DevtoolError("recipe %s is already in your workspace" %
778 pn)
779
780 if args.srctree:
781 srctree = os.path.abspath(args.srctree)
782 else:
783 srctree = get_default_srctree(config, pn)
784
785 if args.no_extract and not os.path.isdir(srctree):
786 raise DevtoolError("--no-extract specified and source path %s does "
787 "not exist or is not a directory" %
788 srctree)
789
790 recipefile = rd.getVar('FILE')
791 appendfile = recipe_to_append(recipefile, config, args.wildcard)
792 if os.path.exists(appendfile):
793 raise DevtoolError("Another variant of recipe %s is already in your "
794 "workspace (only one variant of a recipe can "
795 "currently be worked on at once)"
796 % pn)
797
798 _check_compatible_recipe(pn, rd)
799
800 initial_revs = {}
801 commits = {}
802 check_commits = False
803
804 if bb.data.inherits_class('kernel-yocto', rd):
805 # Current set kernel version
806 kernelVersion = rd.getVar('LINUX_VERSION')
807 srcdir = rd.getVar('STAGING_KERNEL_DIR')
808 kbranch = rd.getVar('KBRANCH')
809
810 staging_kerVer = get_staging_kver(srcdir)
811 staging_kbranch = get_staging_kbranch(srcdir)
812 if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
813 oe.path.copyhardlinktree(srcdir, srctree)
814 unpackdir = rd.getVar('UNPACKDIR')
815 srcsubdir = rd.getVar('S')
816
817 # Add locally copied files to gitignore as we add back to the metadata directly
818 local_files = oe.recipeutils.get_recipe_local_files(rd)
819 srcabspath = os.path.abspath(srcsubdir)
820 local_files = [fname for fname in local_files if
821 os.path.exists(os.path.join(unpackdir, fname)) and
822 srcabspath == unpackdir]
823 if local_files:
824 with open(os.path.join(srctree, '.gitignore'), 'a+') as f:
825 f.write('# Ignore local files, by default. Remove the following lines '
826 'if you want to commit the directory to Git\n')
827 for fname in local_files:
828 f.write('%s\n' % fname)
829
830 task = 'do_configure'
831 res = tinfoil.build_targets(pn, task, handle_events=True)
832
833 # Copy .config to workspace
834 kconfpath = rd.getVar('B')
835 logger.info('Copying kernel config to workspace')
836 shutil.copy2(os.path.join(kconfpath, '.config'), srctree)
837
838 # Set this to true, we still need to get initial_rev
839 # by parsing the git repo
840 args.no_extract = True
841
842 if not args.no_extract:
843 initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
844 if not initial_revs["."]:
845 return 1
846 logger.info('Source tree extracted to %s' % srctree)
847
848 if os.path.exists(os.path.join(srctree, '.git')):
849 # Get list of commits since this revision
850 (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
851 commits["."] = stdout.split()
852 check_commits = True
853 try:
854 (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
855 except bb.process.ExecutionError:
856 stdout = ""
857 for line in stdout.splitlines():
858 (rev, submodule_path) = line.split()
859 submodule = os.path.relpath(submodule_path, srctree)
860 initial_revs[submodule] = rev
861 (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path)
862 commits[submodule] = stdout.split()
863 else:
864 if os.path.exists(os.path.join(srctree, '.git')):
865 # Check if it's a tree previously extracted by us. This is done
866 # by ensuring that devtool-base and args.branch (devtool) exist.
867 # The check_commits logic will cause an exception if either one
868 # of these doesn't exist
869 try:
870 (stdout, _) = bb.process.run('git branch --contains devtool-base', cwd=srctree)
871 bb.process.run('git rev-parse %s' % args.branch, cwd=srctree)
872 except bb.process.ExecutionError:
873 stdout = ''
874 if stdout:
875 check_commits = True
876 for line in stdout.splitlines():
877 if line.startswith('*'):
878 (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
879 initial_revs["."] = stdout.rstrip()
880 if "." not in initial_revs:
881 # Otherwise, just grab the head revision
882 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
883 initial_revs["."] = stdout.rstrip()
884
885 branch_patches = {}
886 if check_commits:
887 # Check if there are override branches
888 (stdout, _) = bb.process.run('git branch', cwd=srctree)
889 branches = []
890 for line in stdout.rstrip().splitlines():
891 branchname = line[2:].rstrip()
892 if branchname.startswith(override_branch_prefix):
893 branches.append(branchname)
894 if branches:
895 logger.warning('SRC_URI is conditionally overridden in this recipe, thus several %s* branches have been created, one for each override that makes changes to SRC_URI. It is recommended that you make changes to the %s branch first, then checkout and rebase each %s* branch and update any unique patches there (duplicates on those branches will be ignored by devtool finish/update-recipe)' % (override_branch_prefix, args.branch, override_branch_prefix))
896 branches.insert(0, args.branch)
897 seen_patches = []
898 for branch in branches:
899 branch_patches[branch] = []
900 (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree)
901 for sha1 in stdout.splitlines():
902 notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip())
903 origpatch = notes.get(oe.patch.GitApplyTree.original_patch)
904 if origpatch and origpatch not in seen_patches:
905 seen_patches.append(origpatch)
906 branch_patches[branch].append(origpatch)
907
908 # Need to grab this here in case the source is within a subdirectory
909 srctreebase = srctree
910 srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
911
912 bb.utils.mkdirhier(os.path.dirname(appendfile))
913 with open(appendfile, 'w') as f:
914 # If not present, add type=git-dependency to the secondary sources
915 # (non-local files) so they can be extracted correctly when building a recipe after
916 # doing a devtool modify on it
917 src_uri = rd.getVar('SRC_URI').split()
918 src_uri_append = []
919 src_uri_remove = []
920
921 # Assume first entry is main source extracted in ${S} so skip it
922 src_uri = src_uri[1:]
923
924 # Add "type=git-dependency" to all non local sources
925 for url in src_uri:
926 if not url.startswith('file://') and 'type=' not in url:
927 src_uri_remove.append(url)
928 src_uri_append.append('%s;type=git-dependency' % url)
929
930 if src_uri_remove:
931 f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove))
932 f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append))
933
934 f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n')
935 # Local files can be modified/tracked in separate subdir under srctree
936 # Mostly useful for packages with S != WORKDIR
937 f.write('FILESPATH:prepend := "%s:"\n' %
938 os.path.join(srctreebase, 'oe-local-files'))
939 f.write('# srctreebase: %s\n' % srctreebase)
940
941 f.write('\ninherit externalsrc\n')
942 f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
943 f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
944
945 b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
946 if b_is_s:
947 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
948
949 if bb.data.inherits_class('kernel', rd):
950 f.write('\ndo_kernel_configme:prepend() {\n'
951 ' if [ -e ${S}/.config ]; then\n'
952 ' mv ${S}/.config ${S}/.config.old\n'
953 ' fi\n'
954 '}\n')
955 if rd.getVarFlag('do_menuconfig', 'task'):
956 f.write('\ndo_configure:append() {\n'
957 ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n'
958 ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n'
959 ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n'
960 ' fi\n'
961 '}\n')
962 if initial_revs:
963 for name, rev in initial_revs.items():
964 f.write('\n# initial_rev %s: %s\n' % (name, rev))
965 if name in commits:
966 for commit in commits[name]:
967 f.write('# commit %s: %s\n' % (name, commit))
968 if branch_patches:
969 for branch in branch_patches:
970 if branch == args.branch:
971 continue
972 f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch])))
973 if args.debug_build:
974 f.write('\nDEBUG_BUILD = "1"\n')
975
976 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
977
978 _add_md5(config, pn, appendfile)
979
980 logger.info('Recipe %s now set up to build from %s' % (pn, srctree))
981
982 finally:
983 tinfoil.shutdown()
984
985 return 0
986
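Several steps in modify() come down to the same git bookkeeping: record a base revision, then enumerate the commits stacked on top of it. A standalone sketch of that with subprocess (devtool-base is the ref devtool itself creates during extraction; any ref would do here):

import subprocess

def run_git(args, cwd):
    return subprocess.check_output(['git'] + args, cwd=cwd, text=True).strip()

def commits_since(srctree, base='devtool-base'):
    """Base revision plus the commits added on top of it, oldest first."""
    initial_rev = run_git(['rev-parse', base], cwd=srctree)
    commits = run_git(['rev-list', '--reverse', '%s..HEAD' % base],
                      cwd=srctree).split()
    return initial_rev, commits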
987
988def rename(args, config, basepath, workspace):
989 """Entry point for the devtool 'rename' subcommand"""
990 import bb
991 import oe.recipeutils
992
993 check_workspace_recipe(workspace, args.recipename)
994
995 if not (args.newname or args.version):
996 raise DevtoolError('You must specify a new name, a version with -V/--version, or both')
997
998 recipefile = workspace[args.recipename]['recipefile']
999 if not recipefile:
1000 raise DevtoolError('devtool rename can only be used where the recipe file itself is in the workspace (e.g. after devtool add)')
1001
1002 if args.newname and args.newname != args.recipename:
1003 reason = oe.recipeutils.validate_pn(args.newname)
1004 if reason:
1005 raise DevtoolError(reason)
1006 newname = args.newname
1007 else:
1008 newname = args.recipename
1009
1010 append = workspace[args.recipename]['bbappend']
1011 appendfn = os.path.splitext(os.path.basename(append))[0]
1012 splitfn = appendfn.split('_')
1013 if len(splitfn) > 1:
1014 origfnver = appendfn.split('_')[1]
1015 else:
1016 origfnver = ''
1017
1018 recipefilemd5 = None
1019 newrecipefilemd5 = None
1020 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
1021 try:
1022 rd = parse_recipe(config, tinfoil, args.recipename, True)
1023 if not rd:
1024 return 1
1025
1026 bp = rd.getVar('BP')
1027 bpn = rd.getVar('BPN')
1028 if newname != args.recipename:
1029 localdata = rd.createCopy()
1030 localdata.setVar('PN', newname)
1031 newbpn = localdata.getVar('BPN')
1032 else:
1033 newbpn = bpn
1034 s = rd.getVar('S', False)
1035 src_uri = rd.getVar('SRC_URI', False)
1036 pv = rd.getVar('PV')
1037
1038 # Correct variable values that refer to the upstream source - these
1039 # values must stay the same, so if the name/version are changing then
1040 # we need to fix them up
1041 new_s = s
1042 new_src_uri = src_uri
1043 if newbpn != bpn:
1044 # ${PN} here is technically almost always incorrect, but people do use it
1045 new_s = new_s.replace('${BPN}', bpn)
1046 new_s = new_s.replace('${PN}', bpn)
1047 new_s = new_s.replace('${BP}', '%s-${PV}' % bpn)
1048 new_src_uri = new_src_uri.replace('${BPN}', bpn)
1049 new_src_uri = new_src_uri.replace('${PN}', bpn)
1050 new_src_uri = new_src_uri.replace('${BP}', '%s-${PV}' % bpn)
1051 if args.version and origfnver == pv:
1052 new_s = new_s.replace('${PV}', pv)
1053 new_s = new_s.replace('${BP}', '${BPN}-%s' % pv)
1054 new_src_uri = new_src_uri.replace('${PV}', pv)
1055 new_src_uri = new_src_uri.replace('${BP}', '${BPN}-%s' % pv)
1056 patchfields = {}
1057 if new_s != s:
1058 patchfields['S'] = new_s
1059 if new_src_uri != src_uri:
1060 patchfields['SRC_URI'] = new_src_uri
1061 if patchfields:
1062 recipefilemd5 = bb.utils.md5_file(recipefile)
1063 oe.recipeutils.patch_recipe(rd, recipefile, patchfields)
1064 newrecipefilemd5 = bb.utils.md5_file(recipefile)
1065 finally:
1066 tinfoil.shutdown()
1067
1068 if args.version:
1069 newver = args.version
1070 else:
1071 newver = origfnver
1072
1073 if newver:
1074 newappend = '%s_%s.bbappend' % (newname, newver)
1075 newfile = '%s_%s.bb' % (newname, newver)
1076 else:
1077 newappend = '%s.bbappend' % newname
1078 newfile = '%s.bb' % newname
1079
1080 oldrecipedir = os.path.dirname(recipefile)
1081 newrecipedir = os.path.join(config.workspace_path, 'recipes', newname)
1082 if oldrecipedir != newrecipedir:
1083 bb.utils.mkdirhier(newrecipedir)
1084
1085 newappend = os.path.join(os.path.dirname(append), newappend)
1086 newfile = os.path.join(newrecipedir, newfile)
1087
1088 # Rename bbappend
1089 logger.info('Renaming %s to %s' % (append, newappend))
1090 bb.utils.rename(append, newappend)
1091 # Rename recipe file
1092 logger.info('Renaming %s to %s' % (recipefile, newfile))
1093 bb.utils.rename(recipefile, newfile)
1094
1095 # Rename source tree if it's the default path
1096 appendmd5 = None
1097 newappendmd5 = None
1098 if not args.no_srctree:
1099 srctree = workspace[args.recipename]['srctree']
1100 if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename):
1101 newsrctree = os.path.join(config.workspace_path, 'sources', newname)
1102 logger.info('Renaming %s to %s' % (srctree, newsrctree))
1103 shutil.move(srctree, newsrctree)
1104 # Correct any references (basically EXTERNALSRC*) in the .bbappend
1105 appendmd5 = bb.utils.md5_file(newappend)
1106 appendlines = []
1107 with open(newappend, 'r') as f:
1108 for line in f:
1109 appendlines.append(line)
1110 with open(newappend, 'w') as f:
1111 for line in appendlines:
1112 if srctree in line:
1113 line = line.replace(srctree, newsrctree)
1114 f.write(line)
1115 newappendmd5 = bb.utils.md5_file(newappend)
1116
1117 bpndir = None
1118 newbpndir = None
1119 if newbpn != bpn:
1120 bpndir = os.path.join(oldrecipedir, bpn)
1121 if os.path.exists(bpndir):
1122 newbpndir = os.path.join(newrecipedir, newbpn)
1123 logger.info('Renaming %s to %s' % (bpndir, newbpndir))
1124 shutil.move(bpndir, newbpndir)
1125
1126 bpdir = None
1127 newbpdir = None
1128 if newver != origfnver or newbpn != bpn:
1129 bpdir = os.path.join(oldrecipedir, bp)
1130 if os.path.exists(bpdir):
1131 newbpdir = os.path.join(newrecipedir, '%s-%s' % (newbpn, newver))
1132 logger.info('Renaming %s to %s' % (bpdir, newbpdir))
1133 shutil.move(bpdir, newbpdir)
1134
1135 if oldrecipedir != newrecipedir:
1136 # Move any stray files and delete the old recipe directory
1137 for entry in os.listdir(oldrecipedir):
1138 oldpath = os.path.join(oldrecipedir, entry)
1139 newpath = os.path.join(newrecipedir, entry)
1140 logger.info('Renaming %s to %s' % (oldpath, newpath))
1141 shutil.move(oldpath, newpath)
1142 os.rmdir(oldrecipedir)
1143
1144 # Now take care of entries in .devtool_md5
1145 md5entries = []
1146 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'r') as f:
1147 for line in f:
1148 md5entries.append(line)
1149
1150 if bpndir and newbpndir:
1151 relbpndir = os.path.relpath(bpndir, config.workspace_path) + '/'
1152 else:
1153 relbpndir = None
1154 if bpdir and newbpdir:
1155 relbpdir = os.path.relpath(bpdir, config.workspace_path) + '/'
1156 else:
1157 relbpdir = None
1158
1159 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'w') as f:
1160 for entry in md5entries:
1161 splitentry = entry.rstrip().split('|')
1162 if len(splitentry) > 2:
1163 if splitentry[0] == args.recipename:
1164 splitentry[0] = newname
1165 if splitentry[1] == os.path.relpath(append, config.workspace_path):
1166 splitentry[1] = os.path.relpath(newappend, config.workspace_path)
1167 if appendmd5 and splitentry[2] == appendmd5:
1168 splitentry[2] = newappendmd5
1169 elif splitentry[1] == os.path.relpath(recipefile, config.workspace_path):
1170 splitentry[1] = os.path.relpath(newfile, config.workspace_path)
1171 if recipefilemd5 and splitentry[2] == recipefilemd5:
1172 splitentry[2] = newrecipefilemd5
1173 elif relbpndir and splitentry[1].startswith(relbpndir):
1174 splitentry[1] = os.path.relpath(os.path.join(newbpndir, splitentry[1][len(relbpndir):]), config.workspace_path)
1175 elif relbpdir and splitentry[1].startswith(relbpdir):
1176 splitentry[1] = os.path.relpath(os.path.join(newbpdir, splitentry[1][len(relbpdir):]), config.workspace_path)
1177 entry = '|'.join(splitentry) + '\n'
1178 f.write(entry)
1179 return 0
1180
1181
1182def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refresh=False):
1183 """Get initial and update rev of a recipe. These are the start point of the
1184 whole patchset and start point for the patches to be re-generated/updated.
1185 """
1186 import bb.process
1187
1188 # Get current branch
1189 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
1190 cwd=srctree)
1191 branchname = stdout.rstrip()
1192
1193 # Parse initial rev from recipe if not specified
1194 commits = {}
1195 patches = []
1196 initial_revs = {}
1197 with open(recipe_path, 'r') as f:
1198 for line in f:
1199 pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
1200 match = re.search(pattern, line)
1201 if match:
1202 name = match.group(1)
1203 rev = match.group(2)
1204 if line.startswith('# initial_rev'):
1205 if not (name == "." and initial_rev):
1206 initial_revs[name] = rev
1207 elif line.startswith('# commit') and not force_patch_refresh:
1208 if name not in commits:
1209 commits[name] = [rev]
1210 else:
1211 commits[name].append(rev)
1212 elif line.startswith('# patches_%s:' % branchname):
1213 patches = line.split(':')[-1].strip().split(',')
1214
1215 update_revs = dict(initial_revs)
1216 changed_revs = {}
1217 for name, rev in initial_revs.items():
1218 # Find first actually changed revision
1219 stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
1220 rev, cwd=os.path.join(srctree, name))
1221 newcommits = stdout.split()
1222 if name in commits:
1223 for i in range(min(len(commits[name]), len(newcommits))):
1224 if newcommits[i] == commits[name][i]:
1225 update_revs[name] = commits[name][i]
1226
1227 try:
1228 stdout, _ = bb.process.run('git cherry devtool-patched',
1229 cwd=os.path.join(srctree, name))
1230 except bb.process.ExecutionError as err:
1231 stdout = None
1232
1233 if stdout is not None and not force_patch_refresh:
1234 for line in stdout.splitlines():
1235 if line.startswith('+ '):
1236 rev = line.split()[1]
1237 if rev in newcommits:
1238 if name not in changed_revs:
1239 changed_revs[name] = [rev]
1240 else:
1241 changed_revs[name].append(rev)
1242
1243 return initial_revs, update_revs, changed_revs, patches
1244
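The comment lines that modify() writes into the append file are exactly what _get_patchset_revs() recovers here: '# initial_rev <name>: <sha>' and '# commit <name>: <sha>'. A small sketch of just that parse, reusing the same regex:

import re

PATTERN = re.compile(r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$')

def parse_revs(lines):
    initial_revs, commits = {}, {}
    for line in lines:
        match = PATTERN.search(line)
        if not match:
            continue
        name, rev = match.group(1), match.group(2)
        if line.startswith('# initial_rev'):
            initial_revs[name] = rev
        elif line.startswith('# commit'):
            commits.setdefault(name, []).append(rev)
    return initial_revs, commits

sample = ['# initial_rev .: 1234abcd', '# commit .: 5678beef']
print(parse_revs(sample))  # ({'.': '1234abcd'}, {'.': ['5678beef']})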
1245def _remove_file_entries(srcuri, filelist):
1246 """Remove file:// entries from SRC_URI"""
1247 remaining = filelist[:]
1248 entries = []
1249 for fname in filelist:
1250 basename = os.path.basename(fname)
1251 for i in range(len(srcuri)):
1252 if (srcuri[i].startswith('file://') and
1253 os.path.basename(srcuri[i].split(';')[0]) == basename):
1254 entries.append(srcuri[i])
1255 remaining.remove(fname)
1256 srcuri.pop(i)
1257 break
1258 return entries, remaining
1259
1260def _replace_srcuri_entry(srcuri, filename, newentry):
1261 """Replace entry corresponding to specified file with a new entry"""
1262 basename = os.path.basename(filename)
1263 for i in range(len(srcuri)):
1264 if os.path.basename(srcuri[i].split(';')[0]) == basename:
1265 srcuri.pop(i)
1266 srcuri.insert(i, newentry)
1267 break
1268
1269def _remove_source_files(append, files, destpath, no_report_remove=False, dry_run=False):
1270 """Unlink existing patch files"""
1271
1272 dry_run_suffix = ' (dry-run)' if dry_run else ''
1273
1274 for path in files:
1275 if append:
1276 if not destpath:
1277 raise Exception('destpath should be set here')
1278 path = os.path.join(destpath, os.path.basename(path))
1279
1280 if os.path.exists(path):
1281 if not no_report_remove:
1282 logger.info('Removing file %s%s' % (path, dry_run_suffix))
1283 if not dry_run:
1284 # FIXME "git rm" here would be nice if the file in question is
1285 # tracked
1286 # FIXME there's a chance that this file is referred to by
1287 # another recipe, in which case deleting wouldn't be the
1288 # right thing to do
1289 os.remove(path)
1290 # Remove directory if empty
1291 try:
1292 os.rmdir(os.path.dirname(path))
1293 except OSError as ose:
1294 if ose.errno != errno.ENOTEMPTY:
1295 raise
1296
1297
1298def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
1299 """Export patches from srctree to given location.
1300 Returns three-tuple of dicts:
1301 1. updated - patches that already exist in SRCURI
1302 2. added - new patches that don't exist in SRCURI
1303 3. removed - patches that exist in SRCURI but not in exported patches
1304 In each dict the key is the 'basepath' of the URI and value is:
1305 - for updated and added dicts, a dict with 2 optional keys:
1306 - 'path': the absolute path to the existing file in recipe space (if any)
1307 - 'patchdir': the directory in which the patch should be applied (if any)
1308 - for removed dict, the absolute path to the existing file in recipe space
1309 """
1310 import oe.recipeutils
1311 from oe.patch import GitApplyTree
1312 import bb.process
1313 updated = OrderedDict()
1314 added = OrderedDict()
1315 seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')
1316
1317 existing_patches = dict((os.path.basename(path), path) for path in
1318 oe.recipeutils.get_recipe_patches(rd))
1319 logger.debug('Existing patches: %s' % existing_patches)
1320
1321 # Generate patches from Git, exclude local files directory
1322 patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
1323 GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec)
1324 for dirpath, dirnames, filenames in os.walk(destdir):
1325 new_patches = filenames
1326 reldirpath = os.path.relpath(dirpath, destdir)
1327 for new_patch in new_patches:
1328 # Strip numbering from patch names. If it's a git sequence-named patch,
1329 # the numbers might not match up since we are starting from a different
1330 # revision. This does assume that people are using unique shortlog
1331 # values, but they ought to be anyway...
1332 new_basename = seqpatch_re.match(new_patch).group(2)
1333 match_name = None
1334 old_patch = None
1335 for old_patch in existing_patches:
1336 old_basename = seqpatch_re.match(old_patch).group(2)
1337 old_basename_splitext = os.path.splitext(old_basename)
1338 if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
1339 old_patch_noext = os.path.splitext(old_patch)[0]
1340 match_name = old_patch_noext
1341 break
1342 elif new_basename == old_basename:
1343 match_name = old_patch
1344 break
1345 if match_name:
1346 # Rename patch files
1347 if new_patch != match_name:
1348 bb.utils.rename(os.path.join(destdir, new_patch),
1349 os.path.join(destdir, match_name))
1350 # Need to pop it off the list now before checking changed_revs
1351 oldpath = existing_patches.pop(old_patch)
1352 if changed_revs is not None and dirpath in changed_revs:
1353 # Avoid updating patches that have not actually changed
1354 with open(os.path.join(dirpath, match_name), 'r') as f:
1355 firstlineitems = f.readline().split()
1356 # Looking for "From <hash>" line
1357 if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
1358 if firstlineitems[1] not in changed_revs[dirpath]:
1359 continue
1360 # Recompress if necessary
1361 if oldpath.endswith(('.gz', '.Z')):
1362 bb.process.run(['gzip', match_name], cwd=destdir)
1363 if oldpath.endswith('.gz'):
1364 match_name += '.gz'
1365 else:
1366 match_name += '.Z'
1367 elif oldpath.endswith('.bz2'):
1368 bb.process.run(['bzip2', match_name], cwd=destdir)
1369 match_name += '.bz2'
1370 updated[match_name] = {'path' : oldpath}
1371 if reldirpath != ".":
1372 updated[match_name]['patchdir'] = reldirpath
1373 else:
1374 added[new_patch] = {}
1375 if reldirpath != ".":
1376 added[new_patch]['patchdir'] = reldirpath
1377
1378 return (updated, added, existing_patches)
1379
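The renaming logic above hinges on seqpatch_re: the optional '([0-9]{4}-)?' group strips the NNNN- prefix that git format-patch adds, so '0001-fix-build.patch' and '0007-fix-build.patch' compare equal by shortlog-derived name. A quick demonstration:

import re

seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')

def basename_key(patch):
    # Group 2 is the name with any four-digit sequence prefix removed
    return seqpatch_re.match(patch).group(2)

assert basename_key('0001-fix-build.patch') == 'fix-build.patch'
assert basename_key('0007-fix-build.patch') == 'fix-build.patch'
assert basename_key('local-tweak.patch') == 'local-tweak.patch'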
1380
1381def _create_kconfig_diff(srctree, rd, outfile):
1382 """Create a kconfig fragment"""
1383 import bb.process
1384 # Only update config fragment if both config files exist
1385 orig_config = os.path.join(srctree, '.config.baseline')
1386 new_config = os.path.join(srctree, '.config.new')
1387 if os.path.exists(orig_config) and os.path.exists(new_config):
1388 cmd = ['diff', '--new-line-format=%L', '--old-line-format=',
1389 '--unchanged-line-format=', orig_config, new_config]
1390 pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
1391 stderr=subprocess.PIPE)
1392 stdout, stderr = pipe.communicate()
1393 if pipe.returncode == 1:
1394 logger.info("Updating config fragment %s" % outfile)
1395 with open(outfile, 'wb') as fobj:
1396 fobj.write(stdout)
1397 elif pipe.returncode == 0:
1398 logger.info("Would remove config fragment %s" % outfile)
1399 if os.path.exists(outfile):
1400 # Remove fragment file in case of empty diff
1401 logger.info("Removing config fragment %s" % outfile)
1402 os.unlink(outfile)
1403 else:
1404 raise bb.process.ExecutionError(cmd, pipe.returncode, stdout, stderr)
1405 return True
1406 return False
1407
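The GNU diff invocation above is worth unpacking: with --old-line-format= and --unchanged-line-format= both empty and --new-line-format=%L, the output is exactly the lines added in the new config, i.e. a kconfig fragment; exit status 1 means differences were found, 0 means none, anything else is an error. The same call as a standalone sketch:

import subprocess

def config_fragment(orig_config, new_config):
    """Return lines present only in new_config, or '' if the files match."""
    cmd = ['diff', '--new-line-format=%L', '--old-line-format=',
           '--unchanged-line-format=', orig_config, new_config]
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode == 1:
        return proc.stdout
    if proc.returncode == 0:
        return ''
    raise RuntimeError('diff failed: %s' % proc.stderr)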
1408
1409def _export_local_files(srctree, rd, destdir, srctreebase):
1410 """Copy local files from srctree to given location.
1411 Returns three-tuple of dicts:
1412 1. updated - files that already exist in SRCURI
1413 2. added - new files that don't exist in SRCURI
1414 3. removed - files that exist in SRCURI but not in exported files
1415 In each dict the key is the 'basepath' of the URI and value is:
1416 - for updated and added dicts, a dict with 1 optional key:
1417 - 'path': the absolute path to the existing file in recipe space (if any)
1418 - for removed dict, the absolute path to the existing file in recipe space
1419 """
1420 import oe.recipeutils
1421 import bb.data
1422 import bb.process
1423
1424 # Find out local files (SRC_URI files that exist in the "recipe space").
1425 # Local files that reside in srctree are not included in patch generation.
1426 # Instead they are directly copied over the original source files (in
1427 # recipe space).
1428 existing_files = oe.recipeutils.get_recipe_local_files(rd)
1429
1430 new_set = None
1431 updated = OrderedDict()
1432 added = OrderedDict()
1433 removed = OrderedDict()
1434
1435 # Get current branch and return early with empty lists
1436 # if on one of the override branches
1437 # (local files are provided only for the main branch and processing
1438 # them against lists from recipe overrides will result in mismatches
1439 # and broken modifications to recipes).
1440 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
1441 cwd=srctree)
1442 branchname = stdout.rstrip()
1443 if branchname.startswith(override_branch_prefix):
1444 return (updated, added, removed)
1445
1446 files = _git_modified(srctree)
1447 #if not files:
1448 # files = _ls_tree(srctree)
1449 for f in files:
1450 fullfile = os.path.join(srctree, f)
1451 if os.path.exists(os.path.join(fullfile, ".git")):
1452 # submodules handled elsewhere
1453 continue
1454 if f not in existing_files:
1455 added[f] = {}
1456 if os.path.isdir(os.path.join(srctree, f)):
1457 shutil.copytree(fullfile, os.path.join(destdir, f))
1458 else:
1459 shutil.copy2(fullfile, os.path.join(destdir, f))
1460 elif not os.path.exists(fullfile):
1461 removed[f] = existing_files[f]
1462 elif f in existing_files:
1463 updated[f] = {'path' : existing_files[f]}
1464 if os.path.isdir(os.path.join(srctree, f)):
1465 shutil.copytree(fullfile, os.path.join(destdir, f))
1466 else:
1467 shutil.copy2(fullfile, os.path.join(destdir, f))
1468
1469 # Special handling for kernel config
1470 if bb.data.inherits_class('kernel-yocto', rd):
1471 fragment_fn = 'devtool-fragment.cfg'
1472 fragment_path = os.path.join(destdir, fragment_fn)
1473 if _create_kconfig_diff(srctree, rd, fragment_path):
1474 if os.path.exists(fragment_path):
1475 if fragment_fn in removed:
1476 del removed[fragment_fn]
1477 if fragment_fn not in updated and fragment_fn not in added:
1478 added[fragment_fn] = {}
1479 else:
1480 if fragment_fn in updated:
1481 removed[fragment_fn] = updated[fragment_fn]
1482 del updated[fragment_fn]
1483
1484 # Special handling for cml1, ccmake, etc. bbclasses that generate
1485 # configuration fragment files that are consumed as source files
1486 for frag_class, frag_name in [("cml1", "fragment.cfg"), ("ccmake", "site-file.cmake")]:
1487 if bb.data.inherits_class(frag_class, rd):
1488 srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name)
1489 if os.path.exists(srcpath):
1490 if frag_name in removed:
1491 del removed[frag_name]
1492 if frag_name not in updated:
1493 added[frag_name] = {}
1494 # copy fragment into destdir
1495 shutil.copy2(srcpath, destdir)
1496
1497 return (updated, added, removed)
1498
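A rough sketch of the classification rule implemented above: a file is "added" if SRC_URI does not reference it, "removed" if SRC_URI references it but it no longer exists in the source tree, and "updated" otherwise (all names below are hypothetical):

    # existing_files: basepath -> path in recipe space, as returned by
    # oe.recipeutils.get_recipe_local_files()
    existing_files = {'defconfig': '/layer/recipes-bsp/foo/defconfig'}
    files_on_disk = {'defconfig', 'extra.cfg'}

    added = {f: {} for f in files_on_disk - set(existing_files)}
    removed = {f: p for f, p in existing_files.items() if f not in files_on_disk}
    updated = {f: {'path': existing_files[f]} for f in files_on_disk & set(existing_files)}
    print(added, removed, updated)
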
1499
1500def _determine_files_dir(rd):
1501 """Determine the appropriate files directory for a recipe"""
1502 recipedir = rd.getVar('FILE_DIRNAME')
1503 for entry in rd.getVar('FILESPATH').split(':'):
1504 relpth = os.path.relpath(entry, recipedir)
1505 if os.sep not in relpth:
1506 # One (or zero) levels below only, so we don't put anything in machine-specific directories
1507 if os.path.isdir(entry):
1508 return entry
1509 return os.path.join(recipedir, rd.getVar('BPN'))
1510
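For illustration, the FILESPATH filter above keeps only candidate directories at most one level below the recipe directory, skipping machine-specific subpaths (paths hypothetical):

    import os

    recipedir = '/layer/recipes-core/foo'
    filespath = ['/layer/recipes-core/foo/foo',
                 '/layer/recipes-core/foo/files',
                 '/layer/recipes-core/foo/qemuarm/files']
    for entry in filespath:
        relpth = os.path.relpath(entry, recipedir)
        # 'foo' and 'files' contain no separator and qualify;
        # 'qemuarm/files' does and is skipped as machine-specific
        print(relpth, os.sep not in relpth)
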
1511
1512def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None):
1513 """Implement the 'srcrev' mode of update-recipe"""
1514 import bb.process
1515 import oe.recipeutils
1516
1517 dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
1518
1519 recipefile = rd.getVar('FILE')
1520 recipedir = os.path.basename(recipefile)
1521 logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix))
1522
1523 # Get original SRCREV
1524 old_srcrev = rd.getVar('SRCREV') or ''
1525 if old_srcrev == "INVALID":
1526 raise DevtoolError('Update mode srcrev is only valid for recipes fetched from an SCM repository')
1527 old_srcrev = {'.': old_srcrev}
1528
1529 # Get HEAD revision
1530 try:
1531 stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
1532 except bb.process.ExecutionError as err:
1533 raise DevtoolError('Failed to get HEAD revision in %s: %s' %
1534 (srctree, err))
1535 srcrev = stdout.strip()
1536 if len(srcrev) != 40:
1537 raise DevtoolError('Invalid hash returned by git: %s' % stdout)
1538
1539 destpath = None
1540 remove_files = []
1541 patchfields = {}
1542 patchfields['SRCREV'] = srcrev
1543 orig_src_uri = rd.getVar('SRC_URI', False) or ''
1544 srcuri = orig_src_uri.split()
1545 tempdir = tempfile.mkdtemp(prefix='devtool')
1546 update_srcuri = False
1547 appendfile = None
1548 try:
1549 local_files_dir = tempfile.mkdtemp(dir=tempdir)
1550 srctreebase = workspace[recipename]['srctreebase']
1551 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
1552 removedentries = {}
1553 if not no_remove:
1554 # Find list of existing patches in recipe file
1555 patches_dir = tempfile.mkdtemp(dir=tempdir)
1556 upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
1557 patches_dir)
1558 logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))
1559
1560 # Remove deleted local files and "overlapping" patches
1561 remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value]
1562 if remove_files:
1563 removedentries = _remove_file_entries(srcuri, remove_files)[0]
1564 update_srcuri = True
1565
1566 if appendlayerdir:
1567 files = dict((os.path.join(local_files_dir, key), val) for
1568 key, val in list(upd_f.items()) + list(new_f.items()))
1569 removevalues = {}
1570 if update_srcuri:
1571 removevalues = {'SRC_URI': removedentries}
1572 patchfields['SRC_URI'] = '\\\n '.join(srcuri)
1573 if dry_run_outdir:
1574 logger.info('Creating bbappend (dry-run)')
1575 appendfile, destpath = oe.recipeutils.bbappend_recipe(
1576 rd, appendlayerdir, files, wildcardver=wildcard_version,
1577 extralines=patchfields, removevalues=removevalues,
1578 redirect_output=dry_run_outdir)
1579 else:
1580 files_dir = _determine_files_dir(rd)
1581 for basepath, param in upd_f.items():
1582 path = param['path']
1583 logger.info('Updating file %s%s' % (basepath, dry_run_suffix))
1584 if os.path.isabs(basepath):
1585 # Original file (probably with subdir pointing inside source tree)
1586 # so we do not want to move it, just copy
1587 _copy_file(basepath, path, dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
1588 else:
1589 _move_file(os.path.join(local_files_dir, basepath), path,
1590 dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
1591 update_srcuri = True
1592 for basepath, param in new_f.items():
1593 path = param['path']
1594 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
1595 _move_file(os.path.join(local_files_dir, basepath),
1596 os.path.join(files_dir, basepath),
1597 dry_run_outdir=dry_run_outdir,
1598 base_outdir=recipedir)
1599 srcuri.append('file://%s' % basepath)
1600 update_srcuri = True
1601 if update_srcuri:
1602 patchfields['SRC_URI'] = ' '.join(srcuri)
1603 ret = oe.recipeutils.patch_recipe(rd, recipefile, patchfields, redirect_output=dry_run_outdir)
1604 finally:
1605 shutil.rmtree(tempdir)
1606 if 'git://' not in orig_src_uri:
1607 logger.info('You will need to update SRC_URI within the recipe to '
1608 'point to a git repository where you have pushed your '
1609 'changes')
1610
1611 _remove_source_files(appendlayerdir, remove_files, destpath, no_report_remove, dry_run=dry_run_outdir)
1612 return True, appendfile, remove_files
1613
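A minimal sketch of the HEAD lookup and sanity check performed above (a full SHA-1 object name is always 40 hex characters; srctree is hypothetical):

    import subprocess

    def head_revision(srctree):
        out = subprocess.run(['git', 'rev-parse', 'HEAD'], cwd=srctree,
                             capture_output=True, text=True, check=True)
        rev = out.stdout.strip()
        # anything other than a 40-character hash indicates an error
        if len(rev) != 40:
            raise ValueError('Invalid hash returned by git: %s' % rev)
        return rev
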
1614def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False):
1615 """Implement the 'patch' mode of update-recipe"""
1616 import oe.recipeutils
1617
1618 recipefile = rd.getVar('FILE')
1619 recipedir = os.path.dirname(recipefile)
1620 append = workspace[recipename]['bbappend']
1621 if not os.path.exists(append):
1622 raise DevtoolError('unable to find workspace bbappend for recipe %s' %
1623 recipename)
1624 srctreebase = workspace[recipename]['srctreebase']
1625 relpatchdir = os.path.relpath(srctreebase, srctree)
1626 if relpatchdir == '.':
1627 patchdir_params = {}
1628 else:
1629 patchdir_params = {'patchdir': relpatchdir}
1630
1631 def srcuri_entry(basepath, patchdir_params):
1632 if patchdir_params:
1633 paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items())
1634 else:
1635 paramstr = ''
1636 return 'file://%s%s' % (basepath, paramstr)
1637
1638 initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
1639 if not initial_revs:
1640 raise DevtoolError('Unable to find initial revision - please specify '
1641 'it with --initial-rev')
1642
1643 appendfile = None
1644 dl_dir = rd.getVar('DL_DIR')
1645 if not dl_dir.endswith('/'):
1646 dl_dir += '/'
1647
1648 dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
1649
1650 tempdir = tempfile.mkdtemp(prefix='devtool')
1651 try:
1652 local_files_dir = tempfile.mkdtemp(dir=tempdir)
1653 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
1654
1655 # Get updated patches from source tree
1656 patches_dir = tempfile.mkdtemp(dir=tempdir)
1657 upd_p, new_p, _ = _export_patches(srctree, rd, update_revs,
1658 patches_dir, changed_revs)
1659 # Get all patches from source tree and check if any should be removed
1660 all_patches_dir = tempfile.mkdtemp(dir=tempdir)
1661 _, _, del_p = _export_patches(srctree, rd, initial_revs,
1662 all_patches_dir)
1663 logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
1664 if filter_patches:
1665 new_p = OrderedDict()
1666 upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches)
1667 del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches)
1668 remove_files = []
1669 if not no_remove:
1670 # Remove deleted local files and patches
1671 remove_files = list(del_f.values()) + list(del_p.values())
1672 updatefiles = False
1673 updaterecipe = False
1674 destpath = None
1675 srcuri = (rd.getVar('SRC_URI', False) or '').split()
1676
1677 if appendlayerdir:
1678 files = OrderedDict((os.path.join(local_files_dir, key), val) for
1679 key, val in list(upd_f.items()) + list(new_f.items()))
1680 files.update(OrderedDict((os.path.join(patches_dir, key), val) for
1681 key, val in list(upd_p.items()) + list(new_p.items())))
1682
1683 params = []
1684 for file, param in files.items():
1685 patchdir_param = dict(patchdir_params)
1686 patchdir = param.get('patchdir', ".")
1687 if patchdir != "." :
1688 if patchdir_param:
1689 patchdir_param['patchdir'] += patchdir
1690 else:
1691 patchdir_param['patchdir'] = patchdir
1692 params.append(patchdir_param)
1693
1694 if files or remove_files:
1695 removevalues = None
1696 if remove_files:
1697 removedentries, remaining = _remove_file_entries(
1698 srcuri, remove_files)
1699 if removedentries or remaining:
1700 remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for
1701 item in remaining]
1702 removevalues = {'SRC_URI': removedentries + remaining}
1703 appendfile, destpath = oe.recipeutils.bbappend_recipe(
1704 rd, appendlayerdir, files,
1705 wildcardver=wildcard_version,
1706 removevalues=removevalues,
1707 redirect_output=dry_run_outdir,
1708 params=params)
1709 else:
1710 logger.info('No patches or local source files needed updating')
1711 else:
1712 # Update existing files
1713 files_dir = _determine_files_dir(rd)
1714 for basepath, param in upd_f.items():
1715 path = param['path']
1716 logger.info('Updating file %s' % basepath)
1717 if os.path.isabs(basepath):
1718 # Original file (probably with subdir pointing inside source tree)
1719 # so we do not want to move it, just copy
1720 _copy_file(basepath, path,
1721 dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
1722 else:
1723 _move_file(os.path.join(local_files_dir, basepath), path,
1724 dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
1725 updatefiles = True
1726 for basepath, param in upd_p.items():
1727 path = param['path']
1728 patchdir = param.get('patchdir', ".")
1729 patchdir_param = {}
1730 if patchdir != "." :
1731 patchdir_param = dict(patchdir_params)
1732 if patchdir_param:
1733 patchdir_param['patchdir'] += patchdir
1734 else:
1735 patchdir_param['patchdir'] = patchdir
1736 patchfn = os.path.join(patches_dir, patchdir, basepath)
1737 if os.path.dirname(path) + '/' == dl_dir:
1738 # This is a downloaded patch file - we now need to
1739 # replace the entry in SRC_URI with our local version
1740 logger.info('Replacing remote patch %s with updated local version' % basepath)
1741 path = os.path.join(files_dir, basepath)
1742 _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param))
1743 updaterecipe = True
1744 else:
1745 logger.info('Updating patch %s%s' % (basepath, dry_run_suffix))
1746 _move_file(patchfn, path,
1747 dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
1748 updatefiles = True
1749 # Add any new files
1750 for basepath, param in new_f.items():
1751 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
1752 _move_file(os.path.join(local_files_dir, basepath),
1753 os.path.join(files_dir, basepath),
1754 dry_run_outdir=dry_run_outdir,
1755 base_outdir=recipedir)
1756 srcuri.append(srcuri_entry(basepath, patchdir_params))
1757 updaterecipe = True
1758 for basepath, param in new_p.items():
1759 patchdir = param.get('patchdir', ".")
1760 logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix))
1761 _move_file(os.path.join(patches_dir, patchdir, basepath),
1762 os.path.join(files_dir, basepath),
1763 dry_run_outdir=dry_run_outdir,
1764 base_outdir=recipedir)
1765 params = dict(patchdir_params)
1766 if patchdir != "." :
1767 if params:
1768 params['patchdir'] += patchdir
1769 else:
1770 params['patchdir'] = patchdir
1771
1772 srcuri.append(srcuri_entry(basepath, params))
1773 updaterecipe = True
1774 # Update recipe, if needed
1775 if _remove_file_entries(srcuri, remove_files)[0]:
1776 updaterecipe = True
1777 if updaterecipe:
1778 if not dry_run_outdir:
1779 logger.info('Updating recipe %s' % os.path.basename(recipefile))
1780 ret = oe.recipeutils.patch_recipe(rd, recipefile,
1781 {'SRC_URI': ' '.join(srcuri)},
1782 redirect_output=dry_run_outdir)
1783 elif not updatefiles:
1784 # Neither patches nor recipe were updated
1785 logger.info('No patches or files need updating')
1786 return False, None, []
1787 finally:
1788 shutil.rmtree(tempdir)
1789
1790 _remove_source_files(appendlayerdir, remove_files, destpath, no_report_remove, dry_run=dry_run_outdir)
1791 return True, appendfile, remove_files
1792
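The nested srcuri_entry() helper above serialises a basepath plus optional parameters into a SRC_URI entry; for example (values hypothetical):

    def srcuri_entry(basepath, params):
        paramstr = ''.join(';%s=%s' % (k, v) for k, v in params.items())
        return 'file://%s%s' % (basepath, paramstr)

    print(srcuri_entry('0001-fix-build.patch', {}))
    # file://0001-fix-build.patch
    print(srcuri_entry('0001-fix-build.patch', {'patchdir': '../src'}))
    # file://0001-fix-build.patch;patchdir=../src
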
1793def _guess_recipe_update_mode(srctree, rdata):
1794 """Guess the recipe update mode to use"""
1795 import bb.process
1796 src_uri = (rdata.getVar('SRC_URI') or '').split()
1797 git_uris = [uri for uri in src_uri if uri.startswith('git://')]
1798 if not git_uris:
1799 return 'patch'
1800 # Just use the first URI for now
1801 uri = git_uris[0]
1802 # Check remote branch
1803 params = bb.fetch.decodeurl(uri)[5]
1804 upstr_branch = params['branch'] if 'branch' in params else 'master'
1805 # Check if current branch HEAD is found in upstream branch
1806 stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
1807 head_rev = stdout.rstrip()
1808 stdout, _ = bb.process.run('git branch -r --contains %s' % head_rev,
1809 cwd=srctree)
1810 remote_brs = [branch.strip() for branch in stdout.splitlines()]
1811 if 'origin/' + upstr_branch in remote_brs:
1812 return 'srcrev'
1813
1814 return 'patch'
1815
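A compact sketch of the heuristic above: 'srcrev' mode is only chosen when HEAD is already reachable from the remote-tracking branch named in the git URI, presumably because SRCREV must point at a commit the upstream repository actually contains (branch name and srctree hypothetical):

    import subprocess

    def guess_mode(srctree, upstr_branch='master'):
        run = lambda *cmd: subprocess.run(cmd, cwd=srctree, text=True,
                                          capture_output=True, check=True).stdout
        head_rev = run('git', 'rev-parse', 'HEAD').strip()
        remote_brs = [b.strip() for b in
                      run('git', 'branch', '-r', '--contains', head_rev).splitlines()]
        return 'srcrev' if 'origin/' + upstr_branch in remote_brs else 'patch'
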
1816def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False):
1817 import bb.data
1818 import bb.process
1819 srctree = workspace[recipename]['srctree']
1820 if mode == 'auto':
1821 mode = _guess_recipe_update_mode(srctree, rd)
1822
1823 override_branches = []
1824 mainbranch = None
1825 startbranch = None
1826 if not no_overrides:
1827 stdout, _ = bb.process.run('git branch', cwd=srctree)
1828 other_branches = []
1829 for line in stdout.splitlines():
1830 branchname = line[2:]
1831 if line.startswith('* '):
1832 if 'HEAD' in line:
1833 raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"')
1834 startbranch = branchname
1835 if branchname.startswith(override_branch_prefix):
1836 override_branches.append(branchname)
1837 else:
1838 other_branches.append(branchname)
1839
1840 if override_branches:
1841 logger.debug('_update_recipe: override branches: %s' % override_branches)
1842 logger.debug('_update_recipe: other branches: %s' % other_branches)
1843 if startbranch.startswith(override_branch_prefix):
1844 if len(other_branches) == 1:
1845 mainbranch = other_branches[0]
1846 else:
1847 raise DevtoolError('Unable to determine main branch - please check out the main branch in source tree first')
1848 else:
1849 mainbranch = startbranch
1850
1851 checkedout = None
1852 anyupdated = False
1853 appendfile = None
1854 allremoved = []
1855 if override_branches:
1856 logger.info('Handling main branch (%s)...' % mainbranch)
1857 if startbranch != mainbranch:
1858 bb.process.run('git checkout %s' % mainbranch, cwd=srctree)
1859 checkedout = mainbranch
1860 try:
1861 branchlist = [mainbranch] + override_branches
1862 for branch in branchlist:
1863 crd = bb.data.createCopy(rd)
1864 if branch != mainbranch:
1865 logger.info('Handling branch %s...' % branch)
1866 override = branch[len(override_branch_prefix):]
1867 crd.appendVar('OVERRIDES', ':%s' % override)
1868 bb.process.run('git checkout %s' % branch, cwd=srctree)
1869 checkedout = branch
1870
1871 if mode == 'srcrev':
1872 updated, appendf, removed = _update_recipe_srcrev(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir)
1873 elif mode == 'patch':
1874 updated, appendf, removed = _update_recipe_patch(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir, force_patch_refresh)
1875 else:
1876 raise DevtoolError('update_recipe: invalid mode %s' % mode)
1877 if updated:
1878 anyupdated = True
1879 if appendf:
1880 appendfile = appendf
1881 allremoved.extend(removed)
1882 finally:
1883 if startbranch and checkedout != startbranch:
1884 bb.process.run('git checkout %s' % startbranch, cwd=srctree)
1885
1886 return anyupdated, appendfile, allremoved
1887
1888def update_recipe(args, config, basepath, workspace):
1889 """Entry point for the devtool 'update-recipe' subcommand"""
1890 check_workspace_recipe(workspace, args.recipename)
1891
1892 if args.append:
1893 if not os.path.exists(args.append):
1894 raise DevtoolError('bbappend destination layer directory "%s" '
1895 'does not exist' % args.append)
1896 if not os.path.exists(os.path.join(args.append, 'conf', 'layer.conf')):
1897 raise DevtoolError('conf/layer.conf not found in bbappend '
1898 'destination layer "%s"' % args.append)
1899
1900 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
1901 try:
1902
1903 rd = parse_recipe(config, tinfoil, args.recipename, True)
1904 if not rd:
1905 return 1
1906
1907 dry_run_output = None
1908 dry_run_outdir = None
1909 if args.dry_run:
1910 dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
1911 dry_run_outdir = dry_run_output.name
1912 updated, _, _ = _update_recipe(args.recipename, workspace, rd, args.mode, args.append, args.wildcard_version, args.no_remove, args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides, force_patch_refresh=args.force_patch_refresh)
1913
1914 if updated:
1915 rf = rd.getVar('FILE')
1916 if rf.startswith(config.workspace_path):
1917 logger.warning('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
1918 finally:
1919 tinfoil.shutdown()
1920
1921 return 0
1922
1923
1924def status(args, config, basepath, workspace):
1925 """Entry point for the devtool 'status' subcommand"""
1926 if workspace:
1927 for recipe, value in sorted(workspace.items()):
1928 recipefile = value['recipefile']
1929 if recipefile:
1930 recipestr = ' (%s)' % recipefile
1931 else:
1932 recipestr = ''
1933 print("%s: %s%s" % (recipe, value['srctree'], recipestr))
1934 else:
1935 logger.info('No recipes currently in your workspace - you can use "devtool modify" to work on an existing recipe or "devtool add" to add a new one')
1936 return 0
1937
1938
1939def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
1940 """Reset one or more recipes"""
1941 import bb.process
1942 import oe.path
1943
1944 def clean_preferred_provider(pn, layerconf_path):
1945 """Remove PREFERRED_PROVIDER from layer.conf'"""
1946 import re
1947 layerconf_file = os.path.join(layerconf_path, 'conf', 'layer.conf')
1948 new_layerconf_file = os.path.join(layerconf_path, 'conf', '.layer.conf')
1949 pprovider_found = False
1950 with open(layerconf_file, 'r') as f:
1951 lines = f.readlines()
1952 with open(new_layerconf_file, 'a') as nf:
1953 for line in lines:
1954 pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'
1955 if not re.match(pprovider_exp, line):
1956 nf.write(line)
1957 else:
1958 pprovider_found = True
1959 if pprovider_found:
1960 shutil.move(new_layerconf_file, layerconf_file)
1961 else:
1962 os.remove(new_layerconf_file)
1963
1964 if recipes and not no_clean:
1965 if len(recipes) == 1:
1966 logger.info('Cleaning sysroot for recipe %s...' % recipes[0])
1967 else:
1968 logger.info('Cleaning sysroot for recipes %s...' % ', '.join(recipes))
1969 # If the recipe file itself was created in the workspace, and
1970 # it uses BBCLASSEXTEND, then we need to also clean the other
1971 # variants
1972 targets = []
1973 for recipe in recipes:
1974 targets.append(recipe)
1975 recipefile = workspace[recipe]['recipefile']
1976 if recipefile and os.path.exists(recipefile):
1977 targets.extend(get_bbclassextend_targets(recipefile, recipe))
1978 try:
1979 exec_build_env_command(config.init_path, basepath, 'bitbake -c clean %s' % ' '.join(targets))
1980 except bb.process.ExecutionError as e:
1981 raise DevtoolError('Command \'%s\' failed, output:\n%s\nIf you '
1982 'wish, you may specify -n/--no-clean to '
1983 'skip running this command when resetting' %
1984 (e.command, e.stdout))
1985
1986 for pn in recipes:
1987 _check_preserve(config, pn)
1988
1989 appendfile = workspace[pn]['bbappend']
1990 if os.path.exists(appendfile):
1991 # This shouldn't happen, but is possible if devtool errored out prior to
1992 # writing the md5 file. We need to delete this here or the recipe won't
1993 # actually be reset
1994 os.remove(appendfile)
1995
1996 preservepath = os.path.join(config.workspace_path, 'attic', pn, pn)
1997 def preservedir(origdir):
1998 if os.path.exists(origdir):
1999 for root, dirs, files in os.walk(origdir):
2000 for fn in files:
2001 logger.warning('Preserving %s in %s' % (fn, preservepath))
2002 _move_file(os.path.join(origdir, fn),
2003 os.path.join(preservepath, fn))
2004 for dn in dirs:
2005 preservedir(os.path.join(root, dn))
2006 os.rmdir(origdir)
2007
2008 recipefile = workspace[pn]['recipefile']
2009 if recipefile and oe.path.is_path_parent(config.workspace_path, recipefile):
2010 # This should always be true if recipefile is set, but just in case
2011 preservedir(os.path.dirname(recipefile))
2012 # We don't automatically create this dir next to appends, but the user can
2013 preservedir(os.path.join(config.workspace_path, 'appends', pn))
2014
2015 srctreebase = workspace[pn]['srctreebase']
2016 if os.path.isdir(srctreebase):
2017 if os.listdir(srctreebase):
2018 if remove_work:
2019 logger.info('-r argument used on %s, removing source tree.'
2020 ' You will lose any unsaved work' % pn)
2021 shutil.rmtree(srctreebase)
2022 else:
2023 # We don't want to risk wiping out any work in progress
2024 if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')):
2025 from datetime import datetime
2026 preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
2027 logger.info('Preserving source tree in %s\nIf you no '
2028 'longer need it then please delete it manually.\n'
2029 'It is also possible to reuse it via devtool source tree argument.'
2030 % preservesrc)
2031 bb.utils.mkdirhier(os.path.dirname(preservesrc))
2032 shutil.move(srctreebase, preservesrc)
2033 else:
2034 logger.info('Leaving source tree %s as-is; if you no '
2035 'longer need it then please delete it manually'
2036 % srctreebase)
2037 else:
2038 # This is unlikely, but if it's empty we can just remove it
2039 os.rmdir(srctreebase)
2040
2041 clean_preferred_provider(pn, config.workspace_path)
2042
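The anchored regular expression used by clean_preferred_provider() above only drops exact PREFERRED_PROVIDER assignments for the recipe being reset; a minimal illustration (lines hypothetical):

    import re

    pn = 'foo'
    pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'
    for line in ['PREFERRED_PROVIDER_virtual/kernel = "foo"',
                 'PREFERRED_PROVIDER_virtual/kernel = "foobar"']:
        # only the exact match is removed; "foobar" survives the filter
        print(line, '->', bool(re.match(pprovider_exp, line)))
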
2043def reset(args, config, basepath, workspace):
2044 """Entry point for the devtool 'reset' subcommand"""
2045
2046 recipes = ""
2047
2048 if args.recipename:
2049 if args.all:
2050 raise DevtoolError("Recipe cannot be specified if -a/--all is used")
2051 else:
2052 for recipe in args.recipename:
2053 check_workspace_recipe(workspace, recipe, checksrc=False)
2054 elif not args.all:
2055 raise DevtoolError("Recipe must be specified, or specify -a/--all to "
2056 "reset all recipes")
2057 if args.all:
2058 recipes = list(workspace.keys())
2059 else:
2060 recipes = args.recipename
2061
2062 _reset(recipes, args.no_clean, args.remove_work, config, basepath, workspace)
2063
2064 return 0
2065
2066
2067def _get_layer(layername, d):
2068 """Determine the base layer path for the specified layer name/path"""
2069 layerdirs = d.getVar('BBLAYERS').split()
2070 layers = {} # {basename: layer_paths}
2071 for p in layerdirs:
2072 bn = os.path.basename(p)
2073 if bn not in layers:
2074 layers[bn] = [p]
2075 else:
2076 layers[bn].append(p)
2077 # Provide some shortcuts
2078 if layername.lower() in ['oe-core', 'openembedded-core']:
2079 layername = 'meta'
2080 layer_paths = layers.get(layername, None)
2081 if not layer_paths:
2082 return os.path.abspath(layername)
2083 elif len(layer_paths) == 1:
2084 return os.path.abspath(layer_paths[0])
2085 else:
2086 # multiple layers having the same base name
2087 logger.warning("Multiple layers have the same base name '%s', use the first one '%s'." % (layername, layer_paths[0]))
2088 logger.warning("Consider using path instead of base name to specify layer:\n\t\t%s" % '\n\t\t'.join(layer_paths))
2089 return os.path.abspath(layer_paths[0])
2090
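For example, the name resolution above behaves roughly as follows with a hypothetical BBLAYERS (paths invented):

    import os

    layers = {'meta': ['/srv/poky/meta'],
              'meta-custom': ['/srv/layers/meta-custom']}  # basename -> paths

    def get_layer(layername):
        if layername.lower() in ['oe-core', 'openembedded-core']:
            layername = 'meta'                 # convenience shortcut
        layer_paths = layers.get(layername)
        if not layer_paths:
            # fall back to treating the argument as a path
            return os.path.abspath(layername)
        return os.path.abspath(layer_paths[0])

    print(get_layer('oe-core'))        # /srv/poky/meta
    print(get_layer('meta-custom'))    # /srv/layers/meta-custom
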
2091
2092def finish(args, config, basepath, workspace):
2093 """Entry point for the devtool 'finish' subcommand"""
2094 import bb
2095 import oe.recipeutils
2096
2097 check_workspace_recipe(workspace, args.recipename)
2098
2099 dry_run_suffix = ' (dry-run)' if args.dry_run else ''
2100
2101 # Grab the equivalent of COREBASE without having to initialise tinfoil
2102 corebasedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
2103
2104 srctree = workspace[args.recipename]['srctree']
2105 check_git_repo_op(srctree, [corebasedir])
2106 dirty = check_git_repo_dirty(srctree)
2107 if dirty:
2108 if args.force:
2109 logger.warning('Source tree is not clean, continuing as requested by -f/--force')
2110 else:
2111 raise DevtoolError('Source tree is not clean:\n\n%s\nEnsure you have committed your changes or use -f/--force if you are sure there\'s nothing that needs to be committed' % dirty)
2112
2113 no_clean = args.no_clean
2114 remove_work=args.remove_work
2115 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
2116 try:
2117 rd = parse_recipe(config, tinfoil, args.recipename, True)
2118 if not rd:
2119 return 1
2120
2121 destlayerdir = _get_layer(args.destination, tinfoil.config_data)
2122 recipefile = rd.getVar('FILE')
2123 recipedir = os.path.dirname(recipefile)
2124 origlayerdir = oe.recipeutils.find_layerdir(recipefile)
2125
2126 if not os.path.isdir(destlayerdir):
2127 raise DevtoolError('Unable to find layer or directory matching "%s"' % args.destination)
2128
2129 if os.path.abspath(destlayerdir) == config.workspace_path:
2130 raise DevtoolError('"%s" specifies the workspace layer - that is not a valid destination' % args.destination)
2131
2132 # If it's an upgrade, grab the original path
2133 origpath = None
2134 origfilelist = None
2135 append = workspace[args.recipename]['bbappend']
2136 with open(append, 'r') as f:
2137 for line in f:
2138 if line.startswith('# original_path:'):
2139 origpath = line.split(':')[1].strip()
2140 elif line.startswith('# original_files:'):
2141 origfilelist = line.split(':')[1].split()
2142
2143 destlayerbasedir = oe.recipeutils.find_layerdir(destlayerdir)
2144
2145 if origlayerdir == config.workspace_path:
2146 # Recipe file itself is in workspace, update it there first
2147 appendlayerdir = None
2148 origrelpath = None
2149 if origpath:
2150 origlayerpath = oe.recipeutils.find_layerdir(origpath)
2151 if origlayerpath:
2152 origrelpath = os.path.relpath(origpath, origlayerpath)
2153 destpath = oe.recipeutils.get_bbfile_path(rd, destlayerdir, origrelpath)
2154 if not destpath:
2155 raise DevtoolError("Unable to determine destination layer path - check that %s specifies an actual layer and %s/conf/layer.conf specifies BBFILES. You may also need to specify a more complete path." % (args.destination, destlayerdir))
2156 # Warn if the layer isn't in bblayers.conf (the code to create a bbappend will do this in other cases)
2157 layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
2158 if not os.path.abspath(destlayerbasedir) in layerdirs:
2159 bb.warn('Specified destination layer is not currently enabled in bblayers.conf, so the %s recipe will now be unavailable in your current configuration until you add the layer there' % args.recipename)
2160
2161 elif destlayerdir == origlayerdir:
2162 # Same layer, update the original recipe
2163 appendlayerdir = None
2164 destpath = None
2165 else:
2166 # Create/update a bbappend in the specified layer
2167 appendlayerdir = destlayerdir
2168 destpath = None
2169
2170 # Actually update the recipe / bbappend
2171 removing_original = (origpath and origfilelist and oe.recipeutils.find_layerdir(origpath) == destlayerbasedir)
2172 dry_run_output = None
2173 dry_run_outdir = None
2174 if args.dry_run:
2175 dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
2176 dry_run_outdir = dry_run_output.name
2177 updated, appendfile, removed = _update_recipe(args.recipename, workspace, rd, args.mode, appendlayerdir, wildcard_version=True, no_remove=False, no_report_remove=removing_original, initial_rev=args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides, force_patch_refresh=args.force_patch_refresh)
2178 removed = [os.path.relpath(pth, recipedir) for pth in removed]
2179
2180 # Remove any old files in the case of an upgrade
2181 if removing_original:
2182 for fn in origfilelist:
2183 fnp = os.path.join(origpath, fn)
2184 if fn in removed or not os.path.exists(os.path.join(recipedir, fn)):
2185 logger.info('Removing file %s%s' % (fnp, dry_run_suffix))
2186 if not args.dry_run:
2187 try:
2188 os.remove(fnp)
2189 except FileNotFoundError:
2190 pass
2191
2192 if origlayerdir == config.workspace_path and destpath:
2193 # Recipe file itself is in the workspace - need to move it and any
2194 # associated files to the specified layer
2195 no_clean = True
2196 logger.info('Moving recipe file to %s%s' % (destpath, dry_run_suffix))
2197 for root, _, files in os.walk(recipedir):
2198 for fn in files:
2199 srcpath = os.path.join(root, fn)
2200 relpth = os.path.relpath(os.path.dirname(srcpath), recipedir)
2201 destdir = os.path.abspath(os.path.join(destpath, relpth))
2202 destfp = os.path.join(destdir, fn)
2203 _move_file(srcpath, destfp, dry_run_outdir=dry_run_outdir, base_outdir=destpath)
2204
2205 if dry_run_outdir:
2206 import difflib
2207 comparelist = []
2208 for root, _, files in os.walk(dry_run_outdir):
2209 for fn in files:
2210 outf = os.path.join(root, fn)
2211 relf = os.path.relpath(outf, dry_run_outdir)
2212 logger.debug('dry-run: output file %s' % relf)
2213 if fn.endswith('.bb'):
2214 if origfilelist and origpath and destpath:
2215 # Need to match this up with the pre-upgrade recipe file
2216 for origf in origfilelist:
2217 if origf.endswith('.bb'):
2218 comparelist.append((os.path.abspath(os.path.join(origpath, origf)),
2219 outf,
2220 os.path.abspath(os.path.join(destpath, relf))))
2221 break
2222 else:
2223 # Compare to the existing recipe
2224 comparelist.append((recipefile, outf, recipefile))
2225 elif fn.endswith('.bbappend'):
2226 if appendfile:
2227 if os.path.exists(appendfile):
2228 comparelist.append((appendfile, outf, appendfile))
2229 else:
2230 comparelist.append((None, outf, appendfile))
2231 else:
2232 if destpath:
2233 recipedest = destpath
2234 elif appendfile:
2235 recipedest = os.path.dirname(appendfile)
2236 else:
2237 recipedest = os.path.dirname(recipefile)
2238 destfp = os.path.join(recipedest, relf)
2239 if os.path.exists(destfp):
2240 comparelist.append((destfp, outf, destfp))
2241 output = ''
2242 for oldfile, newfile, newfileshow in comparelist:
2243 if oldfile:
2244 with open(oldfile, 'r') as f:
2245 oldlines = f.readlines()
2246 else:
2247 oldfile = '/dev/null'
2248 oldlines = []
2249 with open(newfile, 'r') as f:
2250 newlines = f.readlines()
2251 if not newfileshow:
2252 newfileshow = newfile
2253 diff = difflib.unified_diff(oldlines, newlines, oldfile, newfileshow)
2254 difflines = list(diff)
2255 if difflines:
2256 output += ''.join(difflines)
2257 if output:
2258 logger.info('Diff of changed files:\n%s' % output)
2259 finally:
2260 tinfoil.shutdown()
2261
2262 # Everything else has succeeded, we can now reset
2263 if args.dry_run:
2264 logger.info('Resetting recipe (dry-run)')
2265 else:
2266 _reset([args.recipename], no_clean=no_clean, remove_work=remove_work, config=config, basepath=basepath, workspace=workspace)
2267
2268 return 0
2269
2270
2271def get_default_srctree(config, recipename=''):
2272 """Get the default srctree path"""
2273 srctreeparent = config.get('General', 'default_source_parent_dir', config.workspace_path)
2274 if recipename:
2275 return os.path.join(srctreeparent, 'sources', recipename)
2276 else:
2277 return os.path.join(srctreeparent, 'sources')
2278
2279def register_commands(subparsers, context):
2280 """Register devtool subcommands from this plugin"""
2281
2282 defsrctree = get_default_srctree(context.config)
2283 parser_add = subparsers.add_parser('add', help='Add a new recipe',
2284 description='Adds a new recipe to the workspace to build a specified source tree. Can optionally fetch a remote URI and unpack it to create the source tree.',
2285 group='starting', order=100)
2286 parser_add.add_argument('recipename', nargs='?', help='Name for new recipe to add (just name - no version, path or extension). If not specified, will attempt to auto-detect it.')
2287 parser_add.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
2288 parser_add.add_argument('fetchuri', nargs='?', help='Fetch the specified URI and extract it to create the source tree')
2289 group = parser_add.add_mutually_exclusive_group()
2290 group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
2291 group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
2292 parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
2293 parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
2294 parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true")
2295 parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
2296 parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
2297 group = parser_add.add_mutually_exclusive_group()
2298 group.add_argument('--srcrev', '-S', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
2299 group.add_argument('--autorev', '-a', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
2300 parser_add.add_argument('--srcbranch', '-B', help='Branch in source repository if fetching from an SCM such as git (default master)')
2301 parser_add.add_argument('--binary', '-b', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure). Useful with binary packages e.g. RPMs.', action='store_true')
2302 parser_add.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
2303 parser_add.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
parser_add.add_argument('--mirrors', help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).', action="store_true")
2305 parser_add.add_argument('--provides', '-p', help='Specify an alias for the item provided by the recipe. E.g. virtual/libgl')
2306 parser_add.set_defaults(func=add, fixed_setup=context.fixed_setup)
2307
2308 parser_modify = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
2309 description='Sets up the build environment to modify the source for an existing recipe. The default behaviour is to extract the source being fetched by the recipe into a git tree so you can work on it; alternatively if you already have your own pre-prepared source tree you can specify -n/--no-extract.',
2310 group='starting', order=90)
2311 parser_modify.add_argument('recipename', help='Name of existing recipe to edit (just name - no version, path or extension)')
2312 parser_modify.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
2313 parser_modify.add_argument('--wildcard', '-w', action="store_true", help='Use wildcard for unversioned bbappend')
2314 group = parser_modify.add_mutually_exclusive_group()
2315 group.add_argument('--extract', '-x', action="store_true", help='Extract source for recipe (default)')
2316 group.add_argument('--no-extract', '-n', action="store_true", help='Do not extract source, expect it to exist')
2317 group = parser_modify.add_mutually_exclusive_group()
2318 group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
2319 group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
2320 parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
2321 parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
2322 parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
2323 parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe')
2324 parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup)
2325
2326 parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
2327 description='Extracts the source for an existing recipe',
2328 group='advanced')
2329 parser_extract.add_argument('recipename', help='Name of recipe to extract the source for')
2330 parser_extract.add_argument('srctree', help='Path to where to extract the source tree')
2331 parser_extract.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (default "%(default)s")')
2332 parser_extract.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
2333 parser_extract.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
2334 parser_extract.set_defaults(func=extract, fixed_setup=context.fixed_setup)
2335
2336 parser_sync = subparsers.add_parser('sync', help='Synchronize the source tree for an existing recipe',
2337 description='Synchronize the previously extracted source tree for an existing recipe',
2338 formatter_class=argparse.ArgumentDefaultsHelpFormatter,
2339 group='advanced')
2340 parser_sync.add_argument('recipename', help='Name of recipe to sync the source for')
2341 parser_sync.add_argument('srctree', help='Path to the source tree')
2342 parser_sync.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
2343 parser_sync.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
2344 parser_sync.set_defaults(func=sync, fixed_setup=context.fixed_setup)
2345
2346 parser_rename = subparsers.add_parser('rename', help='Rename a recipe file in the workspace',
2347 description='Renames the recipe file for a recipe in the workspace, changing the name or version part or both, ensuring that all references within the workspace are updated at the same time. Only works when the recipe file itself is in the workspace, e.g. after devtool add. Particularly useful when devtool add did not automatically determine the correct name.',
2348 group='working', order=10)
2349 parser_rename.add_argument('recipename', help='Current name of recipe to rename')
2350 parser_rename.add_argument('newname', nargs='?', help='New name for recipe (optional, not needed if you only want to change the version)')
2351 parser_rename.add_argument('--version', '-V', help='Change the version (NOTE: this does not change the version fetched by the recipe, just the version in the recipe file name)')
2352 parser_rename.add_argument('--no-srctree', '-s', action='store_true', help='Do not rename the source tree directory (if the default source tree path has been used) - keeping the old name may be desirable if there are internal/other external references to this path')
2353 parser_rename.set_defaults(func=rename)
2354
2355 parser_update_recipe = subparsers.add_parser('update-recipe', help='Apply changes from external source tree to recipe',
2356 description='Applies changes from external source tree to a recipe (updating/adding/removing patches as necessary, or by updating SRCREV). Note that these changes need to have been committed to the git repository in order to be recognised.',
2357 group='working', order=-90)
2358 parser_update_recipe.add_argument('recipename', help='Name of recipe to update')
2359 parser_update_recipe.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
2360 parser_update_recipe.add_argument('--initial-rev', help='Override starting revision for patches')
2361 parser_update_recipe.add_argument('--append', '-a', help='Write changes to a bbappend in the specified layer instead of the recipe', metavar='LAYERDIR')
2362 parser_update_recipe.add_argument('--wildcard-version', '-w', help='In conjunction with -a/--append, use a wildcard to make the bbappend apply to any recipe version', action='store_true')
2363 parser_update_recipe.add_argument('--no-remove', '-n', action="store_true", help='Don\'t remove patches, only add or update')
2364 parser_update_recipe.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
2365 parser_update_recipe.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
2366 parser_update_recipe.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
2367 parser_update_recipe.set_defaults(func=update_recipe)
2368
2369 parser_status = subparsers.add_parser('status', help='Show workspace status',
2370 description='Lists recipes currently in your workspace and the paths to their respective external source trees',
2371 group='info', order=100)
2372 parser_status.set_defaults(func=status)
2373
2374 parser_reset = subparsers.add_parser('reset', help='Remove a recipe from your workspace',
description='Removes the specified recipe(s) from your workspace (resetting their state back to that defined by the metadata).',
2376 group='working', order=-100)
2377 parser_reset.add_argument('recipename', nargs='*', help='Recipe to reset')
2378 parser_reset.add_argument('--all', '-a', action="store_true", help='Reset all recipes (clear workspace)')
2379 parser_reset.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
2380 parser_reset.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory along with append')
2381 parser_reset.set_defaults(func=reset)
2382
2383 parser_finish = subparsers.add_parser('finish', help='Finish working on a recipe in your workspace',
2384 description='Pushes any committed changes to the specified recipe to the specified layer and removes it from your workspace. Roughly equivalent to an update-recipe followed by reset, except the update-recipe step will do the "right thing" depending on the recipe and the destination layer specified. Note that your changes must have been committed to the git repository in order to be recognised.',
2385 group='working', order=-100)
2386 parser_finish.add_argument('recipename', help='Recipe to finish')
2387 parser_finish.add_argument('destination', help='Layer/path to put recipe into. Can be the name of a layer configured in your bblayers.conf, the path to the base of a layer, or a partial path inside a layer. %(prog)s will attempt to complete the path based on the layer\'s structure.')
2388 parser_finish.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
2389 parser_finish.add_argument('--initial-rev', help='Override starting revision for patches')
2390 parser_finish.add_argument('--force', '-f', action="store_true", help='Force continuing even if there are uncommitted changes in the source tree repository')
2391 parser_finish.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory under workspace')
2392 parser_finish.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
2393 parser_finish.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
2394 parser_finish.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
2395 parser_finish.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
2396 parser_finish.set_defaults(func=finish)
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
deleted file mode 100644
index 0dace1fb24..0000000000
--- a/scripts/lib/devtool/upgrade.py
+++ /dev/null
@@ -1,715 +0,0 @@
1# Development tool - upgrade command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool upgrade plugin"""
8
9import os
10import sys
11import re
12import shutil
13import tempfile
14import logging
15import argparse
16import scriptutils
17import errno
18import bb
19
20devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib'
21sys.path = sys.path + [devtool_path]
22
23import oe.recipeutils
24from devtool import standard
25from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs, check_prerelease_version
26
27logger = logging.getLogger('devtool')
28
29def _run(cmd, cwd=''):
30 logger.debug("Running command %s> %s" % (cwd,cmd))
31 return bb.process.run('%s' % cmd, cwd=cwd)
32
33def _get_srctree(tmpdir):
34 srctree = tmpdir
35 dirs = os.listdir(tmpdir)
36 if len(dirs) == 1:
37 srctree = os.path.join(tmpdir, dirs[0])
38 else:
39 raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir,dirs))
40 return srctree
41
42def _copy_source_code(orig, dest):
43 for path in standard._ls_tree(orig):
44 dest_dir = os.path.join(dest, os.path.dirname(path))
45 bb.utils.mkdirhier(dest_dir)
46 dest_path = os.path.join(dest, path)
47 shutil.move(os.path.join(orig, path), dest_path)
48
49def _remove_patch_dirs(recipefolder):
50 for root, dirs, files in os.walk(recipefolder):
51 for d in dirs:
52 shutil.rmtree(os.path.join(root,d))
53
54def _recipe_contains(rd, var):
55 rf = rd.getVar('FILE')
56 varfiles = oe.recipeutils.get_var_files(rf, [var], rd)
57 for var, fn in varfiles.items():
58 if fn and fn.startswith(os.path.dirname(rf) + os.sep):
59 return True
60 return False
61
62def _rename_recipe_dirs(oldpv, newpv, path):
63 for root, dirs, files in os.walk(path):
64 # Rename directories with the version in their name
65 for olddir in dirs:
66 if olddir.find(oldpv) != -1:
67 newdir = olddir.replace(oldpv, newpv)
68 if olddir != newdir:
69 shutil.move(os.path.join(path, olddir), os.path.join(path, newdir))
70 # Rename any inc files with the version in their name (unusual, but possible)
71 for oldfile in files:
72 if oldfile.endswith('.inc'):
73 if oldfile.find(oldpv) != -1:
74 newfile = oldfile.replace(oldpv, newpv)
75 if oldfile != newfile:
76 bb.utils.rename(os.path.join(path, oldfile),
77 os.path.join(path, newfile))
78
79def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path):
80 oldrecipe = os.path.basename(oldrecipe)
81 if oldrecipe.endswith('_%s.bb' % oldpv):
82 newrecipe = '%s_%s.bb' % (pn, newpv)
83 if oldrecipe != newrecipe:
84 shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
85 else:
86 newrecipe = oldrecipe
87 return os.path.join(path, newrecipe)
88
89def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path):
90 _rename_recipe_dirs(oldpv, newpv, path)
91 return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path)
92
93def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
94 """Writes an append file"""
95 if not os.path.exists(rc):
96 raise DevtoolError("bbappend not created because %s does not exist" % rc)
97
98 appendpath = os.path.join(workspace, 'appends')
99 if not os.path.exists(appendpath):
100 bb.utils.mkdirhier(appendpath)
101
102 brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename
103
104 srctree = os.path.abspath(srctree)
105 pn = d.getVar('PN')
106 af = os.path.join(appendpath, '%s.bbappend' % brf)
107 with open(af, 'w') as f:
108 f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
109 # Local files can be modified/tracked in separate subdir under srctree
110 # Mostly useful for packages with S != WORKDIR
111 f.write('FILESPATH:prepend := "%s:"\n' %
112 os.path.join(srctreebase, 'oe-local-files'))
113 f.write('# srctreebase: %s\n' % srctreebase)
114 f.write('inherit externalsrc\n')
115 f.write(('# NOTE: We use pn- overrides here to avoid affecting '
116 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
117 f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
118 b_is_s = use_external_build(same_dir, no_same_dir, d)
119 if b_is_s:
120 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
121 f.write('\n')
122 if revs:
123 for name, rev in revs.items():
124 f.write('# initial_rev %s: %s\n' % (name, rev))
125 if copied:
126 f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
127 f.write('# original_files: %s\n' % ' '.join(copied))
128 return af
129
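For a hypothetical recipe foo with its source tree in /home/user/sources/foo (and an external build directory in use), the append written above would look roughly like:

    FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"

    FILESPATH:prepend := "/home/user/sources/foo/oe-local-files:"
    # srctreebase: /home/user/sources/foo
    inherit externalsrc
    # NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND
    EXTERNALSRC:pn-foo = "/home/user/sources/foo"
    EXTERNALSRC_BUILD:pn-foo = "/home/user/sources/foo"
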
130def _cleanup_on_error(rd, srctree):
131 if os.path.exists(rd):
132 shutil.rmtree(rd)
133 srctree = os.path.abspath(srctree)
134 if os.path.exists(srctree):
135 shutil.rmtree(srctree)
136
137def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
138 if not keep_failure:
139 _cleanup_on_error(rd, srctree)
140 logger.error(e)
141 if extramsg:
142 logger.error(extramsg)
143 if keep_failure:
144 logger.info('Preserving failed upgrade files (--keep-failure)')
145 sys.exit(1)
146
147def _get_uri(rd):
148 srcuris = rd.getVar('SRC_URI').split()
149 if not srcuris:
150 raise DevtoolError('SRC_URI not found in the recipe')
151 # Get first non-local entry in SRC_URI - usually by convention it's
152 # the first entry, but not always!
153 srcuri = None
154 for entry in srcuris:
155 if not entry.startswith('file://'):
156 srcuri = entry
157 break
158 if not srcuri:
159 raise DevtoolError('Unable to find non-local entry in SRC_URI')
160 srcrev = '${AUTOREV}'
161 if '://' in srcuri:
162 # Fetch a URL
163 rev_re = re.compile(';rev=([^;]+)')
164 res = rev_re.search(srcuri)
165 if res:
166 srcrev = res.group(1)
167 srcuri = rev_re.sub('', srcuri)
168 return srcuri, srcrev
169
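The ;rev= handling in _get_uri() above can be exercised in isolation (URI hypothetical):

    import re

    srcuri = 'git://example.com/foo.git;protocol=https;rev=1a2b3c'
    rev_re = re.compile(';rev=([^;]+)')
    res = rev_re.search(srcuri)
    srcrev = res.group(1) if res else '${AUTOREV}'
    srcuri = rev_re.sub('', srcuri)
    print(srcrev)   # 1a2b3c
    print(srcuri)   # git://example.com/foo.git;protocol=https
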
170def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
171 """Extract sources of a recipe with a new version"""
172 import oe.patch
173
174 def __run(cmd):
175 """Simple wrapper which calls _run with srctree as cwd"""
176 return _run(cmd, srctree)
177
178 crd = rd.createCopy()
179
180 pv = crd.getVar('PV')
181 crd.setVar('PV', newpv)
182
183 tmpsrctree = None
184 uri, rev = _get_uri(crd)
185 if srcrev:
186 rev = srcrev
187 paths = [srctree]
188 if uri.startswith('git://') or uri.startswith('gitsm://'):
189 __run('git fetch')
190 __run('git checkout %s' % rev)
191 __run('git tag -f --no-sign devtool-base-new')
192 __run('git submodule update --recursive')
193 __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'')
194 (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
195 paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
196 checksums = {}
197 _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
198 srcsubdir_rel = params.get('destsuffix', 'git')
199 if not srcbranch:
200 check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
201 get_branch = [x.strip() for x in check_branch.splitlines()]
202 # Remove HEAD reference point and drop remote prefix
203 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
204 if len(get_branch) == 1:
205 # If srcrev is on only ONE branch, then use that branch
206 srcbranch = get_branch[0]
207 elif 'main' in get_branch:
208 # If srcrev is on multiple branches, then choose 'main' if it is one of them
209 srcbranch = 'main'
210 elif 'master' in get_branch:
211 # Otherwise choose 'master' if it is one of the branches
212 srcbranch = 'master'
213 else:
214 # If get_branch still contains more than one branch, display an error and exit.
215 mbrch = '\n ' + '\n '.join(get_branch)
216 raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
217 else:
218 __run('git checkout devtool-base -b devtool-%s' % newpv)
219
220 tmpdir = tempfile.mkdtemp(prefix='devtool')
221 try:
222 checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
223 except scriptutils.FetchUrlFailure as e:
224 raise DevtoolError(e)
225
226 if ftmpdir and keep_temp:
227 logger.info('Fetch temp directory is %s' % ftmpdir)
228
229 tmpsrctree = _get_srctree(tmpdir)
230 srctree = os.path.abspath(srctree)
231 srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)
232
233 # Delete all sources so we ensure no stray files are left over
234 for item in os.listdir(srctree):
235 if item in ['.git', 'oe-local-files']:
236 continue
237 itempath = os.path.join(srctree, item)
238 if os.path.isdir(itempath):
239 shutil.rmtree(itempath)
240 else:
241 os.remove(itempath)
242
243 # Copy in new ones
244 _copy_source_code(tmpsrctree, srctree)
245
246 (stdout,_) = __run('git ls-files --modified --others')
247 filelist = stdout.splitlines()
248 pbar = bb.ui.knotty.BBProgress('Adding changed files', len(filelist))
249 pbar.start()
250 batchsize = 100
251 for i in range(0, len(filelist), batchsize):
252 batch = filelist[i:i+batchsize]
253 __run('git add -f -A %s' % ' '.join(['"%s"' % item for item in batch]))
254 pbar.update(i)
255 pbar.finish()
256
257 useroptions = []
258 oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
259 __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
260 __run('git tag -f --no-sign devtool-base-%s' % newpv)
261
262 revs = {}
263 for path in paths:
264 (stdout, _) = _run('git rev-parse HEAD', cwd=path)
265 revs[os.path.relpath(path, srctree)] = stdout.rstrip()
266
267 if no_patch:
268 patches = oe.recipeutils.get_recipe_patches(crd)
269 if patches:
270 logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
271 else:
272 for path in paths:
273 _run('git checkout devtool-patched -b %s' % branch, cwd=path)
274 (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
275 branches_to_rebase = [branch] + stdout.split()
276 target_branch = revs[os.path.relpath(path, srctree)]
277
278 # There is a bug (or feature?) in git rebase where if a commit with
279 # a note is fully rebased away by being part of an old commit, the
280 # note is still attached to the old commit. Avoid this by making
281 # sure all old devtool related commits have a note attached to them
282 # (this assumes git config notes.rewriteMode is set to ignore).
283 (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
284 for rev in stdout.splitlines():
285 if not oe.patch.GitApplyTree.getNotes(path, rev):
286 oe.patch.GitApplyTree.addNote(path, rev, "dummy")
287
288 for b in branches_to_rebase:
289 logger.info("Rebasing {} onto {}".format(b, target_branch))
290 _run('git checkout %s' % b, cwd=path)
291 try:
292 _run('git rebase %s' % target_branch, cwd=path)
293 except bb.process.ExecutionError as e:
294 if 'conflict' in e.stdout:
295 logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
296 _run('git rebase --abort', cwd=path)
297 else:
298 logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
299
300 # Remove any dummy notes added above.
301 (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
302 for rev in stdout.splitlines():
303 oe.patch.GitApplyTree.removeNote(path, rev, "dummy")
304
305 _run('git checkout %s' % branch, cwd=path)
306
307 if tmpsrctree:
308 if keep_temp:
309 logger.info('Preserving temporary directory %s' % tmpsrctree)
310 else:
311 shutil.rmtree(tmpsrctree)
312 if tmpdir != tmpsrctree:
313 shutil.rmtree(tmpdir)
314
315 return (revs, checksums, srcbranch, srcsubdir_rel)
316
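The batched 'git add' above keeps each invocation below typical command-line length limits. A minimal standalone sketch of the same pattern, assuming plain subprocess and a hypothetical file list:

    import subprocess

    def add_in_batches(filelist, batchsize=100):
        for i in range(0, len(filelist), batchsize):
            batch = filelist[i:i + batchsize]
            # -f re-adds ignored files; -A also stages deletions; '--' guards
            # against filenames that look like options
            subprocess.run(['git', 'add', '-f', '-A', '--'] + batch, check=True)
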
317def _add_license_diff_to_recipe(path, diff):
318 notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
319# The following is the difference between the old and the new license text.
320# Please update the LICENSE value if needed, and summarize the changes in
321# the commit message via 'License-Update:' tag.
322# (example: 'License-Update: copyright years updated.')
323#
324# The changes:
325#
326"""
327 commented_diff = "\n".join(["# {}".format(l) for l in diff.split('\n')])
328 with open(path, 'rb') as f:
329 orig_content = f.read()
330 with open(path, 'wb') as f:
331 f.write(notice_text.encode())
332 f.write(commented_diff.encode())
333 f.write("\n#\n\n".encode())
334 f.write(orig_content)
335
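A hedged usage sketch for _add_license_diff_to_recipe(); the recipe path and diff text below are illustrative only:

    diff_text = '--- COPYING\n+++ COPYING\n@@ -1 +1 @@\n-Copyright 2020\n+Copyright 2024'
    _add_license_diff_to_recipe('workspace/recipes/example/example_1.1.bb', diff_text)
    # The file now begins with the commented FIXME notice and diff, followed
    # by the original recipe contents.
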
336def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
337 """Creates the new recipe under workspace"""
338
339 pn = rd.getVar('PN')
340 path = os.path.join(workspace, 'recipes', pn)
341 bb.utils.mkdirhier(path)
342 copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
343 if not copied:
344 raise DevtoolError('Internal error - no files were copied for recipe %s' % pn)
345 logger.debug('Copied %s to %s' % (copied, path))
346
347 oldpv = rd.getVar('PV')
348 if not newpv:
349 newpv = oldpv
350 origpath = rd.getVar('FILE')
351 fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path)
352 logger.debug('Upgraded %s => %s' % (origpath, fullpath))
353
354 newvalues = {}
355 if _recipe_contains(rd, 'PV') and newpv != oldpv:
356 newvalues['PV'] = newpv
357
358 if srcrev:
359 newvalues['SRCREV'] = srcrev
360
361 if srcbranch:
362 src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
363 changed = False
364 replacing = True
365 new_src_uri = []
366 for entry in src_uri:
367 try:
368 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
369 except bb.fetch2.MalformedUrl as e:
370 raise DevtoolError("Could not decode SRC_URI: {}".format(e))
371 if replacing and scheme in ['git', 'gitsm']:
372 branch = params.get('branch', 'master')
373 if rd.expand(branch) != srcbranch:
374 # Handle case where branch is set through a variable
375 res = re.match(r'\$\{([^}@]+)\}', branch)
376 if res:
377 newvalues[res.group(1)] = srcbranch
378 # We know we won't change SRC_URI now, so break out
379 break
380 else:
381 params['branch'] = srcbranch
382 entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
383 changed = True
384 replacing = False
385 new_src_uri.append(entry)
386 if changed:
387 newvalues['SRC_URI'] = ' '.join(new_src_uri)
388
389 newvalues['PR'] = None
390
391 # Work out which SRC_URI entries have changed in case the entry uses a name
392 crd = rd.createCopy()
393 crd.setVar('PV', newpv)
394 for var, value in newvalues.items():
395 crd.setVar(var, value)
396 old_src_uri = (rd.getVar('SRC_URI') or '').split()
397 new_src_uri = (crd.getVar('SRC_URI') or '').split()
398 newnames = []
399 addnames = []
400 for newentry in new_src_uri:
401 _, _, _, _, _, params = bb.fetch2.decodeurl(newentry)
402 if 'name' in params:
403 newnames.append(params['name'])
404 if newentry not in old_src_uri:
405 addnames.append(params['name'])
406 # Find what's been set in the original recipe
407 oldnames = []
408 oldsums = []
409 noname = False
410 for varflag in rd.getVarFlags('SRC_URI'):
411 for checksum in checksums:
412 if varflag.endswith('.' + checksum):
413 name = varflag.rsplit('.', 1)[0]
414 if name not in oldnames:
415 oldnames.append(name)
416 oldsums.append(checksum)
417 elif varflag == checksum:
418 noname = True
419 oldsums.append(checksum)
420 # Even if SRC_URI has named entries it doesn't have to actually use the name
421 if noname and addnames and addnames[0] not in oldnames:
422 addnames = []
423 # Drop any old names (the name actually might include ${PV})
424 for name in oldnames:
425 if name not in newnames:
426 for checksum in oldsums:
427 newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None
428
429 nameprefix = '%s.' % addnames[0] if addnames else ''
430
431 # md5sum is deprecated, remove any traces of it. If it was the only old
432 # checksum, then replace it with the default checksums.
433 if 'md5sum' in oldsums:
434 newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
435 oldsums.remove('md5sum')
436 if not oldsums:
437 oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
438
439 for checksum in oldsums:
440 newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]
441
442 if srcsubdir_new != srcsubdir_old:
443 s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
444 s_subdir_new = os.path.relpath(os.path.abspath(crd.getVar('S')), crd.getVar('WORKDIR'))
445 if srcsubdir_old == s_subdir_old and srcsubdir_new != s_subdir_new:
446 # Subdir for old extracted source matches what S points to (it should!)
447 # but subdir for new extracted source doesn't match what S will be
448 newvalues['S'] = '${WORKDIR}/%s' % srcsubdir_new.replace(newpv, '${PV}')
449 if crd.expand(newvalues['S']) == crd.expand('${WORKDIR}/${BP}'):
450 # It's the default, drop it
451 # FIXME what if S is being set in a .inc?
452 newvalues['S'] = None
453 logger.info('Source subdirectory has changed, dropping S value since it now matches the default ("${WORKDIR}/${BP}")')
454 else:
455 logger.info('Source subdirectory has changed, updating S value')
456
457 if license_diff:
458 newlicchksum = " ".join(["file://{}".format(l['path']) +
459 (";beginline={}".format(l['beginline']) if l['beginline'] else "") +
460 (";endline={}".format(l['endline']) if l['endline'] else "") +
461 (";md5={}".format(l['actual_md5'])) for l in new_licenses])
462 newvalues["LIC_FILES_CHKSUM"] = newlicchksum
463 _add_license_diff_to_recipe(fullpath, license_diff)
464
465 tinfoil.modified_files()
466 try:
467 rd = tinfoil.parse_recipe_file(fullpath, False)
468 except bb.tinfoil.TinfoilCommandFailed as e:
469 _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
470 oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
471
472 return fullpath, copied
473
474
475def _check_git_config():
476 def getconfig(name):
477 try:
478 value = bb.process.run('git config %s' % name)[0].strip()
479 except bb.process.ExecutionError as e:
480 if e.exitcode == 1:
481 value = None
482 else:
483 raise
484 return value
485
486 username = getconfig('user.name')
487 useremail = getconfig('user.email')
488 configerr = []
489 if not username:
490 configerr.append('Please set your name using:\n git config --global user.name')
491 if not useremail:
492 configerr.append('Please set your email using:\n git config --global user.email')
493 if configerr:
494 raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(configerr))
495
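getconfig() above relies on 'git config <name>' exiting with status 1 when a key is unset. A standalone sketch of the same probe using subprocess directly (an illustrative rewrite, not the devtool API):

    import subprocess

    def git_config(name):
        # Returns the configured value, or None when git exits 1 (key unset)
        res = subprocess.run(['git', 'config', name], capture_output=True, text=True)
        return res.stdout.strip() if res.returncode == 0 else None
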
496def _extract_licenses(srcpath, recipe_licenses):
497 licenses = []
498 for url in recipe_licenses.split():
499 license = {}
500 (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
501 license['path'] = path
502 license['md5'] = parm.get('md5', '')
503 license['beginline'], license['endline'] = 0, 0
504 if 'beginline' in parm:
505 license['beginline'] = int(parm['beginline'])
506 if 'endline' in parm:
507 license['endline'] = int(parm['endline'])
508 license['text'] = []
509 with open(os.path.join(srcpath, path), 'rb') as f:
510 import hashlib
511 actual_md5 = hashlib.md5()
512 lineno = 0
513 for line in f:
514 lineno += 1
515 if (lineno >= license['beginline']) and ((lineno <= license['endline']) or not license['endline']):
516 license['text'].append(line.decode(errors='ignore'))
517 actual_md5.update(line)
518 license['actual_md5'] = actual_md5.hexdigest()
519 licenses.append(license)
520 return licenses
521
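For reference, the value this helper walks is LIC_FILES_CHKSUM-style: space-separated file:// URLs carrying an md5 parameter and optional beginline/endline bounds. The paths and checksum below are illustrative:

    recipe_licenses = ('file://COPYING;md5=d41d8cd98f00b204e9800998ecf8427e '
                       'file://src/main.c;beginline=1;endline=20;md5=d41d8cd98f00b204e9800998ecf8427e')
    # licenses = _extract_licenses('/path/to/srctree', recipe_licenses)
    # -> one dict per entry with 'path', 'md5', 'beginline', 'endline',
    #    'text' and the freshly computed 'actual_md5'
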
522def _generate_license_diff(old_licenses, new_licenses):
523 need_diff = False
524 for l in new_licenses:
525 if l['md5'] != l['actual_md5']:
526 need_diff = True
527 break
528    if not need_diff:
529 return None
530
531 import difflib
532 diff = ''
533 for old, new in zip(old_licenses, new_licenses):
534 for line in difflib.unified_diff(old['text'], new['text'], old['path'], new['path']):
535 diff = diff + line
536 return diff
537
538def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil):
539 tasks = []
540 for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
541 logger.info('Running extra recipe upgrade task: %s' % task)
542 res = tinfoil.build_targets(pn, task, handle_events=True)
543
544 if not res:
545 raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn))
546
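RECIPE_UPGRADE_EXTRA_TASKS is read from the recipe being upgraded; for instance, the cargo-update-recipe-crates class appends do_update_crates to it so that crate checksums are regenerated after an upgrade. Each listed task is simply built via tinfoil once the new recipe is in place.
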
547def upgrade(args, config, basepath, workspace):
548 """Entry point for the devtool 'upgrade' subcommand"""
549
550 if args.recipename in workspace:
551 raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
552 if args.srcbranch and not args.srcrev:
553        raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")
554
555 _check_git_config()
556
557 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
558 try:
559 rd = parse_recipe(config, tinfoil, args.recipename, True)
560 if not rd:
561 return 1
562
563 pn = rd.getVar('PN')
564 if pn != args.recipename:
565 logger.info('Mapping %s to %s' % (args.recipename, pn))
566 if pn in workspace:
567 raise DevtoolError("recipe %s is already in your workspace" % pn)
568
569 if args.srctree:
570 srctree = os.path.abspath(args.srctree)
571 else:
572 srctree = standard.get_default_srctree(config, pn)
573
574 srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
575
576 # try to automatically discover latest version and revision if not provided on command line
577 if not args.version and not args.srcrev:
578 version_info = oe.recipeutils.get_recipe_upstream_version(rd)
579 if version_info['version'] and not version_info['version'].endswith("new-commits-available"):
580 args.version = version_info['version']
581 if version_info['revision']:
582 args.srcrev = version_info['revision']
583 if not args.version and not args.srcrev:
584 raise DevtoolError("Automatic discovery of latest version/revision failed - you must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option.")
585
586 standard._check_compatible_recipe(pn, rd)
587 old_srcrev = rd.getVar('SRCREV')
588 if old_srcrev == 'INVALID':
589 old_srcrev = None
590 if old_srcrev and not args.srcrev:
591 raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
592 old_ver = rd.getVar('PV')
593 if old_ver == args.version and old_srcrev == args.srcrev:
594            raise DevtoolError("Current and upgrade versions are the same")
595 if args.version:
596 if bb.utils.vercmp_string(args.version, old_ver) < 0:
597 logger.warning('Upgrade version %s compares as less than the current version %s. If you are using a package feed for on-target upgrades or providing this recipe for general consumption, then you should increment PE in the recipe (or if there is no current PE value set, set it to "1")' % (args.version, old_ver))
598 check_prerelease_version(args.version, 'devtool upgrade')
599
600 rf = None
601 license_diff = None
602 try:
603 logger.info('Extracting current version source...')
604 rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
605 old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
606 logger.info('Extracting upgraded version source...')
607 rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
608 args.srcrev, args.srcbranch, args.branch, args.keep_temp,
609 tinfoil, rd)
610 new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
611 license_diff = _generate_license_diff(old_licenses, new_licenses)
612 rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
613 except (bb.process.CmdError, DevtoolError) as e:
614 recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN'))
615 _upgrade_error(e, recipedir, srctree, args.keep_failure)
616 standard._add_md5(config, pn, os.path.dirname(rf))
617
618 af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
619 copied, config.workspace_path, rd)
620 standard._add_md5(config, pn, af)
621
622 _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil)
623
624 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
625
626 logger.info('Upgraded source extracted to %s' % srctree)
627 logger.info('New recipe is %s' % rf)
628 if license_diff:
629 logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
630 preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
631 if preferred_version:
632 logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
633 finally:
634 tinfoil.shutdown()
635 return 0
636
637def latest_version(args, config, basepath, workspace):
638 """Entry point for the devtool 'latest_version' subcommand"""
639 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
640 try:
641 rd = parse_recipe(config, tinfoil, args.recipename, True)
642 if not rd:
643 return 1
644 version_info = oe.recipeutils.get_recipe_upstream_version(rd)
645 # "new-commits-available" is an indication that upstream never issues version tags
646 if not version_info['version'].endswith("new-commits-available"):
647 logger.info("Current version: {}".format(version_info['current_version']))
648 logger.info("Latest version: {}".format(version_info['version']))
649 if version_info['revision']:
650 logger.info("Latest version's commit: {}".format(version_info['revision']))
651 else:
652 logger.info("Latest commit: {}".format(version_info['revision']))
653 finally:
654 tinfoil.shutdown()
655 return 0
656
657def check_upgrade_status(args, config, basepath, workspace):
658 def _print_status(recipe):
659 print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'],
660 recipe['cur_ver'],
661 recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"),
662 recipe['maintainer'],
663 recipe['revision'] if recipe['revision'] != 'N/A' else "",
664 "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else ""))
665 if not args.recipe:
666 logger.info("Checking the upstream status for all recipes may take a few minutes")
667 results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
668 for recipegroup in results:
669 upgrades = [r for r in recipegroup if r['status'] != 'MATCH']
670 currents = [r for r in recipegroup if r['status'] == 'MATCH']
671 if len(upgrades) > 1:
672 print("These recipes need to be upgraded together {")
673 for r in sorted(upgrades, key=lambda r:r['pn']):
674 _print_status(r)
675 if len(upgrades) > 1:
676 print("}")
677 for r in currents:
678 if args.all:
679 _print_status(r)
680
681def register_commands(subparsers, context):
682 """Register devtool subcommands from this plugin"""
683
684 defsrctree = standard.get_default_srctree(context.config)
685
686 parser_upgrade = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
687 description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
688 group='starting')
689 parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
690 parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
691 parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV). If omitted, latest upstream version will be determined and used, if possible.')
692 parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (useful when fetching from an SCM such as git)')
693 parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
694 parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
695 parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
696 parser_upgrade.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
697 group = parser_upgrade.add_mutually_exclusive_group()
698 group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
699 group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
700 parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
701 parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
702 parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)
703
704 parser_latest_version = subparsers.add_parser('latest-version', help='Report the latest version of an existing recipe',
705                                                  description='Queries the upstream server for what the latest upstream release is (for git, tags are checked; for tarballs, a list of them is obtained and the one with the highest version number is reported)',
706 group='info')
707 parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
708 parser_latest_version.set_defaults(func=latest_version)
709
710 parser_check_upgrade_status = subparsers.add_parser('check-upgrade-status', help="Report upgradability for multiple (or all) recipes",
711                                                        description="Prints a table of recipes together with the versions currently provided by those recipes and the latest upstream versions, where a later version is available",
712 group='info')
713 parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
714 parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
715 parser_check_upgrade_status.set_defaults(func=check_upgrade_status)
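All of the subcommands above follow the same devtool plugin contract: the module exposes register_commands(subparsers, context) and each handler receives (args, config, basepath, workspace) and returns an exit code. A minimal sketch of a hypothetical additional plugin under those assumptions:

    def example(args, config, basepath, workspace):
        """Entry point for a hypothetical devtool 'example' subcommand"""
        logger.info('Operating on %s' % args.recipename)
        return 0

    def register_commands(subparsers, context):
        parser_example = subparsers.add_parser('example', help='Hypothetical example subcommand',
                                               group='info')
        parser_example.add_argument('recipename', help='Recipe to operate on')
        parser_example.set_defaults(func=example)
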
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py
deleted file mode 100644
index bf39f71b11..0000000000
--- a/scripts/lib/devtool/utilcmds.py
+++ /dev/null
@@ -1,242 +0,0 @@
1# Development tool - utility commands plugin
2#
3# Copyright (C) 2015-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool utility plugins"""
9
10import os
11import sys
12import shutil
13import tempfile
14import logging
15import argparse
16import subprocess
17import scriptutils
18from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
19from devtool import parse_recipe
20
21logger = logging.getLogger('devtool')
22
23def _find_recipe_path(args, config, basepath, workspace):
24 if args.any_recipe:
25 logger.warning('-a/--any-recipe option is now always active, and thus the option will be removed in a future release')
26 if args.recipename in workspace:
27 recipefile = workspace[args.recipename]['recipefile']
28 else:
29 recipefile = None
30 if not recipefile:
31 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
32 try:
33 rd = parse_recipe(config, tinfoil, args.recipename, True)
34 if not rd:
35 raise DevtoolError("Failed to find specified recipe")
36 recipefile = rd.getVar('FILE')
37 finally:
38 tinfoil.shutdown()
39 return recipefile
40
41
42def find_recipe(args, config, basepath, workspace):
43 """Entry point for the devtool 'find-recipe' subcommand"""
44 recipefile = _find_recipe_path(args, config, basepath, workspace)
45 print(recipefile)
46 return 0
47
48
49def edit_recipe(args, config, basepath, workspace):
50 """Entry point for the devtool 'edit-recipe' subcommand"""
51 return scriptutils.run_editor(_find_recipe_path(args, config, basepath, workspace), logger)
52
53
54def configure_help(args, config, basepath, workspace):
55 """Entry point for the devtool 'configure-help' subcommand"""
56 import oe.utils
57
58 check_workspace_recipe(workspace, args.recipename)
59 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
60 try:
61 rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
62 if not rd:
63 return 1
64 b = rd.getVar('B')
65 s = rd.getVar('S')
66 configurescript = os.path.join(s, 'configure')
67 confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd))
68 configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
69 extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
70 extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
71 do_configure = rd.getVar('do_configure') or ''
72 do_configure_noexpand = rd.getVar('do_configure', False) or ''
73 packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
74 autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
75 cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
76 cmake_do_configure = rd.getVar('cmake_do_configure')
77 pn = rd.getVar('PN')
78 finally:
79 tinfoil.shutdown()
80
81 if 'doc' in packageconfig:
82 del packageconfig['doc']
83
84 if autotools and not os.path.exists(configurescript):
85 logger.info('Running do_configure to generate configure script')
86 try:
87 stdout, _ = exec_build_env_command(config.init_path, basepath,
88 'bitbake -c configure %s' % args.recipename,
89 stderr=subprocess.STDOUT)
90 except bb.process.ExecutionError:
91 pass
92
93 if confdisabled or do_configure.strip() in ('', ':'):
94 raise DevtoolError("do_configure task has been disabled for this recipe")
95 elif args.no_pager and not os.path.exists(configurescript):
96 raise DevtoolError("No configure script found and no other information to display")
97 else:
98 configopttext = ''
99 if autotools and configureopts:
100 configopttext = '''
101Arguments currently passed to the configure script:
102
103%s
104
105Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
106 if extra_oeconf:
107 configopttext += ''' The ones that are specified through EXTRA_OECONF (which you can change or add to easily):
108
109%s''' % extra_oeconf
110
111 elif cmake:
112 in_cmake = False
113 cmake_cmd = ''
114 for line in cmake_do_configure.splitlines():
115 if in_cmake:
116 cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
117 if not line.endswith('\\'):
118 break
119 if line.lstrip().startswith('cmake '):
120 cmake_cmd = line.strip().rstrip('\\')
121 if line.endswith('\\'):
122 in_cmake = True
123 else:
124 break
125 if cmake_cmd:
126 configopttext = '''
127The current cmake command line:
128
129%s
130
131Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)
132
133%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
134 else:
135 configopttext = '''
136The current implementation of cmake_do_configure:
137
138cmake_do_configure() {
139%s
140}
141
142Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)
143
144%s''' % (cmake_do_configure.rstrip(), extra_oecmake)
145
146 elif do_configure:
147 configopttext = '''
148The current implementation of do_configure:
149
150do_configure() {
151%s
152}''' % do_configure.rstrip()
153 if '${EXTRA_OECONF}' in do_configure_noexpand:
154 configopttext += '''
155
156Arguments specified through EXTRA_OECONF (which you can change or add to easily):
157
158%s''' % extra_oeconf
159
160 if packageconfig:
161 configopttext += '''
162
163Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''
164
165 if args.arg:
166 helpargs = ' '.join(args.arg)
167 elif cmake:
168 helpargs = '-LH'
169 else:
170 helpargs = '--help'
171
172 msg = '''configure information for %s
173------------------------------------------
174%s''' % (pn, configopttext)
175
176 if cmake:
177 msg += '''
178
179The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
180------------------------------------------''' % (helpargs, pn)
181 elif os.path.exists(configurescript):
182 msg += '''
183
184The ./configure %s output for %s follows.
185------------------------------------------''' % (helpargs, pn)
186
187 olddir = os.getcwd()
188 tmppath = tempfile.mkdtemp()
189 with tempfile.NamedTemporaryFile('w', delete=False) as tf:
190 if not args.no_header:
191 tf.write(msg + '\n')
192 tf.close()
193 try:
194 try:
195 cmd = 'cat %s' % tf.name
196 if cmake:
197 cmd += '; cmake %s %s 2>&1' % (helpargs, s)
198 os.chdir(b)
199 elif os.path.exists(configurescript):
200 cmd += '; %s %s' % (configurescript, helpargs)
201 if sys.stdout.isatty() and not args.no_pager:
202 pager = os.environ.get('PAGER', 'less')
203 cmd = '(%s) | %s' % (cmd, pager)
204 subprocess.check_call(cmd, shell=True)
205 except subprocess.CalledProcessError as e:
206 return e.returncode
207 finally:
208 os.chdir(olddir)
209 shutil.rmtree(tmppath)
210 os.remove(tf.name)
211
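The pager plumbing above composes a shell pipeline when stdout is a terminal; the same pattern in isolation (POSIX shell and a less-style pager assumed):

    import os
    import subprocess
    import sys

    def run_paged(cmd):
        # Page interactive output through $PAGER (default 'less')
        if sys.stdout.isatty():
            cmd = '(%s) | %s' % (cmd, os.environ.get('PAGER', 'less'))
        subprocess.check_call(cmd, shell=True)
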
212
213def register_commands(subparsers, context):
214 """Register devtool subcommands from this plugin"""
215 parser_edit_recipe = subparsers.add_parser('edit-recipe', help='Edit a recipe file',
216 description='Runs the default editor (as specified by the EDITOR variable) on the specified recipe. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
217 group='working')
218 parser_edit_recipe.add_argument('recipename', help='Recipe to edit')
219 # FIXME drop -a at some point in future
220 parser_edit_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
221 parser_edit_recipe.set_defaults(func=edit_recipe)
222
223 # Find-recipe
224 parser_find_recipe = subparsers.add_parser('find-recipe', help='Find a recipe file',
225 description='Finds a recipe file. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
226 group='working')
227 parser_find_recipe.add_argument('recipename', help='Recipe to find')
228 # FIXME drop -a at some point in future
229 parser_find_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
230 parser_find_recipe.set_defaults(func=find_recipe)
231
232 # NOTE: Needed to override the usage string here since the default
233 # gets the order wrong - recipename must come before --arg
234 parser_configure_help = subparsers.add_parser('configure-help', help='Get help on configure script options',
235 usage='devtool configure-help [options] recipename [--arg ...]',
236 description='Displays the help for the configure script for the specified recipe (i.e. runs ./configure --help) prefaced by a header describing the current options being specified. Output is piped through less (or whatever PAGER is set to, if set) for easy browsing.',
237 group='working')
238 parser_configure_help.add_argument('recipename', help='Recipe to show configure help for')
239 parser_configure_help.add_argument('-p', '--no-pager', help='Disable paged output', action="store_true")
240 parser_configure_help.add_argument('-n', '--no-header', help='Disable explanatory header text', action="store_true")
241 parser_configure_help.add_argument('--arg', help='Pass remaining arguments to the configure script instead of --help (useful if the script has additional help options)', nargs=argparse.REMAINDER)
242 parser_configure_help.set_defaults(func=configure_help)
diff --git a/scripts/lib/recipetool/__init__.py b/scripts/lib/recipetool/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/recipetool/__init__.py
+++ /dev/null
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
deleted file mode 100644
index 10945d6008..0000000000
--- a/scripts/lib/recipetool/append.py
+++ /dev/null
@@ -1,477 +0,0 @@
1# Recipe creation tool - append plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import sys
9import os
10import argparse
11import glob
12import fnmatch
13import re
14import subprocess
15import logging
16import stat
17import shutil
18import scriptutils
19import errno
20from collections import defaultdict
21import difflib
22
23logger = logging.getLogger('recipetool')
24
25tinfoil = None
26
27def tinfoil_init(instance):
28 global tinfoil
29 tinfoil = instance
30
31
32# FIXME guessing when we don't have pkgdata?
33# FIXME mode to create patch rather than directly substitute
34
35class InvalidTargetFileError(Exception):
36 pass
37
38def find_target_file(targetpath, d, pkglist=None):
39 """Find the recipe installing the specified target path, optionally limited to a select list of packages"""
40 import json
41
42 pkgdata_dir = d.getVar('PKGDATA_DIR')
43
44 # The mix between /etc and ${sysconfdir} here may look odd, but it is just
45 # being consistent with usage elsewhere
46 invalidtargets = {'${sysconfdir}/version': '${sysconfdir}/version is written out at image creation time',
47 '/etc/timestamp': '/etc/timestamp is written out at image creation time',
48 '/dev/*': '/dev is handled by udev (or equivalent) and the kernel (devtmpfs)',
49 '/etc/passwd': '/etc/passwd should be managed through the useradd and extrausers classes',
50 '/etc/group': '/etc/group should be managed through the useradd and extrausers classes',
51 '/etc/shadow': '/etc/shadow should be managed through the useradd and extrausers classes',
52 '/etc/gshadow': '/etc/gshadow should be managed through the useradd and extrausers classes',
53 '${sysconfdir}/hostname': '${sysconfdir}/hostname contents should be set by setting hostname:pn-base-files = "value" in configuration',}
54
55 for pthspec, message in invalidtargets.items():
56 if fnmatch.fnmatchcase(targetpath, d.expand(pthspec)):
57 raise InvalidTargetFileError(d.expand(message))
58
59 targetpath_re = re.compile(r'\s+(\$D)?%s(\s|$)' % targetpath)
60
61 recipes = defaultdict(list)
62 for root, dirs, files in os.walk(os.path.join(pkgdata_dir, 'runtime')):
63 if pkglist:
64 filelist = pkglist
65 else:
66 filelist = files
67 for fn in filelist:
68 pkgdatafile = os.path.join(root, fn)
69 if pkglist and not os.path.exists(pkgdatafile):
70 continue
71 with open(pkgdatafile, 'r') as f:
72 pn = ''
73 # This does assume that PN comes before other values, but that's a fairly safe assumption
74 for line in f:
75 if line.startswith('PN:'):
76 pn = line.split(': ', 1)[1].strip()
77 elif line.startswith('FILES_INFO'):
78 val = line.split(': ', 1)[1].strip()
79 dictval = json.loads(val)
80 for fullpth in dictval.keys():
81 if fnmatch.fnmatchcase(fullpth, targetpath):
82 recipes[targetpath].append(pn)
83 elif line.startswith('pkg_preinst:') or line.startswith('pkg_postinst:'):
84 scriptval = line.split(': ', 1)[1].strip().encode('utf-8').decode('unicode_escape')
85 if 'update-alternatives --install %s ' % targetpath in scriptval:
86 recipes[targetpath].append('?%s' % pn)
87 elif targetpath_re.search(scriptval):
88 recipes[targetpath].append('!%s' % pn)
89 return recipes
90
91def _parse_recipe(pn, tinfoil):
92 try:
93 rd = tinfoil.parse_recipe(pn)
94 except bb.providers.NoProvider as e:
95 logger.error(str(e))
96 return None
97 return rd
98
99def determine_file_source(targetpath, rd):
100 """Assuming we know a file came from a specific recipe, figure out exactly where it came from"""
101 import oe.recipeutils
102
103 # See if it's in do_install for the recipe
104 unpackdir = rd.getVar('UNPACKDIR')
105 src_uri = rd.getVar('SRC_URI')
106 srcfile = ''
107 modpatches = []
108 elements = check_do_install(rd, targetpath)
109 if elements:
110 logger.debug('do_install line:\n%s' % ' '.join(elements))
111 srcpath = get_source_path(elements)
112 logger.debug('source path: %s' % srcpath)
113 if not srcpath.startswith('/'):
114 # Handle non-absolute path
115 srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
116 if srcpath.startswith(unpackdir):
117 # OK, now we have the source file name, look for it in SRC_URI
118 workdirfile = os.path.relpath(srcpath, unpackdir)
119 # FIXME this is where we ought to have some code in the fetcher, because this is naive
120 for item in src_uri.split():
121 localpath = bb.fetch2.localpath(item, rd)
122 # Source path specified in do_install might be a glob
123 if fnmatch.fnmatch(os.path.basename(localpath), workdirfile):
124 srcfile = 'file://%s' % localpath
125 elif '/' in workdirfile:
126 if item == 'file://%s' % workdirfile:
127 srcfile = 'file://%s' % localpath
128
129 # Check patches
130 srcpatches = []
131 patchedfiles = oe.recipeutils.get_recipe_patched_files(rd)
132 for patch, filelist in patchedfiles.items():
133 for fileitem in filelist:
134 if fileitem[0] == srcpath:
135 srcpatches.append((patch, fileitem[1]))
136 if srcpatches:
137 addpatch = None
138 for patch in srcpatches:
139 if patch[1] == 'A':
140 addpatch = patch[0]
141 else:
142 modpatches.append(patch[0])
143 if addpatch:
144 srcfile = 'patch://%s' % addpatch
145
146 return (srcfile, elements, modpatches)
147
148def get_source_path(cmdelements):
149 """Find the source path specified within a command"""
150 command = cmdelements[0]
151 if command in ['install', 'cp']:
152 helptext = subprocess.check_output('LC_ALL=C %s --help' % command, shell=True).decode('utf-8')
153 argopts = ''
154 argopt_line_re = re.compile('^-([a-zA-Z0-9]), --[a-z-]+=')
155 for line in helptext.splitlines():
156 line = line.lstrip()
157 res = argopt_line_re.search(line)
158 if res:
159 argopts += res.group(1)
160 if not argopts:
161 # Fallback
162 if command == 'install':
163 argopts = 'gmoSt'
164 elif command == 'cp':
165 argopts = 't'
166 else:
167 raise Exception('No fallback arguments for command %s' % command)
168
169 skipnext = False
170 for elem in cmdelements[1:-1]:
171 if elem.startswith('-'):
172 if len(elem) > 1 and elem[1] in argopts:
173 skipnext = True
174 continue
175 if skipnext:
176 skipnext = False
177 continue
178 return elem
179 else:
180        raise Exception('get_source_path: no handling for command "%s"' % command)
181
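Illustrative behaviour of get_source_path(), assuming standard coreutils help output for the option detection above:

    # get_source_path(['install', '-m', '0644', 'files/foo.conf', '${D}${sysconfdir}/foo.conf'])
    #   -> 'files/foo.conf'   ('-m' consumes the following '0644')
    # get_source_path(['cp', 'src/app', '${D}${bindir}/app'])
    #   -> 'src/app'
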
182def get_func_deps(func, d):
183 """Find the function dependencies of a shell function"""
184 deps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
185 deps |= set((d.getVarFlag(func, "vardeps") or "").split())
186 funcdeps = []
187 for dep in deps:
188 if d.getVarFlag(dep, 'func'):
189 funcdeps.append(dep)
190 return funcdeps
191
192def check_do_install(rd, targetpath):
193 """Look at do_install for a command that installs/copies the specified target path"""
194 instpath = os.path.abspath(os.path.join(rd.getVar('D'), targetpath.lstrip('/')))
195 do_install = rd.getVar('do_install')
196 # Handle where do_install calls other functions (somewhat crudely, but good enough for this purpose)
197 deps = get_func_deps('do_install', rd)
198 for dep in deps:
199 do_install = do_install.replace(dep, rd.getVar(dep))
200
201 # Look backwards through do_install as we want to catch where a later line (perhaps
202 # from a bbappend) is writing over the top
203 for line in reversed(do_install.splitlines()):
204 line = line.strip()
205 if (line.startswith('install ') and ' -m' in line) or line.startswith('cp '):
206 elements = line.split()
207 destpath = os.path.abspath(elements[-1])
208 if destpath == instpath:
209 return elements
210 elif destpath.rstrip('/') == os.path.dirname(instpath):
211 # FIXME this doesn't take recursive copy into account; unsure if it's practical to do so
212 srcpath = get_source_path(elements)
213 if fnmatch.fnmatchcase(os.path.basename(instpath), os.path.basename(srcpath)):
214 return elements
215 return None
216
217
218def appendfile(args):
219 import oe.recipeutils
220
221 stdout = ''
222 try:
223 (stdout, _) = bb.process.run('LANG=C file -b %s' % args.newfile, shell=True)
224 if 'cannot open' in stdout:
225 raise bb.process.ExecutionError(stdout)
226 except bb.process.ExecutionError as err:
227 logger.debug('file command returned error: %s' % err)
228 stdout = ''
229 if stdout:
230 logger.debug('file command output: %s' % stdout.rstrip())
231 if ('executable' in stdout and not 'shell script' in stdout) or 'shared object' in stdout:
232 logger.warning('This file looks like it is a binary or otherwise the output of compilation. If it is, you should consider building it properly instead of substituting a binary file directly.')
233
234 if args.recipe:
235 recipes = {args.targetpath: [args.recipe],}
236 else:
237 try:
238 recipes = find_target_file(args.targetpath, tinfoil.config_data)
239 except InvalidTargetFileError as e:
240 logger.error('%s cannot be handled by this tool: %s' % (args.targetpath, e))
241 return 1
242 if not recipes:
243 logger.error('Unable to find any package producing path %s - this may be because the recipe packaging it has not been built yet' % args.targetpath)
244 return 1
245
246 alternative_pns = []
247 postinst_pns = []
248
249 selectpn = None
250 for targetpath, pnlist in recipes.items():
251 for pn in pnlist:
252 if pn.startswith('?'):
253 alternative_pns.append(pn[1:])
254 elif pn.startswith('!'):
255 postinst_pns.append(pn[1:])
256 elif selectpn:
257 # hit here with multilibs
258 continue
259 else:
260 selectpn = pn
261
262 if not selectpn and len(alternative_pns) == 1:
263 selectpn = alternative_pns[0]
264        logger.error('File %s is an alternative, possibly provided by recipe %s and seemingly no other; selecting it by default - you should double-check other recipes' % (args.targetpath, selectpn))
265
266 if selectpn:
267 logger.debug('Selecting recipe %s for file %s' % (selectpn, args.targetpath))
268 if postinst_pns:
269            logger.warning('%s may be modified by postinstall scripts for the following recipes:\n  %s\nThis may or may not be an issue depending on what modifications these postinstall scripts make.' % (args.targetpath, '\n  '.join(postinst_pns)))
270 rd = _parse_recipe(selectpn, tinfoil)
271 if not rd:
272 # Error message already shown
273 return 1
274 sourcefile, instelements, modpatches = determine_file_source(args.targetpath, rd)
275 sourcepath = None
276 if sourcefile:
277 sourcetype, sourcepath = sourcefile.split('://', 1)
278 logger.debug('Original source file is %s (%s)' % (sourcepath, sourcetype))
279 if sourcetype == 'patch':
280 logger.warning('File %s is added by the patch %s - you may need to remove or replace this patch in order to replace the file.' % (args.targetpath, sourcepath))
281 sourcepath = None
282 else:
283 logger.debug('Unable to determine source file, proceeding anyway')
284 if modpatches:
285 logger.warning('File %s is modified by the following patches:\n %s' % (args.targetpath, '\n '.join(modpatches)))
286
287 if instelements and sourcepath:
288 install = None
289 else:
290 # Auto-determine permissions
291 # Check destination
292 binpaths = '${bindir}:${sbindir}:${base_bindir}:${base_sbindir}:${libexecdir}:${sysconfdir}/init.d'
293 perms = '0644'
294 if os.path.abspath(os.path.dirname(args.targetpath)) in rd.expand(binpaths).split(':'):
295 # File is going into a directory normally reserved for executables, so it should be executable
296 perms = '0755'
297 else:
298 # Check source
299 st = os.stat(args.newfile)
300 if st.st_mode & stat.S_IXUSR:
301 perms = '0755'
302 install = {args.newfile: (args.targetpath, perms)}
303 if sourcepath:
304 sourcepath = os.path.basename(sourcepath)
305 oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname' : sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
306 tinfoil.modified_files()
307 return 0
308 else:
309 if alternative_pns:
310 logger.error('File %s is an alternative possibly provided by the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(alternative_pns)))
311 elif postinst_pns:
312 logger.error('File %s may be written out in a pre/postinstall script of the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(postinst_pns)))
313 return 3
314
315
316def appendsrc(args, files, rd, extralines=None):
317 import oe.recipeutils
318
319 srcdir = rd.getVar('S')
320 workdir = rd.getVar('WORKDIR')
321
322 import bb.fetch
323 simplified = {}
324 src_uri = rd.getVar('SRC_URI').split()
325 for uri in src_uri:
326 if uri.endswith(';'):
327 uri = uri[:-1]
328 simple_uri = bb.fetch.URI(uri)
329 simple_uri.params = {}
330 simplified[str(simple_uri)] = uri
331
332 copyfiles = {}
333 extralines = extralines or []
334 params = []
335 for newfile, srcfile in files.items():
336 src_destdir = os.path.dirname(srcfile)
337 if not args.use_workdir:
338 if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
339 srcdir = os.path.join(workdir, 'git')
340 if not bb.data.inherits_class('kernel-yocto', rd):
341 logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git')
342 src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
343 src_destdir = os.path.normpath(src_destdir)
344
345 if src_destdir and src_destdir != '.':
346 params.append({'subdir': src_destdir})
347 else:
348 params.append({})
349
350 copyfiles[newfile] = {'newname' : os.path.basename(srcfile)}
351
352 dry_run_output = None
353 dry_run_outdir = None
354 if args.dry_run:
355 import tempfile
356 dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
357 dry_run_outdir = dry_run_output.name
358
359 appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
360 redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
361 if not appendfile:
362 return
363 if args.dry_run:
364 output = ''
365 appendfilename = os.path.basename(appendfile)
366 newappendfile = appendfile
367 if appendfile and os.path.exists(appendfile):
368 with open(appendfile, 'r') as f:
369 oldlines = f.readlines()
370 else:
371 appendfile = '/dev/null'
372 oldlines = []
373
374 with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
375 newlines = f.readlines()
376 diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
377 difflines = list(diff)
378 if difflines:
379 output += ''.join(difflines)
380 if output:
381 logger.info('Diff of changed files:\n%s' % output)
382 else:
383 logger.info('No changed files')
384 tinfoil.modified_files()
385
386def appendsrcfiles(parser, args):
387 recipedata = _parse_recipe(args.recipe, tinfoil)
388 if not recipedata:
389 parser.error('RECIPE must be a valid recipe name')
390
391 files = dict((f, os.path.join(args.destdir, os.path.basename(f)))
392 for f in args.files)
393 return appendsrc(args, files, recipedata)
394
395
396def appendsrcfile(parser, args):
397 recipedata = _parse_recipe(args.recipe, tinfoil)
398 if not recipedata:
399 parser.error('RECIPE must be a valid recipe name')
400
401 if not args.destfile:
402 args.destfile = os.path.basename(args.file)
403 elif args.destfile.endswith('/'):
404 args.destfile = os.path.join(args.destfile, os.path.basename(args.file))
405
406 return appendsrc(args, {args.file: args.destfile}, recipedata)
407
408
409def layer(layerpath):
410 if not os.path.exists(os.path.join(layerpath, 'conf', 'layer.conf')):
411 raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath))
412 return layerpath
413
414
415def existing_path(filepath):
416 if not os.path.exists(filepath):
417 raise argparse.ArgumentTypeError('{0!r} must be an existing path'.format(filepath))
418 return filepath
419
420
421def existing_file(filepath):
422 filepath = existing_path(filepath)
423 if os.path.isdir(filepath):
424 raise argparse.ArgumentTypeError('{0!r} must be a file, not a directory'.format(filepath))
425 return filepath
426
427
428def destination_path(destpath):
429 if os.path.isabs(destpath):
430 raise argparse.ArgumentTypeError('{0!r} must be a relative path, not absolute'.format(destpath))
431 return destpath
432
433
434def target_path(targetpath):
435 if not os.path.isabs(targetpath):
436 raise argparse.ArgumentTypeError('{0!r} must be an absolute path, not relative'.format(targetpath))
437 return targetpath
438
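The callables above are argparse 'type' validators: argparse calls each one with the raw string and turns any ArgumentTypeError into a usage error. A minimal sketch:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('destlayer', type=layer)
    parser.add_argument('targetpath', type=target_path)
    # parser.parse_args(['/not/a/layer', 'relative/path']) exits with the
    # ArgumentTypeError messages above rendered as usage errors
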
439
440def register_commands(subparsers):
441 common = argparse.ArgumentParser(add_help=False)
442 common.add_argument('-m', '--machine', help='Make bbappend changes specific to a machine only', metavar='MACHINE')
443 common.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
444 common.add_argument('destlayer', metavar='DESTLAYER', help='Base directory of the destination layer to write the bbappend to', type=layer)
445
446 parser_appendfile = subparsers.add_parser('appendfile',
447 parents=[common],
448 help='Create/update a bbappend to replace a target file',
449 description='Creates a bbappend (or updates an existing one) to replace the specified file that appears in the target system, determining the recipe that packages the file and the required path and name for the bbappend automatically. Note that the ability to determine the recipe packaging a particular file depends upon the recipe\'s do_packagedata task having already run prior to running this command (which it will have when the recipe has been built successfully, which in turn will have happened if one or more of the recipe\'s packages is included in an image that has been built successfully).')
450 parser_appendfile.add_argument('targetpath', help='Path to the file to be replaced (as it would appear within the target image, e.g. /etc/motd)', type=target_path)
451 parser_appendfile.add_argument('newfile', help='Custom file to replace the target file with', type=existing_file)
452 parser_appendfile.add_argument('-r', '--recipe', help='Override recipe to apply to (default is to find which recipe already packages the file)')
453 parser_appendfile.set_defaults(func=appendfile, parserecipes=True)
454
455 common_src = argparse.ArgumentParser(add_help=False, parents=[common])
456 common_src.add_argument('-W', '--workdir', help='Unpack file into WORKDIR rather than S', dest='use_workdir', action='store_true')
457 common_src.add_argument('recipe', metavar='RECIPE', help='Override recipe to apply to')
458
459 parser = subparsers.add_parser('appendsrcfiles',
460 parents=[common_src],
461 help='Create/update a bbappend to add or replace source files',
462                                   description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so you cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
463 parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
464    parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
465 parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
466 parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
467 parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)
468
469 parser = subparsers.add_parser('appendsrcfile',
470 parents=[common_src],
471 help='Create/update a bbappend to add or replace a source file',
472                                   description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just the destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
473    parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
474 parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
475 parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
476 parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
477 parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
deleted file mode 100644
index 94d52d6077..0000000000
--- a/scripts/lib/recipetool/create.py
+++ /dev/null
@@ -1,1439 +0,0 @@
1# Recipe creation tool - create command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import sys
9import os
10import argparse
11import glob
12import fnmatch
13import re
14import json
15import logging
16import scriptutils
17from urllib.parse import urlparse, urldefrag, urlsplit
18import hashlib
19import bb.fetch2
20logger = logging.getLogger('recipetool')
21
22tinfoil = None
23plugins = None
24
25def log_error_cond(message, debugonly):
26 if debugonly:
27 logger.debug(message)
28 else:
29 logger.error(message)
30
31def log_info_cond(message, debugonly):
32 if debugonly:
33 logger.debug(message)
34 else:
35 logger.info(message)
36
37def plugin_init(pluginlist):
38 # Take a reference to the list so we can use it later
39 global plugins
40 plugins = pluginlist
41
42def tinfoil_init(instance):
43 global tinfoil
44 tinfoil = instance
45
46class RecipeHandler(object):
47 recipelibmap = {}
48 recipeheadermap = {}
49 recipecmakefilemap = {}
50 recipebinmap = {}
51
52 def __init__(self):
53 self._devtool = False
54
55 @staticmethod
56 def load_libmap(d):
57 '''Load library->recipe mapping'''
58 import oe.package
59
60 if RecipeHandler.recipelibmap:
61 return
62 # First build up library->package mapping
63 d2 = bb.data.createCopy(d)
64 d2.setVar("WORKDIR_PKGDATA", "${PKGDATA_DIR}")
65 shlib_providers = oe.package.read_shlib_providers(d2)
66 libdir = d.getVar('libdir')
67 base_libdir = d.getVar('base_libdir')
68 libpaths = list(set([base_libdir, libdir]))
69 libname_re = re.compile(r'^lib(.+)\.so.*$')
70 pkglibmap = {}
71 for lib, item in shlib_providers.items():
72 for path, pkg in item.items():
73 if path in libpaths:
74 res = libname_re.match(lib)
75 if res:
76 libname = res.group(1)
77                            if libname not in pkglibmap:
78 pkglibmap[libname] = pkg[0]
79 else:
80 logger.debug('unable to extract library name from %s' % lib)
81
82 # Now turn it into a library->recipe mapping
83 pkgdata_dir = d.getVar('PKGDATA_DIR')
84 for libname, pkg in pkglibmap.items():
85 try:
86 with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
87 for line in f:
88 if line.startswith('PN:'):
89 RecipeHandler.recipelibmap[libname] = line.split(':', 1)[-1].strip()
90 break
91 except IOError as ioe:
92 if ioe.errno == 2:
93 logger.warning('unable to find a pkgdata file for package %s' % pkg)
94 else:
95 raise
96
97 # Some overrides - these should be mapped to the virtual
98 RecipeHandler.recipelibmap['GL'] = 'virtual/libgl'
99 RecipeHandler.recipelibmap['EGL'] = 'virtual/egl'
100 RecipeHandler.recipelibmap['GLESv2'] = 'virtual/libgles2'
101
102 @staticmethod
103 def load_devel_filemap(d):
104 '''Build up development file->recipe mapping'''
105 if RecipeHandler.recipeheadermap:
106 return
107 pkgdata_dir = d.getVar('PKGDATA_DIR')
108 includedir = d.getVar('includedir')
109 cmakedir = os.path.join(d.getVar('libdir'), 'cmake')
110 for pkg in glob.glob(os.path.join(pkgdata_dir, 'runtime', '*-dev')):
111 with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
112 pn = None
113 headers = []
114 cmakefiles = []
115 for line in f:
116 if line.startswith('PN:'):
117 pn = line.split(':', 1)[-1].strip()
118 elif line.startswith('FILES_INFO:%s:' % pkg):
119 val = line.split(': ', 1)[1].strip()
120 dictval = json.loads(val)
121 for fullpth in sorted(dictval):
122 if fullpth.startswith(includedir) and fullpth.endswith('.h'):
123 headers.append(os.path.relpath(fullpth, includedir))
124 elif fullpth.startswith(cmakedir) and fullpth.endswith('.cmake'):
125 cmakefiles.append(os.path.relpath(fullpth, cmakedir))
126 if pn and headers:
127 for header in headers:
128 RecipeHandler.recipeheadermap[header] = pn
129 if pn and cmakefiles:
130 for fn in cmakefiles:
131 RecipeHandler.recipecmakefilemap[fn] = pn
132
133 @staticmethod
134 def load_binmap(d):
135 '''Build up native binary->recipe mapping'''
136 if RecipeHandler.recipebinmap:
137 return
138 sstate_manifests = d.getVar('SSTATE_MANIFESTS')
139 staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE')
140 build_arch = d.getVar('BUILD_ARCH')
141 fileprefix = 'manifest-%s-' % build_arch
142 for fn in glob.glob(os.path.join(sstate_manifests, '%s*-native.populate_sysroot' % fileprefix)):
143 with open(fn, 'r') as f:
144 pn = os.path.basename(fn).rsplit('.', 1)[0][len(fileprefix):]
145 for line in f:
146 if line.startswith(staging_bindir_native):
147 prog = os.path.basename(line.rstrip())
148 RecipeHandler.recipebinmap[prog] = pn
149
150 @staticmethod
151 def checkfiles(path, speclist, recursive=False, excludedirs=None):
152 results = []
153 if recursive:
154 for root, dirs, files in os.walk(path, topdown=True):
155 if excludedirs:
156 dirs[:] = [d for d in dirs if d not in excludedirs]
157 for fn in files:
158 for spec in speclist:
159 if fnmatch.fnmatch(fn, spec):
160 results.append(os.path.join(root, fn))
161 else:
162 for spec in speclist:
163 results.extend(glob.glob(os.path.join(path, spec)))
164 return results
165
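A quick illustrative sketch of checkfiles() (assuming a bitbake-aware environment where recipetool.create is importable; the directory layout below is purely hypothetical):

    import os, tempfile
    from recipetool.create import RecipeHandler  # assumes scripts/lib on sys.path

    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, 'src'))
    open(os.path.join(root, 'CMakeLists.txt'), 'w').close()
    open(os.path.join(root, 'src', 'CMakeLists.txt'), 'w').close()

    # Non-recursive: a plain glob in the top directory -> one match
    print(RecipeHandler.checkfiles(root, ['CMakeLists.txt']))
    # Recursive walk with pruning -> the copy under 'src' is skipped
    print(RecipeHandler.checkfiles(root, ['CMakeLists.txt'], recursive=True,
                                   excludedirs=['src']))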
166 @staticmethod
167 def handle_depends(libdeps, pcdeps, deps, outlines, values, d):
168 if pcdeps:
169 recipemap = read_pkgconfig_provides(d)
170 if libdeps:
171 RecipeHandler.load_libmap(d)
172
173 ignorelibs = ['socket']
174 ignoredeps = ['gcc-runtime', 'glibc', 'uclibc', 'musl', 'tar-native', 'binutils-native', 'coreutils-native']
175
176 unmappedpc = []
177 pcdeps = list(set(pcdeps))
178 for pcdep in pcdeps:
179 if isinstance(pcdep, str):
180 recipe = recipemap.get(pcdep, None)
181 if recipe:
182 deps.append(recipe)
183 else:
184 if not pcdep.startswith('$'):
185 unmappedpc.append(pcdep)
186 else:
187 for item in pcdep:
188 recipe = recipemap.get(item, None)
189 if recipe:
190 deps.append(recipe)
191 break
192 else:
193 unmappedpc.append('(%s)' % ' or '.join(pcdep))
194
195 unmappedlibs = []
196 for libdep in libdeps:
197 if isinstance(libdep, tuple):
198 lib, header = libdep
199 else:
200 lib = libdep
201 header = None
202
203 if lib in ignorelibs:
204 logger.debug('Ignoring library dependency %s' % lib)
205 continue
206
207 recipe = RecipeHandler.recipelibmap.get(lib, None)
208 if recipe:
209 deps.append(recipe)
210 elif recipe is None:
211 if header:
212 RecipeHandler.load_devel_filemap(d)
213 recipe = RecipeHandler.recipeheadermap.get(header, None)
214 if recipe:
215 deps.append(recipe)
216 elif recipe is None:
217 unmappedlibs.append(lib)
218 else:
219 unmappedlibs.append(lib)
220
221 deps = set(deps).difference(set(ignoredeps))
222
223 if unmappedpc:
224 outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmappedpc))
225 outlines.append('# (this is based on recipes that have previously been built and packaged)')
226
227 if unmappedlibs:
228 outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedlibs))))
229 outlines.append('# (this is based on recipes that have previously been built and packaged)')
230
231 if deps:
232 values['DEPENDS'] = ' '.join(deps)
233
234 @staticmethod
235 def genfunction(outlines, funcname, content, python=False, forcespace=False):
236 if python:
237 prefix = 'python '
238 else:
239 prefix = ''
240 outlines.append('%s%s () {' % (prefix, funcname))
241 if python or forcespace:
242 indent = ' '
243 else:
244 indent = '\t'
245 addnoop = not python
246 for line in content:
247 outlines.append('%s%s' % (indent, line))
248 if addnoop:
249 strippedline = line.lstrip()
250 if strippedline and not strippedline.startswith('#'):
251 addnoop = False
252 if addnoop:
253 # Without this there'll be a syntax error
254 outlines.append('%s:' % indent)
255 outlines.append('}')
256 outlines.append('')
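A short sketch of the recipe text genfunction() emits, including the ':' no-op it appends when the body contains only comments so that the generated shell function still parses (assumes the module imports as recipetool.create in a bitbake-aware environment):

    from recipetool.create import RecipeHandler

    outlines = []
    RecipeHandler.genfunction(outlines, 'do_install', ['install -d ${D}${bindir}'])
    RecipeHandler.genfunction(outlines, 'do_configure', ['# nothing to do'])
    print('\n'.join(outlines))
    # do_install () {
    #     install -d ${D}${bindir}    <- tab-indented (shell function)
    # }
    #
    # do_configure () {
    #     # nothing to do
    #     :                           <- no-op appended automatically
    # }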
257
258 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
259 return False
260
261
262def validate_pv(pv):
263 if not pv or '_version' in pv.lower() or pv[0] not in '0123456789':
264 return False
265 return True
266
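Illustrative checks, derived from the rule above, of what validate_pv() accepts: a usable version must start with a digit and must not be a '_version' placeholder:

    from recipetool.create import validate_pv

    assert validate_pv('1.2.3')
    assert not validate_pv('v1.2.3')      # leading 'v' must already be stripped
    assert not validate_pv('MY_version')  # placeholder, not a real version
    assert not validate_pv('')            # empty/None rejected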
267def determine_from_filename(srcfile):
268 """Determine name and version from a filename"""
269 if is_package(srcfile):
270 # Force getting the value from the package metadata
271 return None, None
272
273 if '.tar.' in srcfile:
274 namepart = srcfile.split('.tar.')[0]
275 else:
276 namepart = os.path.splitext(srcfile)[0]
277 namepart = namepart.lower().replace('_', '-')
278 if namepart.endswith('.src'):
279 namepart = namepart[:-4]
280 if namepart.endswith('.orig'):
281 namepart = namepart[:-5]
282 splitval = namepart.split('-')
283 logger.debug('determine_from_filename: split name %s into: %s' % (srcfile, splitval))
284
285 ver_re = re.compile('^v?[0-9]')
286
287 pv = None
288 pn = None
289 if len(splitval) == 1:
290 # Try to split the version out if there is no separator (or a .)
291 res = re.match('^([^0-9]+)([0-9.]+.*)$', namepart)
292 if res:
293 if len(res.group(1)) > 1 and len(res.group(2)) > 1:
294 pn = res.group(1).rstrip('.')
295 pv = res.group(2)
296 else:
297 pn = namepart
298 else:
299 if splitval[-1] in ['source', 'src']:
300 splitval.pop()
301 if len(splitval) > 2 and re.match('^(alpha|beta|stable|release|rc[0-9]|pre[0-9]|p[0-9]|[0-9]{8})', splitval[-1]) and ver_re.match(splitval[-2]):
302 pv = '-'.join(splitval[-2:])
303 if pv.endswith('-release'):
304 pv = pv[:-8]
305 splitval = splitval[:-2]
306 elif ver_re.match(splitval[-1]):
307 pv = splitval.pop()
308 pn = '-'.join(splitval)
309 if pv and pv.startswith('v'):
310 pv = pv[1:]
311 logger.debug('determine_from_filename: name = "%s" version = "%s"' % (pn, pv))
312 return (pn, pv)
313
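Some worked examples for determine_from_filename() (hypothetical filenames; the expected results follow from the parsing rules above):

    from recipetool.create import determine_from_filename

    determine_from_filename('tinylib-1.2.3.tar.gz')  # -> ('tinylib', '1.2.3')
    determine_from_filename('foo_2.0.orig.tar.bz2')  # -> ('foo', '2.0')
    determine_from_filename('bash44.tar.gz')         # -> ('bash', '44'), no separator
    determine_from_filename('tool-1.0-rc2.zip')      # -> ('tool', '1.0-rc2')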
314def determine_from_url(srcuri):
315 """Determine name and version from a URL"""
316 pn = None
317 pv = None
318 parseres = urlparse(srcuri.lower().split(';', 1)[0])
319 if parseres.path:
320 if 'github.com' in parseres.netloc:
321 res = re.search(r'.*/(.*?)/archive/(.*)-final\.(tar|zip)', parseres.path)
322 if res:
323 pn = res.group(1).strip().replace('_', '-')
324 pv = res.group(2).strip().replace('_', '.')
325 else:
326 res = re.search(r'.*/(.*?)/archive/v?(.*)\.(tar|zip)', parseres.path)
327 if res:
328 pn = res.group(1).strip().replace('_', '-')
329 pv = res.group(2).strip().replace('_', '.')
330 elif 'bitbucket.org' in parseres.netloc:
331 res = re.search(r'.*/(.*?)/get/[a-zA-Z_-]*([0-9][0-9a-zA-Z_.]*)\.(tar|zip)', parseres.path)
332 if res:
333 pn = res.group(1).strip().replace('_', '-')
334 pv = res.group(2).strip().replace('_', '.')
335
336 if not pn and not pv:
337 if parseres.scheme not in ['git', 'gitsm', 'svn', 'hg']:
338 srcfile = os.path.basename(parseres.path.rstrip('/'))
339 pn, pv = determine_from_filename(srcfile)
340 elif parseres.scheme in ['git', 'gitsm']:
341 pn = os.path.basename(parseres.path.rstrip('/')).lower().replace('_', '-')
342 if pn.endswith('.git'):
343 pn = pn[:-4]
344
345 logger.debug('Determined from source URL: name = "%s", version = "%s"' % (pn, pv))
346 return (pn, pv)
347
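Worked examples for determine_from_url() (hypothetical URLs; the results are derived from the branches above):

    from recipetool.create import determine_from_url

    determine_from_url('https://github.com/alice/widgets/archive/v1.4.2.tar.gz')
    # -> ('widgets', '1.4.2')   github archive pattern, 'v' prefix dropped
    determine_from_url('git://git.example.com/MyProject.git;branch=main')
    # -> ('myproject', None)    git scheme: name from the path, no version
    determine_from_url('https://example.com/releases/tinylib-1.2.3.tar.gz')
    # -> ('tinylib', '1.2.3')   falls back to determine_from_filename()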
348def supports_srcrev(uri):
349 localdata = bb.data.createCopy(tinfoil.config_data)
350 # This is a bit sad, but if you don't have this set there can be some
351 # odd interactions with the urldata cache which lead to errors
352 localdata.setVar('SRCREV', '${AUTOREV}')
353 try:
354 fetcher = bb.fetch2.Fetch([uri], localdata)
355 urldata = fetcher.ud
356 for u in urldata:
357 if urldata[u].method.supports_srcrev():
358 return True
359 except bb.fetch2.FetchError as e:
360 logger.debug('FetchError in supports_srcrev: %s' % str(e))
361 # Fall back to basic check
362 if uri.startswith(('git://', 'gitsm://')):
363 return True
364 return False
365
366def reformat_git_uri(uri):
367 '''Convert any http[s]://....git URI into git://...;protocol=http[s]'''
368 checkuri = uri.split(';', 1)[0]
369 if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://git(hub|lab).com/[^/]+/[^/]+/?$', checkuri):
370 # Append the scheme if it is missing
371 if '://' not in uri:
372 uri = 'git://' + uri
373 scheme, host, path, user, pswd, parms = bb.fetch2.decodeurl(uri)
374 # Some URLs use ":" rather than "/" between host and path, which causes
375 # decodeurl to return the wrong host and path; detect and repair that here
376 if len(host.split(':')) > 1:
377 splitslash = host.split(':')
378 # Port number should not be split from host
379 if not re.match('^[0-9]+$', splitslash[1]):
380 host = splitslash[0]
381 path = '/' + splitslash[1] + path
382 # Algorithm:
383 # - if a user is given, default to protocol=ssh; an explicitly set protocol parameter is always honoured
384 # - if no user is given and the scheme is http/https/ssh, carry the scheme over as the protocol parameter
385 # - in all other cases leave the parameters alone and simply re-encode the URL
386 # The arguments to encodeurl are: (scheme, host, path, user, password, OrderedDict([('key', 'value')]))
387 if user:
388 if 'protocol' not in parms:
389 parms['protocol'] = 'ssh'
390 elif scheme in ('http', 'https', 'ssh') and 'protocol' not in parms:
391 parms['protocol'] = scheme
392 # Always re-encode with the 'git://' scheme
393 fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms))
394 return fUrl
395 else:
396 return uri
397
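Worked examples for reformat_git_uri() (hypothetical URLs; assumes bb.fetch2 is importable, since the function relies on decodeurl/encodeurl):

    from recipetool.create import reformat_git_uri

    reformat_git_uri('https://github.com/foo/bar.git')
    # -> 'git://github.com/foo/bar.git;protocol=https'
    reformat_git_uri('git@gitlab.example.com:foo/bar.git')
    # -> 'git://git@gitlab.example.com/foo/bar.git;protocol=ssh'
    #    scheme added, ':' host/path separator repaired, user implies ssh
    reformat_git_uri('https://example.com/foo.tar.gz')
    # -> unchanged: not recognised as a git URL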
398def is_package(url):
399 '''Check if a URL points to a package'''
400 checkurl = url.split(';', 1)[0]
401 if checkurl.endswith(('.deb', '.ipk', '.rpm', '.srpm')):
402 return True
403 return False
404
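And for is_package(), which only looks at the URL's file extension after stripping any parameters:

    from recipetool.create import is_package

    is_package('https://example.com/pool/foo_1.0_amd64.deb')      # True
    is_package('https://example.com/foo-1.0-1.src.rpm;name=foo')  # True
    is_package('https://example.com/foo-1.0.tar.gz')              # False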
405def create_recipe(args):
406 import bb.process
407 import tempfile
408 import shutil
409 import oe.recipeutils
410
411 pkgarch = ""
412 if args.machine:
413 pkgarch = "${MACHINE_ARCH}"
414
415 extravalues = {}
416 checksums = {}
417 tempsrc = ''
418 source = args.source
419 srcsubdir = ''
420 srcrev = '${AUTOREV}'
421 srcbranch = ''
422 scheme = ''
423 storeTagName = ''
424 pv_srcpv = False
425
426 handled = []
427 classes = []
428
429 # Find all plugins that want to register handlers
430 logger.debug('Loading recipe handlers')
431 raw_handlers = []
432 for plugin in plugins:
433 if hasattr(plugin, 'register_recipe_handlers'):
434 plugin.register_recipe_handlers(raw_handlers)
435 # Sort handlers by priority
436 handlers = []
437 for i, handler in enumerate(raw_handlers):
438 if isinstance(handler, tuple):
439 handlers.append((handler[0], handler[1], i))
440 else:
441 handlers.append((handler, 0, i))
442 handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
443 for handler, priority, _ in handlers:
444 logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
445 setattr(handler, '_devtool', args.devtool)
446 handlers = [item[0] for item in handlers]
447
448 fetchuri = None
449 for handler in handlers:
450 if hasattr(handler, 'process_url'):
451 ret = handler.process_url(args, classes, handled, extravalues)
452 if 'url' in handled and ret:
453 fetchuri = ret
454 break
455
456 if os.path.isfile(source):
457 source = 'file://%s' % os.path.abspath(source)
458
459 if scriptutils.is_src_url(source):
460 # Warn about github archive URLs
461 if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
462 logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
463 # Fetch a URL
464 if not fetchuri:
465 fetchuri = reformat_git_uri(urldefrag(source)[0])
466 if args.binary:
467 # Assume the archive contains the directory structure verbatim
468 # so we need to extract to a subdirectory
469 fetchuri += ';subdir=${BPN}'
470 srcuri = fetchuri
471 rev_re = re.compile(';rev=([^;]+)')
472 res = rev_re.search(srcuri)
473 if res:
474 if args.srcrev:
475 logger.error('rev= parameter and -S/--srcrev option cannot both be specified - use one or the other')
476 sys.exit(1)
477 if args.autorev:
478 logger.error('rev= parameter and -a/--autorev option cannot both be specified - use one or the other')
479 sys.exit(1)
480 srcrev = res.group(1)
481 srcuri = rev_re.sub('', srcuri)
482 elif args.srcrev:
483 srcrev = args.srcrev
484
485 # Check whether the user provided any branch info in the fetchuri.
486 # If so, skip all branch checking below and honour the user's input.
487 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(fetchuri)
488 srcbranch = params.get('branch')
489 if args.srcbranch:
490 if srcbranch:
491 logger.error('branch= parameter and -B/--srcbranch option cannot both be specified - use one or the other')
492 sys.exit(1)
493 srcbranch = args.srcbranch
494 params['branch'] = srcbranch
495 nobranch = params.get('nobranch')
496 if nobranch and srcbranch:
497 logger.error('nobranch= cannot be used if you specify a branch')
498 sys.exit(1)
499 tag = params.get('tag')
500 if not srcbranch and not nobranch and srcrev != '${AUTOREV}':
501 # Append nobranch=1 in the following conditions:
502 # 1. User did not set 'branch=' in srcuri, and
503 # 2. User did not set 'nobranch=1' in srcuri, and
504 # 3. Source revision is not '${AUTOREV}'
505 params['nobranch'] = '1'
506 if tag:
507 # Keep a copy of the tag, append nobranch=1, then remove the tag from the URL.
508 # The bitbake fetcher is unable to fetch when ${AUTOREV} and a tag are set at the same time.
509 storeTagName = params['tag']
510 params['nobranch'] = '1'
511 del params['tag']
512 # Assume 'master' branch if not set
513 if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
514 params['branch'] = 'master'
515 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
516
517 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
518 bb.utils.mkdirhier(tmpparent)
519 tempsrc = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
520 srctree = os.path.join(tempsrc, 'source')
521
522 try:
523 checksums, ftmpdir = scriptutils.fetch_url(tinfoil, fetchuri, srcrev, srctree, logger, preserve_tmp=args.keep_temp)
524 except scriptutils.FetchUrlFailure as e:
525 logger.error(str(e))
526 sys.exit(1)
527
528 if ftmpdir and args.keep_temp:
529 logger.info('Fetch temp directory is %s' % ftmpdir)
530
531 dirlist = os.listdir(srctree)
532 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
533 if len(dirlist) == 1:
534 singleitem = os.path.join(srctree, dirlist[0])
535 if os.path.isdir(singleitem):
536 # We unpacked a single directory, so we should use that
537 srcsubdir = dirlist[0]
538 srctree = os.path.join(srctree, srcsubdir)
539 else:
540 check_single_file(dirlist[0], fetchuri)
541 elif len(dirlist) == 0:
542 if '/' in fetchuri:
543 fn = os.path.join(tinfoil.config_data.getVar('DL_DIR'), fetchuri.split('/')[-1])
544 if os.path.isfile(fn):
545 check_single_file(fn, fetchuri)
546 # If we've got to here then there's no source so we might as well give up
547 logger.error('URL %s resulted in an empty source tree' % fetchuri)
548 sys.exit(1)
549
550 # This check helps ensure that the recipe recipetool/devtool creates
551 # can actually be parsed and built by bitbake.
552 # If no branch name was supplied, determine the branch from the SRCREV provided.
553 if not srcbranch and not nobranch and srcrev and (srcrev != '${AUTOREV}') and scheme in ['git', 'gitsm']:
554 try:
555 cmd = 'git branch -r --contains'
556 check_branch, check_branch_err = bb.process.run('%s %s' % (cmd, srcrev), cwd=srctree)
557 except bb.process.ExecutionError as err:
558 logger.error(str(err))
559 sys.exit(1)
560 get_branch = [x.strip() for x in check_branch.splitlines()]
561 # Remove HEAD reference point and drop remote prefix
562 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
563 if 'master' in get_branch:
564 # Even when get_branch contains multiple branches, if 'master' is one
565 # of them, default to 'master'
566 srcbranch = 'master'
567 elif len(get_branch) == 1:
568 # If 'master' isn't in get_branch and only ONE branch contains the revision, use that branch
569 srcbranch = get_branch[0]
570 else:
571 # If the revision is on more than one branch, display an error and exit.
572 mbrch = '\n ' + '\n '.join(get_branch)
573 logger.error('Revision %s was found on multiple branches: %s\nPlease provide the correct branch with -B/--srcbranch' % (srcrev, mbrch))
574 sys.exit(1)
575
576 # Since we might have a value in srcbranch, we need to
577 # reconstruct the srcuri to include 'branch' in params.
578 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
579 if scheme in ['git', 'gitsm']:
580 params['branch'] = srcbranch or 'master'
581
582 if storeTagName and scheme in ['git', 'gitsm']:
583 # Check srcrev using tag and check validity of the tag
584 cmd = ('git rev-parse --verify %s' % (storeTagName))
585 try:
586 check_tag, check_tag_err = bb.process.run('%s' % cmd, cwd=srctree)
587 srcrev = check_tag.split()[0]
588 except bb.process.ExecutionError as err:
589 logger.error(str(err))
590 logger.error("Possibly wrong tag name is provided")
591 sys.exit(1)
592 # Drop tag from srcuri as it will have conflicts with SRCREV during recipe parse.
593 del params['tag']
594 srcuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
595
596 if os.path.exists(os.path.join(srctree, '.gitmodules')) and srcuri.startswith('git://'):
597 srcuri = 'gitsm://' + srcuri[6:]
598 logger.info('Fetching submodules...')
599 bb.process.run('git submodule update --init --recursive', cwd=srctree)
600
601 if is_package(fetchuri):
602 localdata = bb.data.createCopy(tinfoil.config_data)
603 pkgfile = bb.fetch2.localpath(fetchuri, localdata)
604 if pkgfile:
605 tmpfdir = tempfile.mkdtemp(prefix='recipetool-')
606 try:
607 if pkgfile.endswith(('.deb', '.ipk')):
608 stdout, _ = bb.process.run('ar x %s' % pkgfile, cwd=tmpfdir)
609 stdout, _ = bb.process.run('tar xf control.tar.gz', cwd=tmpfdir)
610 values = convert_debian(tmpfdir)
611 extravalues.update(values)
612 elif pkgfile.endswith(('.rpm', '.srpm')):
613 stdout, _ = bb.process.run('rpm -qp --xml %s > pkginfo.xml' % pkgfile, cwd=tmpfdir)
614 values = convert_rpm_xml(os.path.join(tmpfdir, 'pkginfo.xml'))
615 extravalues.update(values)
616 finally:
617 shutil.rmtree(tmpfdir)
618 else:
619 # Assume we're pointing to an existing source tree
620 if args.extract_to:
621 logger.error('--extract-to cannot be specified if source is a directory')
622 sys.exit(1)
623 if not os.path.isdir(source):
624 logger.error('Invalid source directory %s' % source)
625 sys.exit(1)
626 srctree = source
627 srcuri = ''
628 if os.path.exists(os.path.join(srctree, '.git')):
629 # Try to get upstream repo location from origin remote
630 try:
631 stdout, _ = bb.process.run('git remote -v', cwd=srctree, shell=True)
632 except bb.process.ExecutionError as e:
633 stdout = None
634 if stdout:
635 for line in stdout.splitlines():
636 splitline = line.split()
637 if len(splitline) > 1:
638 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
639 srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
640 srcsubdir = 'git'
641 break
642
643 if args.src_subdir:
644 srcsubdir = os.path.join(srcsubdir, args.src_subdir)
645 srctree_use = os.path.abspath(os.path.join(srctree, args.src_subdir))
646 else:
647 srctree_use = os.path.abspath(srctree)
648
649 if args.outfile and os.path.isdir(args.outfile):
650 outfile = None
651 outdir = args.outfile
652 else:
653 outfile = args.outfile
654 outdir = None
655 if outfile and outfile != '-':
656 if os.path.exists(outfile):
657 logger.error('Output file %s already exists' % outfile)
658 sys.exit(1)
659
660 lines_before = []
661 lines_after = []
662
663 lines_before.append('# Recipe created by %s' % os.path.basename(sys.argv[0]))
664 lines_before.append('# This is the basis of a recipe and may need further editing in order to be fully functional.')
665 lines_before.append('# (Feel free to remove these comments when editing.)')
666 # We need a blank line here so that patch_recipe_lines can rewind before the LICENSE comments
667 lines_before.append('')
668
669 # We'll come back and replace this later in handle_license_vars()
670 lines_before.append('##LICENSE_PLACEHOLDER##')
671
672
673 # FIXME This is kind of a hack, we probably ought to be using bitbake to do this
674 pn = None
675 pv = None
676 if outfile:
677 recipefn = os.path.splitext(os.path.basename(outfile))[0]
678 fnsplit = recipefn.split('_')
679 if len(fnsplit) > 1:
680 pn = fnsplit[0]
681 pv = fnsplit[1]
682 else:
683 pn = recipefn
684
685 if args.version:
686 pv = args.version
687
688 if args.name:
689 pn = args.name
690 if args.name.endswith('-native'):
691 if args.also_native:
692 logger.error('--also-native cannot be specified for a recipe named *-native (*-native denotes a recipe that is already only for native) - either remove the -native suffix from the name or drop --also-native')
693 sys.exit(1)
694 classes.append('native')
695 elif args.name.startswith('nativesdk-'):
696 if args.also_native:
697 logger.error('--also-native cannot be specified for a recipe named nativesdk-* (nativesdk-* denotes a recipe that is already only for nativesdk)')
698 sys.exit(1)
699 classes.append('nativesdk')
700
701 if pv and pv not in 'git svn hg'.split():
702 realpv = pv
703 else:
704 realpv = None
705
706 if not srcuri:
707 lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
708 lines_before.append('SRC_URI = "%s"' % srcuri)
709 shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
710 for key, value in sorted(checksums.items()):
711 if key in shown_checksums:
712 lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
713 if srcuri and supports_srcrev(srcuri):
714 lines_before.append('')
715 lines_before.append('# Modify these as desired')
716 # Note: we have code to replace realpv further down if it gets set to some other value
717 scheme, _, _, _, _, _ = bb.fetch2.decodeurl(srcuri)
718 if scheme in ['git', 'gitsm']:
719 srcpvprefix = 'git'
720 elif scheme == 'svn':
721 srcpvprefix = 'svnr'
722 else:
723 srcpvprefix = scheme
724 lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix))
725 pv_srcpv = True
726 if not args.autorev and srcrev == '${AUTOREV}':
727 if os.path.exists(os.path.join(srctree, '.git')):
728 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
729 srcrev = stdout.rstrip()
730 lines_before.append('SRCREV = "%s"' % srcrev)
731 if args.provides:
732 lines_before.append('PROVIDES = "%s"' % args.provides)
733 lines_before.append('')
734
735 if srcsubdir and not args.binary:
736 # (for binary packages we explicitly specify subdir= when fetching to
737 # match the default value of S, so we don't need to set it in that case)
738 lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir)
739 lines_before.append('')
740
741 if pkgarch:
742 lines_after.append('PACKAGE_ARCH = "%s"' % pkgarch)
743 lines_after.append('')
744
745 if args.binary:
746 lines_after.append('INSANE_SKIP:${PN} += "already-stripped"')
747 lines_after.append('')
748
749 if args.npm_dev:
750 extravalues['NPM_INSTALL_DEV'] = 1
751
752 # Apply the handlers
753 if args.binary:
754 classes.append('bin_package')
755 handled.append('buildsystem')
756
757 for handler in handlers:
758 handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)
759
760 # native and nativesdk classes are special and must be inherited last
761 # If present, put them at the end of the classes list
762 classes.sort(key=lambda c: c in ("native", "nativesdk"))
763
764 extrafiles = extravalues.pop('extrafiles', {})
765 extra_pn = extravalues.pop('PN', None)
766 extra_pv = extravalues.pop('PV', None)
767
768 if extra_pv and not realpv:
769 realpv = extra_pv
770 if not validate_pv(realpv):
771 realpv = None
772 else:
773 realpv = realpv.lower().split()[0]
774 if '_' in realpv:
775 realpv = realpv.replace('_', '-')
776 if extra_pn and not pn:
777 pn = extra_pn
778 if pn.startswith('GNU '):
779 pn = pn[4:]
780 if ' ' in pn:
781 # Probably a descriptive identifier rather than a proper name
782 pn = None
783 else:
784 pn = pn.lower()
785 if '_' in pn:
786 pn = pn.replace('_', '-')
787
788 if (srcuri and not realpv) or not pn:
789 name_pn, name_pv = determine_from_url(srcuri)
790 if name_pn and not pn:
791 pn = name_pn
792 if name_pv and not realpv:
793 realpv = name_pv
794
795 licvalues = handle_license_vars(srctree_use, lines_before, handled, extravalues, tinfoil.config_data)
796
797 if not outfile:
798 if not pn:
799 log_error_cond('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile', args.devtool)
800 # devtool looks for this specific exit code, so don't change it
801 sys.exit(15)
802 else:
803 if srcuri and srcuri.startswith(('gitsm://', 'git://', 'hg://', 'svn://')):
804 suffix = srcuri.split(':', 1)[0]
805 if suffix == 'gitsm':
806 suffix = 'git'
807 outfile = '%s_%s.bb' % (pn, suffix)
808 elif realpv:
809 outfile = '%s_%s.bb' % (pn, realpv)
810 else:
811 outfile = '%s.bb' % pn
812 if outdir:
813 outfile = os.path.join(outdir, outfile)
814 # We need to check this again
815 if os.path.exists(outfile):
816 logger.error('Output file %s already exists' % outfile)
817 sys.exit(1)
818
819 # Move any extra files the plugins created to a directory next to the recipe
820 if extrafiles:
821 if outfile == '-':
822 extraoutdir = pn
823 else:
824 extraoutdir = os.path.join(os.path.dirname(outfile), pn)
825 bb.utils.mkdirhier(extraoutdir)
826 for destfn, extrafile in extrafiles.items():
827 shutil.move(extrafile, os.path.join(extraoutdir, destfn))
828
829 lines = lines_before
830 lines_before = []
831 skipblank = True
832 for line in lines:
833 if skipblank:
834 skipblank = False
835 if not line:
836 continue
837 if line.startswith('S = '):
838 if realpv and pv not in 'git svn hg'.split():
839 line = line.replace(realpv, '${PV}')
840 if pn:
841 line = line.replace(pn, '${BPN}')
842 if line == 'S = "${WORKDIR}/${BPN}-${PV}"':
843 skipblank = True
844 continue
845 elif line.startswith('SRC_URI = '):
846 if realpv and not pv_srcpv:
847 line = line.replace(realpv, '${PV}')
848 elif line.startswith('PV = '):
849 if realpv:
850 # Replace the first part of the PV value
851 line = re.sub(r'"[^+]*\+', '"%s+' % realpv, line)
852 lines_before.append(line)
853
854 if args.also_native:
855 lines = lines_after
856 lines_after = []
857 bbclassextend = None
858 for line in lines:
859 if line.startswith('BBCLASSEXTEND ='):
860 splitval = line.split('"')
861 if len(splitval) > 1:
862 bbclassextend = splitval[1].split()
863 if 'native' not in bbclassextend:
864 bbclassextend.insert(0, 'native')
865 line = 'BBCLASSEXTEND = "%s"' % ' '.join(bbclassextend)
866 lines_after.append(line)
867 if not bbclassextend:
868 lines_after.append('BBCLASSEXTEND = "native"')
869
870 postinst = ("postinst", extravalues.pop('postinst', None))
871 postrm = ("postrm", extravalues.pop('postrm', None))
872 preinst = ("preinst", extravalues.pop('preinst', None))
873 prerm = ("prerm", extravalues.pop('prerm', None))
874 funcs = [postinst, postrm, preinst, prerm]
875 for func in funcs:
876 if func[1]:
877 RecipeHandler.genfunction(lines_after, 'pkg_%s_${PN}' % func[0], func[1])
878
879 outlines = []
880 outlines.extend(lines_before)
881 if classes:
882 if outlines[-1] and not outlines[-1].startswith('#'):
883 outlines.append('')
884 outlines.append('inherit %s' % ' '.join(classes))
885 outlines.append('')
886 outlines.extend(lines_after)
887
888 outlines = [line.rstrip('\n') + '\n' for line in outlines]
889
890 if extravalues:
891 _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True)
892
893 if args.extract_to:
894 scriptutils.git_convert_standalone_clone(srctree)
895 if os.path.isdir(args.extract_to):
896 # If the directory exists we'll move the temp dir into it instead of
897 # its contents - of course, we could try to always move its contents
898 # but that is a pain if there are symlinks; the simplest solution is
899 # to just remove it first
900 os.rmdir(args.extract_to)
901 shutil.move(srctree, args.extract_to)
902 if tempsrc == srctree:
903 tempsrc = None
904 log_info_cond('Source extracted to %s' % args.extract_to, args.devtool)
905
906 if outfile == '-':
907 sys.stdout.write(''.join(outlines) + '\n')
908 else:
909 with open(outfile, 'w') as f:
910 lastline = None
911 for line in outlines:
912 if not lastline and not line:
913 # Skip extra blank lines
914 continue
915 f.write('%s' % line)
916 lastline = line
917 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
918 tinfoil.modified_files()
919
920 if tempsrc:
921 if args.keep_temp:
922 logger.info('Preserving temporary directory %s' % tempsrc)
923 else:
924 shutil.rmtree(tempsrc)
925
926 return 0
927
928def check_single_file(fn, fetchuri):
929 """Determine if a single downloaded file is something we can't handle"""
930 with open(fn, 'r', errors='surrogateescape') as f:
931 if '<html' in f.read(100).lower():
932 logger.error('Fetching "%s" returned a single HTML page - check the URL is correct and functional' % fetchuri)
933 sys.exit(1)
934
935def split_value(value):
936 if isinstance(value, str):
937 return value.split()
938 else:
939 return value
940
941def fixup_license(value):
942 # Ensure license expressions containing OR are wrapped in parentheses
943 if '|' in value:
944 return '(' + value + ')'
945 return value
946
947def tidy_licenses(value):
948 """Flat, split and sort licenses"""
949 from oe.license import flattened_licenses
950 def _choose(a, b):
951 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
952 return ["(%s | %s)" % (str_a, str_b)]
953 if not isinstance(value, str):
954 value = " & ".join(value)
955 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
956
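Illustrative results for the two helpers above; fixup_license() is plain string handling, while tidy_licenses() additionally needs oe.license from the OE metadata to be importable:

    from recipetool.create import fixup_license, tidy_licenses

    fixup_license('GPL-2.0-only | MIT')  # -> '(GPL-2.0-only | MIT)'
    fixup_license('MIT & BSD-3-Clause')  # -> 'MIT & BSD-3-Clause' (no OR, unchanged)

    tidy_licenses('MIT & BSD-3-Clause & MIT')
    # -> ['BSD-3-Clause', 'MIT']  flattened, deduplicated, sorted case-insensitively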
957def handle_license_vars(srctree, lines_before, handled, extravalues, d):
958 lichandled = [x for x in handled if x[0] == 'license']
959 if lichandled:
960 # Someone else has already handled the license vars, just return their value
961 return lichandled[0][1]
962
963 licvalues = find_licenses(srctree, d)
964 licenses = []
965 lic_files_chksum = []
966 lic_unknown = []
967 lines = []
968 if licvalues:
969 for licvalue in licvalues:
970 license = licvalue[0]
971 lics = tidy_licenses(fixup_license(license))
972 lics = [lic for lic in lics if lic not in licenses]
973 if len(lics):
974 licenses.extend(lics)
975 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
976 if license == 'Unknown':
977 lic_unknown.append(licvalue[1])
978 if lic_unknown:
979 lines.append('#')
980 lines.append('# The following license files were not able to be identified and are')
981 lines.append('# represented as "Unknown" below, you will need to check them yourself:')
982 for licfile in lic_unknown:
983 lines.append('# %s' % licfile)
984
985 extra_license = tidy_licenses(extravalues.pop('LICENSE', ''))
986 if extra_license:
987 if licenses == ['Unknown']:
988 licenses = extra_license
989 else:
990 for item in extra_license:
991 if item not in licenses:
992 licenses.append(item)
993 extra_lic_files_chksum = split_value(extravalues.pop('LIC_FILES_CHKSUM', []))
994 for item in extra_lic_files_chksum:
995 if item not in lic_files_chksum:
996 lic_files_chksum.append(item)
997
998 if lic_files_chksum:
999 # We are going to set the vars, so prepend the standard disclaimer
1000 lines.insert(0, '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is')
1001 lines.insert(1, '# your responsibility to verify that the values are complete and correct.')
1002 else:
1003 # Without LIC_FILES_CHKSUM we set LICENSE = "CLOSED" to allow the
1004 # user to get started easily
1005 lines.append('# Unable to find any files that looked like license statements. Check the accompanying')
1006 lines.append('# documentation and source headers and set LICENSE and LIC_FILES_CHKSUM accordingly.')
1007 lines.append('#')
1008 lines.append('# NOTE: LICENSE is being set to "CLOSED" to allow you to at least start building - if')
1009 lines.append('# this is not accurate with respect to the licensing of the software being built (it')
1010 lines.append('# will not be in most cases) you must specify the correct value before using this')
1011 lines.append('# recipe for anything other than initial testing/development!')
1012 licenses = ['CLOSED']
1013
1014 if extra_license and sorted(licenses) != sorted(extra_license):
1015 lines.append('# NOTE: Original package / source metadata indicates license is: %s' % ' & '.join(extra_license))
1016
1017 if len(licenses) > 1:
1018 lines.append('#')
1019 lines.append('# NOTE: multiple licenses have been detected; they have been separated with &')
1020 lines.append('# in the LICENSE value for now since it is a reasonable assumption that all')
1021 lines.append('# of the licenses apply. If instead there is a choice between the multiple')
1022 lines.append('# licenses then you should change the value to separate the licenses with |')
1023 lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
1024 lines.append('# to determine which situation is applicable.')
1025
1026 lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
1027 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
1028 lines.append('')
1029
1030 # Replace the placeholder so we get the values in the right place in the recipe file
1031 try:
1032 pos = lines_before.index('##LICENSE_PLACEHOLDER##')
1033 except ValueError:
1034 pos = -1
1035 if pos == -1:
1036 lines_before.extend(lines)
1037 else:
1038 lines_before[pos:pos+1] = lines
1039
1040 handled.append(('license', licvalues))
1041 return licvalues
1042
1043def get_license_md5sums(d, static_only=False, linenumbers=False):
1044 import bb.utils
1045 import csv
1046 md5sums = {}
1047 if not static_only and not linenumbers:
1048 # Gather md5sums of license files in common license dir
1049 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1050 for fn in os.listdir(commonlicdir):
1051 md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
1052 md5sums[md5value] = fn
1053
1054 # The following were extracted from common values in various recipes
1055 # (double checking the license against the license file itself, not just
1056 # the LICENSE value in the recipe)
1057
1058 # Read license md5sums from csv file
1059 scripts_path = os.path.dirname(os.path.realpath(__file__))
1060 for path in (d.getVar('BBPATH').split(':')
1061 + [os.path.join(scripts_path, '..', '..')]):
1062 csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv')
1063 if os.path.isfile(csv_path):
1064 with open(csv_path, newline='') as csv_file:
1065 fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5']
1066 reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
1067 for row in reader:
1068 if linenumbers:
1069 md5sums[row['md5sum']] = (
1070 row['license'], row['beginline'], row['endline'], row['md5'])
1071 else:
1072 md5sums[row['md5sum']] = row['license']
1073
1074 return md5sums
1075
1076def crunch_known_licenses(d):
1077 '''
1078 Calculate the MD5 checksums for the crunched versions of all common
1079 licenses, and add further known checksums for specific license texts.
1080 '''
1081
1082 crunched_md5sums = {}
1083
1084 # common licenses
1085 crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only'
1086 crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only'
1087 crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only'
1088
1089 # The following two were gleaned from the "forever" npm package
1090 crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
1091 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
1092 crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
1093 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
1094 crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only'
1095 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
1096 crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only'
1097 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
1098 crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only'
1099 # unixODBC-2.3.4 COPYING
1100 crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only'
1101 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
1102 crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only'
1103 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
1104 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
1105
1106 # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD
1107 crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause'
1108 # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE
1109 crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause'
1110 # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE
1111 crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause'
1112 # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE
1113 crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause'
1114 # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE
1115 crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause'
1116 # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE
1117 crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause'
1118 # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE
1119 crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause'
1120 # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE
1121 crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause'
1122 # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE
1123 crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause'
1124 # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE
1125 crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT'
1126 # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE
1127 crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT'
1128 # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE
1129 crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0'
1130 # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md
1131 crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0'
1132 # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE
1133 crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0'
1134 # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt
1135 crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0'
1136 # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE
1137 crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0'
1138 # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE
1139 crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense'
1140 # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
1141 crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'
1142
1143 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1144 for fn in sorted(os.listdir(commonlicdir)):
1145 md5value, lictext = crunch_license(os.path.join(commonlicdir, fn))
1146 if md5value not in crunched_md5sums:
1147 crunched_md5sums[md5value] = fn
1148 elif fn != crunched_md5sums[md5value]:
1149 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn))
1150 else:
1151 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value]))
1152
1153 return crunched_md5sums
1154
1155def crunch_license(licfile):
1156 '''
1157 Remove non-material text from a license file and then calculate its
1158 md5sum. This works well for licenses that contain a copyright statement,
1159 but is also a useful way to handle people's insistence upon reformatting
1160 the license text slightly (with no material difference to the text of the
1161 license).
1162 '''
1163
1164 import oe.utils
1165
1166 # Note: these are carefully constructed!
1167 license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
1168 license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
1169 copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
1170 disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
1171 email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
1172 header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
1173 tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
1174 url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
1175
1176 lictext = []
1177 with open(licfile, 'r', errors='surrogateescape') as f:
1178 for line in f:
1179 # Drop opening statements
1180 if copyright_re.match(line):
1181 continue
1182 elif disclaimer_re.match(line):
1183 continue
1184 elif email_re.match(line):
1185 continue
1186 elif header_re.match(line):
1187 continue
1188 elif tag_re.match(line):
1189 continue
1190 elif url_re.match(line):
1191 continue
1192 elif license_title_re.match(line):
1193 continue
1194 elif license_statement_re.match(line):
1195 continue
1196 # Strip comment symbols
1197 line = line.replace('*', '') \
1198 .replace('#', '')
1199 # Unify spelling
1200 line = line.replace('sub-license', 'sublicense')
1201 # Squash spaces
1202 line = oe.utils.squashspaces(line.strip())
1203 # Replace smart quotes, double quotes and backticks with single quotes
1204 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
1205 # Unify brackets
1206 line = line.replace("{", "[").replace("}", "]")
1207 if line:
1208 lictext.append(line)
1209
1210 m = hashlib.md5()
1211 try:
1212 m.update(' '.join(lictext).encode('utf-8'))
1213 md5val = m.hexdigest()
1214 except UnicodeEncodeError:
1215 md5val = None
1216 lictext = ''
1217 return md5val, lictext
1218
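A runnable sketch (assuming oe.utils is importable) showing that two copies of a license differing only in immaterial text crunch to the same md5sum:

    import tempfile
    from recipetool.create import crunch_license

    def _write(text):
        f = tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False)
        f.write(text)
        f.close()
        return f.name

    # Hypothetical license fragments: same material text, different
    # copyright holders and spacing
    a = _write('Copyright (c) 2024 Example Corp\nPermission is hereby granted...\n')
    b = _write('Copyright (C) 1999 Someone Else\nPermission  is  hereby granted...\n')
    md5_a, _ = crunch_license(a)
    md5_b, _ = crunch_license(b)
    assert md5_a == md5_b  # copyright lines dropped, whitespace squashed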
1219def find_license_files(srctree):
1220 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
1221 skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")
1222 licfiles = []
1223 for root, dirs, files in os.walk(srctree):
1224 for fn in files:
1225 if fn.endswith(skip_extensions):
1226 continue
1227 for spec in licspecs:
1228 if fnmatch.fnmatch(fn, spec):
1229 fullpath = os.path.join(root, fn)
1230 if fullpath not in licfiles:
1231 licfiles.append(fullpath)
1232
1233 return licfiles
1234
1235def match_licenses(licfiles, srctree, d):
1236 import bb
1237 md5sums = get_license_md5sums(d)
1238
1239 crunched_md5sums = crunch_known_licenses(d)
1240
1241 licenses = []
1242 for licfile in sorted(licfiles):
1243 resolved_licfile = d.expand(licfile)
1244 md5value = bb.utils.md5_file(resolved_licfile)
1245 license = md5sums.get(md5value, None)
1246 if not license:
1247 crunched_md5, lictext = crunch_license(resolved_licfile)
1248 license = crunched_md5sums.get(crunched_md5, None)
1249 if lictext and not license:
1250 license = 'Unknown'
1251 logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
1252 "and replace `Unknown` with the license:\n" \
1253 "%s,Unknown" % (os.path.relpath(licfile, srctree + "/.."), md5value))
1254 if license:
1255 licenses.append((license, os.path.relpath(licfile, srctree), md5value))
1256
1257 return licenses
1258
1259def find_licenses(srctree, d):
1260 licfiles = find_license_files(srctree)
1261 licenses = match_licenses(licfiles, srctree, d)
1262
1263 # FIXME should we grab at least one source file with a license header and add that too?
1264
1265 return licenses
1266
1267def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
1268 """
1269 Given a list of (license, path, md5sum) tuples as returned by match_licenses()
1270 and a dict of package name to path mappings, write out a set of
1271 package-specific LICENSE values.
1272 """
1273 pkglicenses = {pn: []}
1274 for license, licpath, _ in licvalues:
1275 license = fixup_license(license)
1276 for pkgname, pkgpath in packages.items():
1277 if licpath.startswith(pkgpath + '/'):
1278 if pkgname in pkglicenses:
1279 pkglicenses[pkgname].append(license)
1280 else:
1281 pkglicenses[pkgname] = [license]
1282 break
1283 else:
1284 # Accumulate on the main package
1285 pkglicenses[pn].append(license)
1286 outlicenses = {}
1287 for pkgname in packages:
1288 # Assume AND operator between license files
1289 license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown'
1290 if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses:
1291 license = fallback_licenses[pkgname]
1292 licenses = tidy_licenses(license)
1293 license = ' & '.join(licenses)
1294 outlines.append('LICENSE:%s = "%s"' % (pkgname, license))
1295 outlicenses[pkgname] = licenses
1296 return outlicenses
1297
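A small worked example (hypothetical inputs; tidy_licenses() and hence oe.license are required): license files found under a package's path are attributed to that package, anything else accumulates on the main package:

    from recipetool.create import split_pkg_licenses

    licvalues = [('MIT', 'src/libfoo/COPYING', '<md5-unused>'),
                 ('GPL-2.0-only | MIT', 'COPYING', '<md5-unused>')]
    outlines = []
    split_pkg_licenses(licvalues, {'libfoo': 'src/libfoo'}, outlines)
    # outlines -> ['LICENSE:libfoo = "MIT"']
    # 'COPYING' matched no package path, so it accumulated on '${PN}';
    # note the output loop only iterates over the packages dict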
1298def generate_common_licenses_chksums(common_licenses, d):
1299 lic_files_chksums = []
1300 for license in tidy_licenses(common_licenses):
1301 licfile = '${COMMON_LICENSE_DIR}/' + license
1302 md5value = bb.utils.md5_file(d.expand(licfile))
1303 lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value))
1304 return lic_files_chksums
1305
1306def read_pkgconfig_provides(d):
1307 pkgdatadir = d.getVar('PKGDATA_DIR')
1308 pkgmap = {}
1309 for fn in glob.glob(os.path.join(pkgdatadir, 'shlibs2', '*.pclist')):
1310 with open(fn, 'r') as f:
1311 for line in f:
1312 pkgmap[os.path.basename(line.rstrip())] = os.path.splitext(os.path.basename(fn))[0]
1313 recipemap = {}
1314 for pc, pkg in pkgmap.items():
1315 pkgdatafile = os.path.join(pkgdatadir, 'runtime', pkg)
1316 if os.path.exists(pkgdatafile):
1317 with open(pkgdatafile, 'r') as f:
1318 for line in f:
1319 if line.startswith('PN: '):
1320 recipemap[pc] = line.split(':', 1)[1].strip()
1321 return recipemap
1322
1323def convert_debian(debpath):
1324 value_map = {'Package': 'PN',
1325 'Version': 'PV',
1326 'Section': 'SECTION',
1327 'License': 'LICENSE',
1328 'Homepage': 'HOMEPAGE'}
1329
1330 # FIXME extend this mapping - perhaps use distro_alias.inc?
1331 depmap = {'libz-dev': 'zlib'}
1332
1333 values = {}
1334 depends = []
1335 with open(os.path.join(debpath, 'control'), 'r', errors='surrogateescape') as f:
1336 indesc = False
1337 for line in f:
1338 if indesc:
1339 if line.startswith(' '):
1340 if line.startswith(' This package contains'):
1341 indesc = False
1342 else:
1343 if 'DESCRIPTION' in values:
1344 values['DESCRIPTION'] += ' ' + line.strip()
1345 else:
1346 values['DESCRIPTION'] = line.strip()
1347 else:
1348 indesc = False
1349 if not indesc:
1350 splitline = line.split(':', 1)
1351 if len(splitline) < 2:
1352 continue
1353 key = splitline[0]
1354 value = splitline[1].strip()
1355 if key == 'Build-Depends':
1356 for dep in value.split(','):
1357 dep = dep.split()[0]
1358 mapped = depmap.get(dep, '')
1359 if mapped:
1360 depends.append(mapped)
1361 elif key == 'Description':
1362 values['SUMMARY'] = value
1363 indesc = True
1364 else:
1365 varname = value_map.get(key, None)
1366 if varname:
1367 values[varname] = value
1368 postinst = os.path.join(debpath, 'postinst')
1369 postrm = os.path.join(debpath, 'postrm')
1370 preinst = os.path.join(debpath, 'preinst')
1371 prerm = os.path.join(debpath, 'prerm')
1372 sfiles = [postinst, postrm, preinst, prerm]
1373 for sfile in sfiles:
1374 if os.path.isfile(sfile):
1375 logger.info("Converting %s file to recipe function..." %
1376 os.path.basename(sfile).upper())
1377 content = []
1378 with open(sfile) as f:
1379 for line in f:
1380 if "#!/" in line:
1381 continue
1382 line = line.rstrip("\n")
1383 if line.strip():
1384 content.append(line)
1385 if content:
1386 values[os.path.basename(f.name)] = content
1387
1388 #if depends:
1389 # values['DEPENDS'] = ' '.join(depends)
1390
1391 return values
1392
1393def convert_rpm_xml(xmlfile):
1394 '''Converts the output from rpm -qp --xml to a set of variable values'''
1395 import xml.etree.ElementTree as ElementTree
1396 rpmtag_map = {'Name': 'PN',
1397 'Version': 'PV',
1398 'Summary': 'SUMMARY',
1399 'Description': 'DESCRIPTION',
1400 'License': 'LICENSE',
1401 'Url': 'HOMEPAGE'}
1402
1403 values = {}
1404 tree = ElementTree.parse(xmlfile)
1405 root = tree.getroot()
1406 for child in root:
1407 if child.tag == 'rpmTag':
1408 name = child.attrib.get('name', None)
1409 if name:
1410 varname = rpmtag_map.get(name, None)
1411 if varname:
1412 values[varname] = child[0].text
1413 return values
1414
1415
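A sketch of convert_rpm_xml() in isolation; the XML fragment is hand-written in the shape this code expects from 'rpm -qp --xml' (rpmTag elements carrying a name attribute), so treat the exact rpm output format as an assumption:

    import tempfile
    from recipetool.create import convert_rpm_xml

    xml = '''<rpmHeader>
      <rpmTag name="Name"><string>foo</string></rpmTag>
      <rpmTag name="Version"><string>1.0</string></rpmTag>
      <rpmTag name="License"><string>MIT</string></rpmTag>
    </rpmHeader>'''
    with tempfile.NamedTemporaryFile('w', suffix='.xml', delete=False) as f:
        f.write(xml)
    print(convert_rpm_xml(f.name))
    # -> {'PN': 'foo', 'PV': '1.0', 'LICENSE': 'MIT'}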
1416def register_commands(subparsers):
1417 parser_create = subparsers.add_parser('create',
1418 help='Create a new recipe',
1419 description='Creates a new recipe from a source tree')
1420 parser_create.add_argument('source', help='Path or URL to source')
1421 parser_create.add_argument('-o', '--outfile', help='Specify filename for recipe to create')
1422 parser_create.add_argument('-p', '--provides', help='Specify an alias for the item provided by the recipe')
1423 parser_create.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true')
1424 parser_create.add_argument('-x', '--extract-to', metavar='EXTRACTPATH', help='Assuming source is a URL, fetch it and extract it to the directory specified as %(metavar)s')
1425 parser_create.add_argument('-N', '--name', help='Name to use within recipe (PN)')
1426 parser_create.add_argument('-V', '--version', help='Version to use within recipe (PV)')
1427 parser_create.add_argument('-b', '--binary', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true')
1428 parser_create.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
1429 parser_create.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
1430 group = parser_create.add_mutually_exclusive_group()
1431 group.add_argument('-a', '--autorev', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
1432 group.add_argument('-S', '--srcrev', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
1433 parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
1434 parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
1435 parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
1436 parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class')
1437 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
1438 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
1439 parser_create.set_defaults(func=create_recipe)
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py
deleted file mode 100644
index ec9d510e23..0000000000
--- a/scripts/lib/recipetool/create_buildsys.py
+++ /dev/null
@@ -1,875 +0,0 @@
1# Recipe creation tool - create command build system handlers
2#
3# Copyright (C) 2014-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import re
10import logging
11from recipetool.create import RecipeHandler, validate_pv
12
13logger = logging.getLogger('recipetool')
14
15tinfoil = None
16plugins = None
17
18def plugin_init(pluginlist):
19 # Take a reference to the list so we can use it later
20 global plugins
21 plugins = pluginlist
22
23def tinfoil_init(instance):
24 global tinfoil
25 tinfoil = instance
26
27
28class CmakeRecipeHandler(RecipeHandler):
29 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
30 if 'buildsystem' in handled:
31 return False
32
33 if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
34 classes.append('cmake')
35 values = CmakeRecipeHandler.extract_cmake_deps(lines_before, srctree, extravalues)
36 classes.extend(values.pop('inherit', '').split())
37 for var, value in values.items():
38 lines_before.append('%s = "%s"' % (var, value))
39 lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:')
40 lines_after.append('EXTRA_OECMAKE = ""')
41 lines_after.append('')
42 handled.append('buildsystem')
43 return True
44 return False
45
46 @staticmethod
47 def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
48 # Find all plugins that want to register handlers
49 logger.debug('Loading cmake handlers')
50 handlers = []
51 for plugin in plugins:
52 if hasattr(plugin, 'register_cmake_handlers'):
53 plugin.register_cmake_handlers(handlers)
54
55 values = {}
56 inherits = []
57
58 if cmakelistsfile:
59 srcfiles = [cmakelistsfile]
60 else:
61 srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt'])
62
63 # Note that some of these are non-standard, but probably better to
64 # be able to map them anyway if we see them
65 cmake_pkgmap = {'alsa': 'alsa-lib',
66 'aspell': 'aspell',
67 'atk': 'atk',
68 'bison': 'bison-native',
69 'boost': 'boost',
70 'bzip2': 'bzip2',
71 'cairo': 'cairo',
72 'cups': 'cups',
73 'curl': 'curl',
74 'curses': 'ncurses',
75 'cvs': 'cvs',
76 'drm': 'libdrm',
77 'dbus': 'dbus',
78 'dbusglib': 'dbus-glib',
79 'egl': 'virtual/egl',
80 'expat': 'expat',
81 'flex': 'flex-native',
82 'fontconfig': 'fontconfig',
83 'freetype': 'freetype',
84 'gettext': '',
85 'git': '',
86 'gio': 'glib-2.0',
87 'giounix': 'glib-2.0',
88 'glew': 'glew',
89 'glib': 'glib-2.0',
90 'glib2': 'glib-2.0',
91 'glu': 'libglu',
92 'glut': 'freeglut',
93 'gobject': 'glib-2.0',
94 'gperf': 'gperf-native',
95 'gnutls': 'gnutls',
96 'gtk2': 'gtk+',
97 'gtk3': 'gtk+3',
98 'gtk': 'gtk+3',
99 'harfbuzz': 'harfbuzz',
100 'icu': 'icu',
101 'intl': 'virtual/libintl',
102 'jpeg': 'jpeg',
103 'libarchive': 'libarchive',
104 'libiconv': 'virtual/libiconv',
105 'liblzma': 'xz',
106 'libxml2': 'libxml2',
107 'libxslt': 'libxslt',
108 'opengl': 'virtual/libgl',
109 'openmp': '',
110 'openssl': 'openssl',
111 'pango': 'pango',
112 'perl': '',
113 'perllibs': '',
114 'pkgconfig': '',
115 'png': 'libpng',
116 'pthread': '',
117 'pythoninterp': '',
118 'pythonlibs': '',
119 'ruby': 'ruby-native',
120 'sdl': 'libsdl',
121 'sdl2': 'libsdl2',
122 'subversion': 'subversion-native',
123 'swig': 'swig-native',
124 'tcl': 'tcl-native',
125 'threads': '',
126 'tiff': 'tiff',
127 'wget': 'wget',
128 'x11': 'libx11',
129 'xcb': 'libxcb',
130 'xext': 'libxext',
131 'xfixes': 'libxfixes',
132 'zlib': 'zlib',
133 }
134
135 pcdeps = []
136 libdeps = []
137 deps = []
138 unmappedpkgs = []
139
140 proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
141 pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
142 pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
143 findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
144 findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
145 checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
146 include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
147 subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
148 dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
149
150 def find_cmake_package(pkg):
151 RecipeHandler.load_devel_filemap(tinfoil.config_data)
152 for fn, pn in RecipeHandler.recipecmakefilemap.items():
153 splitname = fn.split('/')
154 if len(splitname) > 1:
155 if splitname[0].lower().startswith(pkg.lower()):
156 if splitname[1] == '%s-config.cmake' % pkg.lower() or splitname[1] == '%sConfig.cmake' % pkg or splitname[1] == 'Find%s.cmake' % pkg:
157 return pn
158 return None
159
160 def interpret_value(value):
161 return value.strip('"')
162
163 def parse_cmake_file(fn, paths=None):
164 searchpaths = (paths or []) + [os.path.dirname(fn)]
165 logger.debug('Parsing file %s' % fn)
166 with open(fn, 'r', errors='surrogateescape') as f:
167 for line in f:
168 line = line.strip()
169 for handler in handlers:
170 if handler.process_line(srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
171 continue
172 res = include_re.match(line)
173 if res:
174 includefn = bb.utils.which(':'.join(searchpaths), res.group(1))
175 if includefn:
176 parse_cmake_file(includefn, searchpaths)
177 else:
178 logger.debug('Unable to recurse into include file %s' % res.group(1))
179 continue
180 res = subdir_re.match(line)
181 if res:
182 subdirfn = os.path.join(os.path.dirname(fn), res.group(1), 'CMakeLists.txt')
183 if os.path.exists(subdirfn):
184 parse_cmake_file(subdirfn, searchpaths)
185 else:
186 logger.debug('Unable to recurse into subdirectory file %s' % subdirfn)
187 continue
188 res = proj_re.match(line)
189 if res:
190 extravalues['PN'] = interpret_value(res.group(1).split()[0])
191 continue
192 res = pkgcm_re.match(line)
193 if res:
194 res = dep_re.findall(res.group(2))
195 if res:
196 pcdeps.extend([interpret_value(x[0]) for x in res])
197 inherits.append('pkgconfig')
198 continue
199 res = pkgsm_re.match(line)
200 if res:
201 res = dep_re.findall(res.group(2))
202 if res:
203 # Note: appending a tuple here!
204 item = tuple((interpret_value(x[0]) for x in res))
205 if len(item) == 1:
206 item = item[0]
207 pcdeps.append(item)
208 inherits.append('pkgconfig')
209 continue
210 res = findpackage_re.match(line)
211 if res:
212 origpkg = res.group(1)
213 pkg = interpret_value(origpkg)
214 found = False
215 for handler in handlers:
216 if handler.process_findpackage(srctree, fn, pkg, deps, outlines, inherits, values):
217 logger.debug('Mapped CMake package %s via handler %s' % (pkg, handler.__class__.__name__))
218 found = True
219 break
220 if found:
221 continue
222 elif pkg == 'Gettext':
223 inherits.append('gettext')
224 elif pkg == 'Perl':
225 inherits.append('perlnative')
226 elif pkg == 'PkgConfig':
227 inherits.append('pkgconfig')
228 elif pkg == 'PythonInterp':
229 inherits.append('python3native')
230 elif pkg == 'PythonLibs':
231 inherits.append('python3-dir')
232 else:
233 # Try to map via looking at installed CMake packages in pkgdata
234 dep = find_cmake_package(pkg)
235 if dep:
236 logger.debug('Mapped CMake package %s to recipe %s via pkgdata' % (pkg, dep))
237 deps.append(dep)
238 else:
239 dep = cmake_pkgmap.get(pkg.lower(), None)
240 if dep:
241 logger.debug('Mapped CMake package %s to recipe %s via internal list' % (pkg, dep))
242 deps.append(dep)
243 elif dep is None:
244 unmappedpkgs.append(origpkg)
245 continue
246 res = checklib_re.match(line)
247 if res:
248 lib = interpret_value(res.group(1))
249 if not lib.startswith('$'):
250 libdeps.append(lib)
251 res = findlibrary_re.match(line)
252 if res:
253 libs = res.group(2).split()
254 for lib in libs:
255 if lib in ['HINTS', 'PATHS', 'PATH_SUFFIXES', 'DOC', 'NAMES_PER_DIR'] or lib.startswith(('NO_', 'CMAKE_', 'ONLY_CMAKE_')):
256 break
257 lib = interpret_value(lib)
258 if not lib.startswith('$'):
259 libdeps.append(lib)
260 if line.lower().startswith('useswig'):
261 deps.append('swig-native')
262 continue
263
264 parse_cmake_file(srcfiles[0])
265
266 if unmappedpkgs:
267 outlines.append('# NOTE: unable to map the following CMake package dependencies: %s' % ' '.join(list(set(unmappedpkgs))))
268
269 RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)
270
271 for handler in handlers:
272 handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)
273
274 if inherits:
275 values['inherit'] = ' '.join(list(set(inherits)))
276
277 return values
278
279
280class CmakeExtensionHandler(object):
281 '''Base class for CMake extension handlers'''
282 def process_line(self, srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
283 '''
284        Handle a line parsed out of a CMake file.
285 Return True if you've completely handled the passed in line, otherwise return False.
286 '''
287 return False
288
289 def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values):
290 '''
291 Handle a find_package package parsed out of a CMake file.
292 Return True if you've completely handled the passed in package, otherwise return False.
293 '''
294 return False
295
296    def post_process(self, srctree, libdeps, pcdeps, deps, outlines, inherits, values):
297 '''
298 Apply any desired post-processing on the output
299 '''
300 return
301
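# A minimal sketch of a plugin-provided handler (all names here are hypothetical;
# the registration hook is assumed to mirror register_autotools_handlers() below):
#
#   class MyCmakeHandler(CmakeExtensionHandler):
#       def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values):
#           if pkg == 'MyLib':           # a project-specific find_package() name
#               deps.append('mylib')     # map it to its (hypothetical) recipe
#               return True              # fully handled, stop further mapping
#           return False
#
#   def register_cmake_handlers(handlers):
#       handlers.append(MyCmakeHandler())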
302
303
304class SconsRecipeHandler(RecipeHandler):
305 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
306 if 'buildsystem' in handled:
307 return False
308
309 if RecipeHandler.checkfiles(srctree, ['SConstruct', 'Sconstruct', 'sconstruct']):
310 classes.append('scons')
311 lines_after.append('# Specify any options you want to pass to scons using EXTRA_OESCONS:')
312 lines_after.append('EXTRA_OESCONS = ""')
313 lines_after.append('')
314 handled.append('buildsystem')
315 return True
316 return False
317
318
319class QmakeRecipeHandler(RecipeHandler):
320 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
321 if 'buildsystem' in handled:
322 return False
323
324 if RecipeHandler.checkfiles(srctree, ['*.pro']):
325 classes.append('qmake2')
326 handled.append('buildsystem')
327 return True
328 return False
329
330
331class AutotoolsRecipeHandler(RecipeHandler):
332 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
333 if 'buildsystem' in handled:
334 return False
335
336 autoconf = False
337 if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']):
338 autoconf = True
339 values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, extravalues)
340 classes.extend(values.pop('inherit', '').split())
341 for var, value in values.items():
342 lines_before.append('%s = "%s"' % (var, value))
343 else:
344 conffile = RecipeHandler.checkfiles(srctree, ['configure'])
345 if conffile:
346 # Check if this is just a pre-generated autoconf configure script
347 with open(conffile[0], 'r', errors='surrogateescape') as f:
348 for i in range(1, 10):
349 if 'Generated by GNU Autoconf' in f.readline():
350 autoconf = True
351 break
352
353 if autoconf and not ('PV' in extravalues and 'PN' in extravalues):
354 # Last resort
355 conffile = RecipeHandler.checkfiles(srctree, ['configure'])
356 if conffile:
357 with open(conffile[0], 'r', errors='surrogateescape') as f:
358 for line in f:
359 line = line.strip()
360 if line.startswith('VERSION=') or line.startswith('PACKAGE_VERSION='):
361 pv = line.split('=')[1].strip('"\'')
362                        if pv and 'PV' not in extravalues and validate_pv(pv):
363 extravalues['PV'] = pv
364 elif line.startswith('PACKAGE_NAME=') or line.startswith('PACKAGE='):
365 pn = line.split('=')[1].strip('"\'')
366                        if pn and 'PN' not in extravalues:
367 extravalues['PN'] = pn
368
369 if autoconf:
370 lines_before.append('')
371 lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory')
372 lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the')
373 lines_before.append('# inherit line')
374 classes.append('autotools')
375 lines_after.append('# Specify any options you want to pass to the configure script using EXTRA_OECONF:')
376 lines_after.append('EXTRA_OECONF = ""')
377 lines_after.append('')
378 handled.append('buildsystem')
379 return True
380
381 return False
382
383 @staticmethod
384 def extract_autotools_deps(outlines, srctree, extravalues=None, acfile=None):
385 import shlex
386
387 # Find all plugins that want to register handlers
388 logger.debug('Loading autotools handlers')
389 handlers = []
390 for plugin in plugins:
391 if hasattr(plugin, 'register_autotools_handlers'):
392 plugin.register_autotools_handlers(handlers)
393
394 values = {}
395 inherits = []
396
397 # Hardcoded map, we also use a dynamic one based on what's in the sysroot
398 progmap = {'flex': 'flex-native',
399 'bison': 'bison-native',
400 'm4': 'm4-native',
401 'tar': 'tar-native',
402 'ar': 'binutils-native',
403 'ranlib': 'binutils-native',
404 'ld': 'binutils-native',
405 'strip': 'binutils-native',
406 'libtool': '',
407 'autoconf': '',
408 'autoheader': '',
409 'automake': '',
410 'uname': '',
411 'rm': '',
412 'cp': '',
413 'mv': '',
414 'find': '',
415 'awk': '',
416 'sed': '',
417 }
418 progclassmap = {'gconftool-2': 'gconf',
419 'pkg-config': 'pkgconfig',
420 'python': 'python3native',
421 'python3': 'python3native',
422 'perl': 'perlnative',
423 'makeinfo': 'texinfo',
424 }
425
426 pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
427 pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
428 lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
429 libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
430 progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
431 dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
432 ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
433 am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
434 define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
435 version_re = re.compile(r'([0-9.]+)')
436
437 defines = {}
438 def subst_defines(value):
439 newvalue = value
440 for define, defval in defines.items():
441 newvalue = newvalue.replace(define, defval)
442 if newvalue != value:
443 return subst_defines(newvalue)
444 return value
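        # e.g. with defines = {'MY_VERSION': '1.2.3'} (hypothetical),
        # subst_defines('foo-MY_VERSION') returns 'foo-1.2.3'; the recursion
        # re-runs substitution until the value stops changing, so defines that
        # expand to other defines are resolved too.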
445
446 def process_value(value):
447 value = value.replace('[', '').replace(']', '')
448 if value.startswith('m4_esyscmd(') or value.startswith('m4_esyscmd_s('):
449 cmd = subst_defines(value[value.index('(')+1:-1])
450 try:
451 if '|' in cmd:
452 cmd = 'set -o pipefail; ' + cmd
453 stdout, _ = bb.process.run(cmd, cwd=srctree, shell=True)
454 ret = stdout.rstrip()
455 except bb.process.ExecutionError as e:
456 ret = ''
457            elif value.startswith('m4_'):
458                return None
459            else:
460                ret = subst_defines(value)
461            # Strip surrounding quotes from any non-empty result
462            return ret.strip('"\'') if ret else ret
463
464 # Since a configure.ac file is essentially a program, this is only ever going to be
465 # a hack unfortunately; but it ought to be enough of an approximation
466 if acfile:
467 srcfiles = [acfile]
468 else:
469 srcfiles = RecipeHandler.checkfiles(srctree, ['acinclude.m4', 'configure.ac', 'configure.in'])
470
471 pcdeps = []
472 libdeps = []
473 deps = []
474 unmapped = []
475
476 RecipeHandler.load_binmap(tinfoil.config_data)
477
478 def process_macro(keyword, value):
479 for handler in handlers:
480 if handler.process_macro(srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
481 return
482 logger.debug('Found keyword %s with value "%s"' % (keyword, value))
483 if keyword == 'PKG_CHECK_MODULES':
484 res = pkg_re.search(value)
485 if res:
486 res = dep_re.findall(res.group(1))
487 if res:
488 pcdeps.extend([x[0] for x in res])
489 inherits.append('pkgconfig')
490 elif keyword == 'PKG_CHECK_EXISTS':
491 res = pkgce_re.search(value)
492 if res:
493 res = dep_re.findall(res.group(1))
494 if res:
495 pcdeps.extend([x[0] for x in res])
496 inherits.append('pkgconfig')
497 elif keyword in ('AM_GNU_GETTEXT', 'AM_GLIB_GNU_GETTEXT', 'GETTEXT_PACKAGE'):
498 inherits.append('gettext')
499 elif keyword in ('AC_PROG_INTLTOOL', 'IT_PROG_INTLTOOL'):
500 deps.append('intltool-native')
501 elif keyword == 'AM_PATH_GLIB_2_0':
502 deps.append('glib-2.0')
503 elif keyword in ('AC_CHECK_PROG', 'AC_PATH_PROG', 'AX_WITH_PROG'):
504 res = progs_re.search(value)
505 if res:
506 for prog in shlex.split(res.group(1)):
507 prog = prog.split()[0]
508 for handler in handlers:
509 if handler.process_prog(srctree, keyword, value, prog, deps, outlines, inherits, values):
510 return
511 progclass = progclassmap.get(prog, None)
512 if progclass:
513 inherits.append(progclass)
514 else:
515 progdep = RecipeHandler.recipebinmap.get(prog, None)
516 if not progdep:
517 progdep = progmap.get(prog, None)
518 if progdep:
519 deps.append(progdep)
520 elif progdep is None:
521 if not prog.startswith('$'):
522 unmapped.append(prog)
523 elif keyword == 'AC_CHECK_LIB':
524 res = lib_re.search(value)
525 if res:
526 lib = res.group(1)
527 if not lib.startswith('$'):
528 libdeps.append(lib)
529 elif keyword == 'AX_CHECK_LIBRARY':
530 res = libx_re.search(value)
531 if res:
532 lib = res.group(2)
533 if not lib.startswith('$'):
534 header = res.group(1)
535 libdeps.append((lib, header))
536 elif keyword == 'AC_PATH_X':
537 deps.append('libx11')
538 elif keyword in ('AX_BOOST', 'BOOST_REQUIRE'):
539 deps.append('boost')
540 elif keyword in ('AC_PROG_LEX', 'AM_PROG_LEX', 'AX_PROG_FLEX'):
541 deps.append('flex-native')
542 elif keyword in ('AC_PROG_YACC', 'AX_PROG_BISON'):
543 deps.append('bison-native')
544 elif keyword == 'AX_CHECK_ZLIB':
545 deps.append('zlib')
546 elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'):
547 deps.append('openssl')
548 elif keyword in ('AX_LIB_CURL', 'LIBCURL_CHECK_CONFIG'):
549 deps.append('curl')
550 elif keyword == 'AX_LIB_BEECRYPT':
551 deps.append('beecrypt')
552 elif keyword == 'AX_LIB_EXPAT':
553 deps.append('expat')
554 elif keyword == 'AX_LIB_GCRYPT':
555 deps.append('libgcrypt')
556 elif keyword == 'AX_LIB_NETTLE':
557 deps.append('nettle')
558 elif keyword == 'AX_LIB_READLINE':
559 deps.append('readline')
560 elif keyword == 'AX_LIB_SQLITE3':
561 deps.append('sqlite3')
562 elif keyword == 'AX_LIB_TAGLIB':
563 deps.append('taglib')
564 elif keyword in ['AX_PKG_SWIG', 'AC_PROG_SWIG']:
565 deps.append('swig-native')
566 elif keyword == 'AX_PROG_XSLTPROC':
567 deps.append('libxslt-native')
568 elif keyword in ['AC_PYTHON_DEVEL', 'AX_PYTHON_DEVEL', 'AM_PATH_PYTHON']:
569                inherits.append('python3native')
570 elif keyword == 'AX_WITH_CURSES':
571 deps.append('ncurses')
572 elif keyword == 'AX_PATH_BDB':
573 deps.append('db')
574 elif keyword == 'AX_PATH_LIB_PCRE':
575 deps.append('libpcre')
576 elif keyword == 'AC_INIT':
577 if extravalues is not None:
578 res = ac_init_re.match(value)
579 if res:
580 extravalues['PN'] = process_value(res.group(1))
581 pv = process_value(res.group(2))
582 if validate_pv(pv):
583 extravalues['PV'] = pv
584 elif keyword == 'AM_INIT_AUTOMAKE':
585 if extravalues is not None:
586 if 'PN' not in extravalues:
587 res = am_init_re.match(value)
588 if res:
589 if res.group(1) != 'AC_PACKAGE_NAME':
590 extravalues['PN'] = process_value(res.group(1))
591 pv = process_value(res.group(2))
592 if validate_pv(pv):
593 extravalues['PV'] = pv
594 elif keyword == 'define(':
595 res = define_re.match(value)
596 if res:
597 key = res.group(2).strip('[]')
598 value = process_value(res.group(3))
599 if value is not None:
600 defines[key] = value
601
602 keywords = ['PKG_CHECK_MODULES',
603 'PKG_CHECK_EXISTS',
604 'AM_GNU_GETTEXT',
605 'AM_GLIB_GNU_GETTEXT',
606 'GETTEXT_PACKAGE',
607 'AC_PROG_INTLTOOL',
608 'IT_PROG_INTLTOOL',
609 'AM_PATH_GLIB_2_0',
610 'AC_CHECK_PROG',
611 'AC_PATH_PROG',
612 'AX_WITH_PROG',
613 'AC_CHECK_LIB',
614 'AX_CHECK_LIBRARY',
615 'AC_PATH_X',
616 'AX_BOOST',
617 'BOOST_REQUIRE',
618 'AC_PROG_LEX',
619 'AM_PROG_LEX',
620 'AX_PROG_FLEX',
621 'AC_PROG_YACC',
622 'AX_PROG_BISON',
623 'AX_CHECK_ZLIB',
624 'AX_CHECK_OPENSSL',
625 'AX_LIB_CRYPTO',
626 'AX_LIB_CURL',
627 'LIBCURL_CHECK_CONFIG',
628 'AX_LIB_BEECRYPT',
629 'AX_LIB_EXPAT',
630 'AX_LIB_GCRYPT',
631 'AX_LIB_NETTLE',
632                    'AX_LIB_READLINE',
633 'AX_LIB_SQLITE3',
634 'AX_LIB_TAGLIB',
635 'AX_PKG_SWIG',
636 'AC_PROG_SWIG',
637 'AX_PROG_XSLTPROC',
638 'AC_PYTHON_DEVEL',
639 'AX_PYTHON_DEVEL',
640 'AM_PATH_PYTHON',
641 'AX_WITH_CURSES',
642 'AX_PATH_BDB',
643 'AX_PATH_LIB_PCRE',
644 'AC_INIT',
645 'AM_INIT_AUTOMAKE',
646 'define(',
647 ]
648
649 for handler in handlers:
650 handler.extend_keywords(keywords)
651
652 for srcfile in srcfiles:
653 nesting = 0
654 in_keyword = ''
655 partial = ''
656 with open(srcfile, 'r', errors='surrogateescape') as f:
657 for line in f:
658 if in_keyword:
659 partial += ' ' + line.strip()
660 if partial.endswith('\\'):
661 partial = partial[:-1]
662 nesting = nesting + line.count('(') - line.count(')')
663 if nesting == 0:
664 process_macro(in_keyword, partial)
665 partial = ''
666 in_keyword = ''
667 else:
668 for keyword in keywords:
669 if keyword in line:
670 nesting = line.count('(') - line.count(')')
671 if nesting > 0:
672 partial = line.strip()
673 if partial.endswith('\\'):
674 partial = partial[:-1]
675 in_keyword = keyword
676 else:
677 process_macro(keyword, line.strip())
678 break
679
680 if in_keyword:
681 process_macro(in_keyword, partial)
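        # Illustrative (example macro, not from a real project): an invocation like
        #   PKG_CHECK_MODULES([DEPS],
        #                     [glib-2.0 >= 2.40])
        # opens more parentheses than it closes on its first line, so nesting > 0
        # and subsequent lines are appended to 'partial' until the parentheses
        # balance, at which point the whole invocation is passed to process_macro().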
682
683 if extravalues:
684 for k,v in list(extravalues.items()):
685 if v:
686 if v.startswith('$') or v.startswith('@') or v.startswith('%'):
687 del extravalues[k]
688 else:
689 extravalues[k] = v.strip('"\'').rstrip('()')
690
691 if unmapped:
692 outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmapped))))
693
694 RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)
695
696 for handler in handlers:
697 handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)
698
699 if inherits:
700 values['inherit'] = ' '.join(list(set(inherits)))
701
702 return values
703
704
705class AutotoolsExtensionHandler(object):
706 '''Base class for Autotools extension handlers'''
707 def process_macro(self, srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
708 '''
709 Handle a macro parsed out of an autotools file. Note that if you want this to be called
710 for any macro other than the ones AutotoolsRecipeHandler already looks for, you'll need
711 to add it to the keywords list in extend_keywords().
712 Return True if you've completely handled the passed in macro, otherwise return False.
713 '''
714 return False
715
716 def extend_keywords(self, keywords):
717 '''Adds keywords to be recognised by the parser (so that you get a call to process_macro)'''
718 return
719
720 def process_prog(self, srctree, keyword, value, prog, deps, outlines, inherits, values):
721 '''
722 Handle an AC_PATH_PROG, AC_CHECK_PROG etc. line
723 Return True if you've completely handled the passed in macro, otherwise return False.
724 '''
725 return False
726
727    def post_process(self, srctree, libdeps, pcdeps, deps, outlines, inherits, values):
728 '''
729 Apply any desired post-processing on the output
730 '''
731 return
732
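# A minimal sketch of a plugin-provided handler (macro and recipe names are
# hypothetical); plugins register instances via register_autotools_handlers():
#
#   class MyAutotoolsHandler(AutotoolsExtensionHandler):
#       def extend_keywords(self, keywords):
#           keywords.append('MY_CHECK_FOO')   # make the parser call us for this macro
#
#       def process_macro(self, srctree, keyword, value, process_value,
#                         libdeps, pcdeps, deps, outlines, inherits, values):
#           if keyword == 'MY_CHECK_FOO':
#               deps.append('foo')            # hypothetical recipe name
#               return True
#           return False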
733
734class MakefileRecipeHandler(RecipeHandler):
735 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
736 if 'buildsystem' in handled:
737 return False
738
739 makefile = RecipeHandler.checkfiles(srctree, ['Makefile', 'makefile', 'GNUmakefile'])
740 if makefile:
741 lines_after.append('# NOTE: this is a Makefile-only piece of software, so we cannot generate much of the')
742 lines_after.append('# recipe automatically - you will need to examine the Makefile yourself and ensure')
743 lines_after.append('# that the appropriate arguments are passed in.')
744 lines_after.append('')
745
746 scanfile = os.path.join(srctree, 'configure.scan')
747 skipscan = False
748 try:
749 stdout, stderr = bb.process.run('autoscan', cwd=srctree, shell=True)
750 except bb.process.ExecutionError as e:
751 skipscan = True
752            if not skipscan and os.path.exists(scanfile):
753 values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, acfile=scanfile)
754 classes.extend(values.pop('inherit', '').split())
755 for var, value in values.items():
756 if var == 'DEPENDS':
757 lines_before.append('# NOTE: some of these dependencies may be optional, check the Makefile and/or upstream documentation')
758 lines_before.append('%s = "%s"' % (var, value))
759 lines_before.append('')
760 for f in ['configure.scan', 'autoscan.log']:
761 fp = os.path.join(srctree, f)
762 if os.path.exists(fp):
763 os.remove(fp)
764
765 self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
766
767 func = []
768 func.append('# You will almost certainly need to add additional arguments here')
769 func.append('oe_runmake')
770 self.genfunction(lines_after, 'do_compile', func)
771
772 installtarget = True
773 try:
774 stdout, stderr = bb.process.run('make -n install', cwd=srctree, shell=True)
775 except bb.process.ExecutionError as e:
776 if e.exitcode != 1:
777 installtarget = False
778 func = []
779 if installtarget:
780 func.append('# This is a guess; additional arguments may be required')
781 makeargs = ''
782 with open(makefile[0], 'r', errors='surrogateescape') as f:
783 for i in range(1, 100):
784 if 'DESTDIR' in f.readline():
785 makeargs += " 'DESTDIR=${D}'"
786 break
787 func.append('oe_runmake install%s' % makeargs)
788 else:
789 func.append('# NOTE: unable to determine what to put here - there is a Makefile but no')
790 func.append('# target named "install", so you will need to define this yourself')
791 self.genfunction(lines_after, 'do_install', func)
792
793 handled.append('buildsystem')
794 else:
795 lines_after.append('# NOTE: no Makefile found, unable to determine what needs to be done')
796 lines_after.append('')
797 self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
798 self.genfunction(lines_after, 'do_compile', ['# Specify compilation commands here'])
799 self.genfunction(lines_after, 'do_install', ['# Specify install commands here'])
800
801
802class VersionFileRecipeHandler(RecipeHandler):
803 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
804 if 'PV' not in extravalues:
805 # Look for a VERSION or version file containing a single line consisting
806 # only of a version number
807 filelist = RecipeHandler.checkfiles(srctree, ['VERSION', 'version'])
808 version = None
809 for fileitem in filelist:
810 linecount = 0
811 with open(fileitem, 'r', errors='surrogateescape') as f:
812 for line in f:
813 line = line.rstrip().strip('"\'')
814 linecount += 1
815 if line:
816 if linecount > 1:
817 version = None
818 break
819 else:
820 if validate_pv(line):
821 version = line
822 if version:
823 extravalues['PV'] = version
824 break
825
826
827class SpecFileRecipeHandler(RecipeHandler):
828 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
829 if 'PV' in extravalues and 'PN' in extravalues:
830 return
831 filelist = RecipeHandler.checkfiles(srctree, ['*.spec'], recursive=True)
832 valuemap = {'Name': 'PN',
833 'Version': 'PV',
834 'Summary': 'SUMMARY',
835 'Url': 'HOMEPAGE',
836 'License': 'LICENSE'}
837 foundvalues = {}
838 for fileitem in filelist:
839 linecount = 0
840 with open(fileitem, 'r', errors='surrogateescape') as f:
841 for line in f:
842 for value, varname in valuemap.items():
843                        if line.lower().startswith(value.lower() + ':') and varname not in foundvalues:
844 foundvalues[varname] = line.split(':', 1)[1].strip()
845 break
846 if len(foundvalues) == len(valuemap):
847 break
848 # Drop values containing unexpanded RPM macros
849 for k in list(foundvalues.keys()):
850 if '%' in foundvalues[k]:
851 del foundvalues[k]
852 if 'PV' in foundvalues:
853 if not validate_pv(foundvalues['PV']):
854 del foundvalues['PV']
855 license = foundvalues.pop('LICENSE', None)
856 if license:
857 liccomment = '# NOTE: spec file indicates the license may be "%s"' % license
858 for i, line in enumerate(lines_before):
859 if line.startswith('LICENSE ='):
860 lines_before.insert(i, liccomment)
861 break
862 else:
863 lines_before.append(liccomment)
864 extravalues.update(foundvalues)
865
866def register_recipe_handlers(handlers):
867 # Set priorities with some gaps so that other plugins can insert
868 # their own handlers (so avoid changing these numbers)
869 handlers.append((CmakeRecipeHandler(), 50))
870 handlers.append((AutotoolsRecipeHandler(), 40))
871 handlers.append((SconsRecipeHandler(), 30))
872 handlers.append((QmakeRecipeHandler(), 20))
873 handlers.append((MakefileRecipeHandler(), 10))
874 handlers.append((VersionFileRecipeHandler(), -1))
875 handlers.append((SpecFileRecipeHandler(), -1))
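# e.g. a plugin could register its own handler with a priority of 60
# (hypothetical) to be consulted before the CMake handler, or 15 to sit
# between the qmake and Makefile fallbacks, without these numbers changing.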
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py
deleted file mode 100644
index a807dafae5..0000000000
--- a/scripts/lib/recipetool/create_buildsys_python.py
+++ /dev/null
@@ -1,1124 +0,0 @@
1# Recipe creation tool - create build system handler for python
2#
3# Copyright (C) 2015 Mentor Graphics Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import ast
9import codecs
10import collections
11import setuptools.command.build_py
12import email
13import importlib
14import glob
15import itertools
16import logging
17import os
18import re
19import sys
20import subprocess
21import json
22import urllib.request
23from recipetool.create import RecipeHandler
24from urllib.parse import urldefrag
25from recipetool.create import determine_from_url
26
27logger = logging.getLogger('recipetool')
28
29tinfoil = None
30
31
32def tinfoil_init(instance):
33 global tinfoil
34 tinfoil = instance
35
36
37class PythonRecipeHandler(RecipeHandler):
38 base_pkgdeps = ['python3-core']
39 excluded_pkgdeps = ['python3-dbg']
40 # os.path is provided by python3-core
41 assume_provided = ['builtins', 'os.path']
42 # Assumes that the host python3 builtin_module_names is sane for target too
43 assume_provided = assume_provided + list(sys.builtin_module_names)
44 excluded_fields = []
45
46
47 classifier_license_map = {
48 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
49 'License :: OSI Approved :: Apache Software License': 'Apache',
50 'License :: OSI Approved :: Apple Public Source License': 'APSL',
51 'License :: OSI Approved :: Artistic License': 'Artistic',
52 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
53 'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
54 'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0',
55 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1',
56 'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0',
57 'License :: OSI Approved :: Common Public License': 'CPL',
58 'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0',
59 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0',
60 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
61 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
62 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
63 'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2',
64 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only',
65 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later',
66 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
67 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
68 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
69 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
70 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only',
71 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later',
72 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only',
73 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later',
74 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only',
75 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later',
76 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
77 'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND',
78 'License :: OSI Approved :: IBM Public License': 'IPL',
79 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
80 'License :: OSI Approved :: Intel Open Source License': 'Intel',
81 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
82 'License :: OSI Approved :: MIT License': 'MIT',
83 'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0',
84 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
85 'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS',
86 'License :: OSI Approved :: Motosoto License': 'Motosoto',
87 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
88 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
89 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
90 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
91 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
92 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
93 'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0',
94 'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL',
95 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
96 'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0',
97 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
98 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
99 'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1',
100 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
101 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL',
102 'License :: OSI Approved :: Sun Public License': 'SPL',
103 'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense',
104 'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0',
105 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
106 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
107 'License :: OSI Approved :: W3C License': 'W3C',
108 'License :: OSI Approved :: X.Net License': 'Xnet',
109 'License :: OSI Approved :: Zope Public License': 'ZPL',
110 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
111 'License :: Other/Proprietary License': 'Proprietary',
112 'License :: Public Domain': 'PD',
113 }
114
115 def __init__(self):
116 pass
117
118 def process_url(self, args, classes, handled, extravalues):
119 """
120 Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/...
121 which corresponds to the archive location, and add pypi class
122 """
123
124 if 'url' in handled:
125 return None
126
127 fetch_uri = None
128 source = args.source
129 required_version = args.version if args.version else None
130 match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0])
131 if match:
132 package = match.group(1)
133 version = match.group(2) if match.group(2) else required_version
134
135            json_url = "https://pypi.org/pypi/%s/json" % package
136 response = urllib.request.urlopen(json_url)
137 if response.status == 200:
138 data = json.loads(response.read())
139 if not version:
140 # grab latest version
141 version = data["info"]["version"]
142 pypi_package = data["info"]["name"]
143 for release in reversed(data["releases"][version]):
144 if release["packagetype"] == "sdist":
145 fetch_uri = release["url"]
146 break
147 else:
148 logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url)
149 return None
150 else:
151 match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source)
152 if match:
153 fetch_uri = source
154 pypi_package = match.group(1)
155 _, version = determine_from_url(fetch_uri)
156
157 if match and not args.no_pypi:
158 if required_version and version != required_version:
159 raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version))
160                # This is optional if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass)
161                # but at this point we cannot know, because the user can specify the output name of the recipe on the command line
162                extravalues["PYPI_PACKAGE"] = pypi_package
163                # If the tarball extension is not 'tar.gz' (the default in pypi.bbclass) we should set PYPI_PACKAGE_EXT in the recipe
164 pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri)
165 if pypi_package_ext:
166 pypi_package_ext = pypi_package_ext.group(1)
167 if pypi_package_ext != "tar.gz":
168 extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext
169
170 # Pypi class will handle S and SRC_URI variables, so remove them
171 # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to:
172 # extravalues['SRC_URI(?:\[.*?\])?'] = None
173 extravalues['S'] = None
174 extravalues['SRC_URI'] = None
175
176 classes.append('pypi')
177
178 handled.append('url')
179 return fetch_uri
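    # Illustrative walk-through (package name/version are examples only):
    # 'https://pypi.org/project/requests/2.31.0/' is resolved via the JSON API
    # at https://pypi.org/pypi/requests/json to the matching sdist URL on
    # files.pythonhosted.org, PYPI_PACKAGE is set to 'requests', S/SRC_URI are
    # left to pypi.bbclass, and the recipe inherits the pypi class.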
180
181 def handle_classifier_license(self, classifiers, existing_licenses=""):
182
183 licenses = []
184 for classifier in classifiers:
185 if classifier in self.classifier_license_map:
186 license = self.classifier_license_map[classifier]
187 if license == 'Apache' and 'Apache-2.0' in existing_licenses:
188 license = 'Apache-2.0'
189 elif license == 'GPL':
190 if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
191 license = 'GPL-2.0'
192 elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
193 license = 'GPL-3.0'
194 elif license == 'LGPL':
195 if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
196 license = 'LGPL-2.1'
197 elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
198 license = 'LGPL-2.0'
199 elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
200 license = 'LGPL-3.0'
201 licenses.append(license)
202
203 if licenses:
204 return ' & '.join(licenses)
205
206 return None
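    # e.g. handle_classifier_license(['License :: OSI Approved :: MIT License',
    #                                 'License :: OSI Approved :: BSD License'])
    # returns 'MIT & BSD-3-Clause'; the generic 'GPL'/'LGPL' classifiers are only
    # narrowed to a specific version if existing_licenses already hints at one.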
207
208 def map_info_to_bbvar(self, info, extravalues):
209
210 # Map PKG-INFO & setup.py fields to bitbake variables
211 for field, values in info.items():
212 if field in self.excluded_fields:
213 continue
214
215 if field not in self.bbvar_map:
216 continue
217
218 if isinstance(values, str):
219 value = values
220 else:
221 value = ' '.join(str(v) for v in values if v)
222
223 bbvar = self.bbvar_map[field]
224 if bbvar == "PN":
225 # by convention python recipes start with "python3-"
226 if not value.startswith('python'):
227 value = 'python3-' + value
228
229 if bbvar not in extravalues and value:
230 extravalues[bbvar] = value
231
232 def apply_info_replacements(self, info):
233 if not self.replacements:
234 return
235
236 for variable, search, replace in self.replacements:
237 if variable not in info:
238 continue
239
240 def replace_value(search, replace, value):
241 if replace is None:
242 if re.search(search, value):
243 return None
244 else:
245 new_value = re.sub(search, replace, value)
246 if value != new_value:
247 return new_value
248 return value
249
250 value = info[variable]
251 if isinstance(value, str):
252 new_value = replace_value(search, replace, value)
253 if new_value is None:
254 del info[variable]
255 elif new_value != value:
256 info[variable] = new_value
257 elif hasattr(value, 'items'):
258 for dkey, dvalue in list(value.items()):
259 new_list = []
260 for pos, a_value in enumerate(dvalue):
261 new_value = replace_value(search, replace, a_value)
262                        if new_value is not None:
263 new_list.append(new_value)
264
265                    if dvalue != new_list:
266 value[dkey] = new_list
267 else:
268 new_list = []
269 for pos, a_value in enumerate(value):
270 new_value = replace_value(search, replace, a_value)
271                    if new_value is not None:
272 new_list.append(new_value)
273
274 if value != new_list:
275 info[variable] = new_list
276
277
278 def scan_python_dependencies(self, paths):
279 deps = set()
280 try:
281 dep_output = self.run_command(['pythondeps', '-d'] + paths)
282 except (OSError, subprocess.CalledProcessError):
283 pass
284 else:
285 for line in dep_output.splitlines():
286 line = line.rstrip()
287 dep, filename = line.split('\t', 1)
288 if filename.endswith('/setup.py'):
289 continue
290 deps.add(dep)
291
292 try:
293 provides_output = self.run_command(['pythondeps', '-p'] + paths)
294 except (OSError, subprocess.CalledProcessError):
295 pass
296 else:
297 provides_lines = (l.rstrip() for l in provides_output.splitlines())
298 provides = set(l for l in provides_lines if l and l != 'setup')
299 deps -= provides
300
301 return deps
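    # pythondeps -d emits tab-separated '<module>\t<file>' lines; e.g. a
    # (hypothetical) line 'json\t/src/foo.py' records that foo.py imports json.
    # Modules the tree itself provides (pythondeps -p) are then subtracted so
    # only external dependencies remain.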
302
303 def parse_pkgdata_for_python_packages(self):
304 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
305
306 ldata = tinfoil.config_data.createCopy()
307 bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
308 python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
309
310 dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
311 python_dirs = [python_sitedir + os.sep,
312 os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
313 os.path.dirname(python_sitedir) + os.sep]
314 packages = {}
315 for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
316 files_info = None
317 with open(pkgdatafile, 'r') as f:
318 for line in f.readlines():
319 field, value = line.split(': ', 1)
320 if field.startswith('FILES_INFO'):
321 files_info = ast.literal_eval(value)
322 break
323 else:
324 continue
325
326 for fn in files_info:
327 for suffix in importlib.machinery.all_suffixes():
328 if fn.endswith(suffix):
329 break
330 else:
331 continue
332
333 if fn.startswith(dynload_dir + os.sep):
334 if '/.debug/' in fn:
335 continue
336 base = os.path.basename(fn)
337 provided = base.split('.', 1)[0]
338 packages[provided] = os.path.basename(pkgdatafile)
339 continue
340
341 for python_dir in python_dirs:
342 if fn.startswith(python_dir):
343 relpath = fn[len(python_dir):]
344 relstart, _, relremaining = relpath.partition(os.sep)
345 if relstart.endswith('.egg'):
346 relpath = relremaining
347 base, _ = os.path.splitext(relpath)
348
349 if '/.debug/' in base:
350 continue
351 if os.path.basename(base) == '__init__':
352 base = os.path.dirname(base)
353 base = base.replace(os.sep + os.sep, os.sep)
354 provided = base.replace(os.sep, '.')
355 packages[provided] = os.path.basename(pkgdatafile)
356 return packages
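    # Illustrative result (hypothetical paths/packages): a FILES_INFO entry for
    # .../site-packages/foo/bar/__init__.py in python3-foo's pkgdata yields
    # packages['foo.bar'] = 'python3-foo', so a scanned import of foo.bar can be
    # mapped back to the package that ships it.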
357
358 @classmethod
359 def run_command(cls, cmd, **popenargs):
360 if 'stderr' not in popenargs:
361 popenargs['stderr'] = subprocess.STDOUT
362 try:
363 return subprocess.check_output(cmd, **popenargs).decode('utf-8')
364 except OSError as exc:
365            logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc)
366 raise
367 except subprocess.CalledProcessError as exc:
368            logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc.output)
369 raise
370
371class PythonSetupPyRecipeHandler(PythonRecipeHandler):
372 bbvar_map = {
373 'Name': 'PN',
374 'Version': 'PV',
375 'Home-page': 'HOMEPAGE',
376 'Summary': 'SUMMARY',
377 'Description': 'DESCRIPTION',
378 'License': 'LICENSE',
379 'Requires': 'RDEPENDS:${PN}',
380 'Provides': 'RPROVIDES:${PN}',
381 'Obsoletes': 'RREPLACES:${PN}',
382 }
383 # PN/PV are already set by recipetool core & desc can be extremely long
384 excluded_fields = [
385 'Description',
386 ]
387 setup_parse_map = {
388 'Url': 'Home-page',
389 'Classifiers': 'Classifier',
390 'Description': 'Summary',
391 }
392 setuparg_map = {
393 'Home-page': 'url',
394 'Classifier': 'classifiers',
395 'Summary': 'description',
396 'Description': 'long-description',
397 }
398 # Values which are lists, used by the setup.py argument based metadata
399 # extraction method, to determine how to process the setup.py output.
400 setuparg_list_fields = [
401 'Classifier',
402 'Requires',
403 'Provides',
404 'Obsoletes',
405 'Platform',
406 'Supported-Platform',
407 ]
408 setuparg_multi_line_values = ['Description']
409
410 replacements = [
411 ('License', r' +$', ''),
412 ('License', r'^ +', ''),
413 ('License', r' ', '-'),
414 ('License', r'^GNU-', ''),
415 ('License', r'-[Ll]icen[cs]e(,?-[Vv]ersion)?', ''),
416 ('License', r'^UNKNOWN$', ''),
417
418 # Remove currently unhandled version numbers from these variables
419 ('Requires', r' *\([^)]*\)', ''),
420 ('Provides', r' *\([^)]*\)', ''),
421 ('Obsoletes', r' *\([^)]*\)', ''),
422 ('Install-requires', r'^([^><= ]+).*', r'\1'),
423 ('Extras-require', r'^([^><= ]+).*', r'\1'),
424 ('Tests-require', r'^([^><= ]+).*', r'\1'),
425
426 # Remove unhandled dependency on particular features (e.g. foo[PDF])
427 ('Install-requires', r'\[[^\]]+\]$', ''),
428 ]
429
430 def __init__(self):
431 pass
432
433 def parse_setup_py(self, setupscript='./setup.py'):
434 with codecs.open(setupscript) as f:
435 info, imported_modules, non_literals, extensions = gather_setup_info(f)
436
437 def _map(key):
438 key = key.replace('_', '-')
439 key = key[0].upper() + key[1:]
440 if key in self.setup_parse_map:
441 key = self.setup_parse_map[key]
442 return key
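        # e.g. _map('home_page') -> 'Home-page', and _map('url') -> 'Url' ->
        # 'Home-page' via setup_parse_map, normalising setup() keyword
        # arguments onto their PKG-INFO field names.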
443
444 # Naive mapping of setup() arguments to PKG-INFO field names
445 for d in [info, non_literals]:
446 for key, value in list(d.items()):
447 if key is None:
448 continue
449 new_key = _map(key)
450 if new_key != key:
451 del d[key]
452 d[new_key] = value
453
454 return info, 'setuptools' in imported_modules, non_literals, extensions
455
456 def get_setup_args_info(self, setupscript='./setup.py'):
457 cmd = ['python3', setupscript]
458 info = {}
459 keys = set(self.bbvar_map.keys())
460 keys |= set(self.setuparg_list_fields)
461 keys |= set(self.setuparg_multi_line_values)
462 grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
463 for index, keys in grouped_keys:
464 if index == (True, False):
465 # Splitlines output for each arg as a list value
466 for key in keys:
467 arg = self.setuparg_map.get(key, key.lower())
468 try:
469 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
470 except (OSError, subprocess.CalledProcessError):
471 pass
472 else:
473 info[key] = [l.rstrip() for l in arg_info.splitlines()]
474 elif index == (False, True):
475 # Entire output for each arg
476 for key in keys:
477 arg = self.setuparg_map.get(key, key.lower())
478 try:
479 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
480 except (OSError, subprocess.CalledProcessError):
481 pass
482 else:
483 info[key] = arg_info
484 else:
485 info.update(self.get_setup_byline(list(keys), setupscript))
486 return info
487
488 def get_setup_byline(self, fields, setupscript='./setup.py'):
489 info = {}
490
491 cmd = ['python3', setupscript]
492 cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
493 try:
494 info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
495 except (OSError, subprocess.CalledProcessError):
496 pass
497 else:
498 if len(fields) != len(info_lines):
499 logger.error('Mismatch between setup.py output lines and number of fields')
500 sys.exit(1)
501
502 for lineno, line in enumerate(info_lines):
503 line = line.rstrip()
504 info[fields[lineno]] = line
505 return info
506
507 def get_pkginfo(self, pkginfo_fn):
508 msg = email.message_from_file(open(pkginfo_fn, 'r'))
509 msginfo = {}
510 for field in msg.keys():
511 values = msg.get_all(field)
512 if len(values) == 1:
513 msginfo[field] = values[0]
514 else:
515 msginfo[field] = values
516 return msginfo
517
518 def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
519 if 'Package-dir' in setup_info:
520 package_dir = setup_info['Package-dir']
521 else:
522 package_dir = {}
523
524 dist = setuptools.Distribution()
525
526 class PackageDir(setuptools.command.build_py.build_py):
527 def __init__(self, package_dir):
528 self.package_dir = package_dir
529 self.dist = dist
530 super().__init__(self.dist)
531
532 pd = PackageDir(package_dir)
533 to_scan = []
534 if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
535 if 'Py-modules' in setup_info:
536 for module in setup_info['Py-modules']:
537 try:
538 package, module = module.rsplit('.', 1)
539 except ValueError:
540 package, module = '.', module
541 module_path = os.path.join(pd.get_package_dir(package), module + '.py')
542 to_scan.append(module_path)
543
544 if 'Packages' in setup_info:
545 for package in setup_info['Packages']:
546 to_scan.append(pd.get_package_dir(package))
547
548 if 'Scripts' in setup_info:
549 to_scan.extend(setup_info['Scripts'])
550 else:
551 logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")
552
553 if not to_scan:
554 to_scan = ['.']
555
556 logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
557
558 provided_packages = self.parse_pkgdata_for_python_packages()
559 scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
560 mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
561 for dep in scanned_deps:
562 mapped = provided_packages.get(dep)
563 if mapped:
564 logger.debug('Mapped %s to %s' % (dep, mapped))
565 mapped_deps.add(mapped)
566 else:
567 logger.debug('Could not map %s' % dep)
568 unmapped_deps.add(dep)
569 return mapped_deps, unmapped_deps
570
571 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
572
573 if 'buildsystem' in handled:
574 return False
575
576 logger.debug("Trying setup.py parser")
577
578 # Check for non-zero size setup.py files
579 setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
580 for fn in setupfiles:
581 if os.path.getsize(fn):
582 break
583 else:
584 logger.debug("No setup.py found")
585 return False
586
587 # setup.py is always parsed to get at certain required information, such as
588 # distutils vs setuptools
589 #
590 # If egg info is available, we use it for both its PKG-INFO metadata
591 # and for its requires.txt for install_requires.
592 # If PKG-INFO is available but no egg info is, we use that for metadata in preference to
593 # the parsed setup.py, but use the install_requires info from the
594 # parsed setup.py.
595
596 setupscript = os.path.join(srctree, 'setup.py')
597 try:
598 setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript)
599 except Exception:
600 logger.exception("Failed to parse setup.py")
601 setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], []
602
603 egginfo = glob.glob(os.path.join(srctree, '*.egg-info'))
604 if egginfo:
605 info = self.get_pkginfo(os.path.join(egginfo[0], 'PKG-INFO'))
606 requires_txt = os.path.join(egginfo[0], 'requires.txt')
607 if os.path.exists(requires_txt):
608 with codecs.open(requires_txt) as f:
609 inst_req = []
610 extras_req = collections.defaultdict(list)
611 current_feature = None
612 for line in f.readlines():
613 line = line.rstrip()
614 if not line:
615 continue
616
617 if line.startswith('['):
618 # PACKAGECONFIG must not contain expressions or whitespace
619 line = line.replace(" ", "")
620 line = line.replace(':', "")
621 line = line.replace('.', "-dot-")
622 line = line.replace('"', "")
623 line = line.replace('<', "-smaller-")
624 line = line.replace('>', "-bigger-")
625 line = line.replace('_', "-")
626 line = line.replace('(', "")
627 line = line.replace(')', "")
628 line = line.replace('!', "-not-")
629 line = line.replace('=', "-equals-")
630 current_feature = line[1:-1]
631 elif current_feature:
632 extras_req[current_feature].append(line)
633 else:
634 inst_req.append(line)
635 info['Install-requires'] = inst_req
636 info['Extras-require'] = extras_req
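                    # Illustrative mangling (hypothetical section header): a line
                    #   [socks:python_version<"3.8"]
                    # yields the PACKAGECONFIG-safe feature name
                    #   sockspython-version-smaller-3-dot-8
                    # (the ':' is simply dropped, so the two parts run together).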
637 elif RecipeHandler.checkfiles(srctree, ['PKG-INFO']):
638 info = self.get_pkginfo(os.path.join(srctree, 'PKG-INFO'))
639
640 if setup_info:
641 if 'Install-requires' in setup_info:
642 info['Install-requires'] = setup_info['Install-requires']
643 if 'Extras-require' in setup_info:
644 info['Extras-require'] = setup_info['Extras-require']
645 else:
646 if setup_info:
647 info = setup_info
648 else:
649 info = self.get_setup_args_info(setupscript)
650
651 # Grab the license value before applying replacements
652 license_str = info.get('License', '').strip()
653
654 self.apply_info_replacements(info)
655
656 if uses_setuptools:
657 classes.append('setuptools3')
658 else:
659 classes.append('distutils3')
660
661 if license_str:
662 for i, line in enumerate(lines_before):
663 if line.startswith('##LICENSE_PLACEHOLDER##'):
664 lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
665 break
666
667 if 'Classifier' in info:
668 license = self.handle_classifier_license(info['Classifier'], info.get('License', ''))
669 if license:
670 info['License'] = license
671
672 self.map_info_to_bbvar(info, extravalues)
673
674 mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)
675
676 extras_req = set()
677 if 'Extras-require' in info:
678 extras_req = info['Extras-require']
679 if extras_req:
680 lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
681 lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
682 lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
683                lines_after.append('# The configs might not be correct, since PACKAGECONFIG does not support expressions as may be used in requires.txt - they are just replaced by text.')
684 lines_after.append('#')
685 lines_after.append('# Uncomment this line to enable all the optional features.')
686 lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
687 for feature, feature_reqs in extras_req.items():
688 unmapped_deps.difference_update(feature_reqs)
689
690 feature_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(feature_reqs))
691 lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps)))
692
693 inst_reqs = set()
694 if 'Install-requires' in info:
695 if extras_req:
696 lines_after.append('')
697 inst_reqs = info['Install-requires']
698 if inst_reqs:
699 unmapped_deps.difference_update(inst_reqs)
700
701 inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
702 lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
703 lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
704 lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(inst_req_deps)))
705
706 if mapped_deps:
707 name = info.get('Name')
708 if name and name[0] in mapped_deps:
709 # Attempt to avoid self-reference
710 mapped_deps.remove(name[0])
711 mapped_deps -= set(self.excluded_pkgdeps)
712 if inst_reqs or extras_req:
713 lines_after.append('')
714 lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the')
715 lines_after.append('# python sources, and might not be 100% accurate.')
716 lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps))))
717
718 unmapped_deps -= set(extensions)
719 unmapped_deps -= set(self.assume_provided)
720 if unmapped_deps:
721 if mapped_deps:
722 lines_after.append('')
723 lines_after.append('# WARNING: We were unable to map the following python package/module')
724 lines_after.append('# dependencies to the bitbake packages which include them:')
725 lines_after.extend('# {}'.format(d) for d in sorted(unmapped_deps))
726
727 handled.append('buildsystem')
728
729class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler):
730 """Base class to support PEP517 and PEP518
731
732 PEP517 https://peps.python.org/pep-0517/#source-trees
733 PEP518 https://peps.python.org/pep-0518/#build-system-table
734 """
735    # bitbake currently supports the following build backends
736 build_backend_map = {
737 "setuptools.build_meta": "python_setuptools_build_meta",
738 "poetry.core.masonry.api": "python_poetry_core",
739 "flit_core.buildapi": "python_flit_core",
740 "hatchling.build": "python_hatchling",
741 "maturin": "python_maturin",
742 "mesonpy": "python_mesonpy",
743 }
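    # e.g. a pyproject.toml declaring
    #   [build-system]
    #   build-backend = "flit_core.buildapi"
    # causes the generated recipe to inherit python_flit_core.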
744
745    # setuptools.build_meta and flit declare project metadata in the "project" section of pyproject.toml,
746    # as specified by PEP 621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata
747    # while poetry uses the "tool.poetry" section, per its official documentation: https://python-poetry.org/docs/pyproject/
748    # The keys of the two sections are almost the same, except that the homepage key is "homepage" for
749    # tool.poetry and "Homepage" for the "project" section, so keep both.
750 bbvar_map = {
751 "name": "PN",
752 "version": "PV",
753 "Homepage": "HOMEPAGE",
754 "homepage": "HOMEPAGE",
755 "description": "SUMMARY",
756 "license": "LICENSE",
757 "dependencies": "RDEPENDS:${PN}",
758 "requires": "DEPENDS",
759 }
760
761 replacements = [
762 ("license", r" +$", ""),
763 ("license", r"^ +", ""),
764 ("license", r" ", "-"),
765 ("license", r"^GNU-", ""),
766 ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""),
767 ("license", r"^UNKNOWN$", ""),
768 # Remove currently unhandled version numbers from these variables
769 ("requires", r"\[[^\]]+\]$", ""),
770 ("requires", r"^([^><= ]+).*", r"\1"),
771 ("dependencies", r"\[[^\]]+\]$", ""),
772 ("dependencies", r"^([^><= ]+).*", r"\1"),
773 ]
774
775 excluded_native_pkgdeps = [
776 # already provided by python_setuptools_build_meta.bbclass
777 "python3-setuptools-native",
778 "python3-wheel-native",
779 # already provided by python_poetry_core.bbclass
780 "python3-poetry-core-native",
781 # already provided by python_flit_core.bbclass
782 "python3-flit-core-native",
783 # already provided by python_mesonpy
784 "python3-meson-python-native",
785 ]
786
787 # add here a list of known and often used packages and the corresponding bitbake package
788 known_deps_map = {
789 "setuptools": "python3-setuptools",
790 "wheel": "python3-wheel",
791 "poetry-core": "python3-poetry-core",
792 "flit_core": "python3-flit-core",
793 "setuptools-scm": "python3-setuptools-scm",
794 "hatchling": "python3-hatchling",
795 "hatch-vcs": "python3-hatch-vcs",
796 "meson-python" : "python3-meson-python",
797 }
798
799 def __init__(self):
800 pass
801
802 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
803 info = {}
804 metadata = {}
805
806 if 'buildsystem' in handled:
807 return False
808
809 logger.debug("Trying pyproject.toml parser")
810
811        # Check for a non-zero size pyproject.toml file
812 setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"])
813 for fn in setupfiles:
814 if os.path.getsize(fn):
815 break
816 else:
817 logger.debug("No pyproject.toml found")
818 return False
819
820 setupscript = os.path.join(srctree, "pyproject.toml")
821
822 try:
823 try:
824 import tomllib
825 except ImportError:
826 try:
827 import tomli as tomllib
828 except ImportError:
829 logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.")
830 return False
831
832 try:
833 with open(setupscript, "rb") as f:
834 config = tomllib.load(f)
835 except Exception:
836 logger.exception("Failed to parse pyproject.toml")
837 return False
838
839 build_backend = config["build-system"]["build-backend"]
840 if build_backend in self.build_backend_map:
841 classes.append(self.build_backend_map[build_backend])
842 else:
843 logger.error(
844 "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py"
845 % build_backend
846 )
847 return False
848
849 licfile = ""
850
851 if build_backend == "poetry.core.masonry.api":
852 if "tool" in config and "poetry" in config["tool"]:
853 metadata = config["tool"]["poetry"]
854 else:
855 if "project" in config:
856 metadata = config["project"]
857
858 if metadata:
859 for field, values in metadata.items():
860 if field == "license":
861 # For setuptools.build_meta and flit, licence is a table
862 # but for poetry licence is a string
863 # for hatchling, both table (jsonschema) and string (iniconfig) have been used
864 if build_backend == "poetry.core.masonry.api":
865 value = values
866 else:
867 value = values.get("text", "")
868 if not value:
869 licfile = values.get("file", "")
870 continue
871 elif field == "dependencies" and build_backend == "poetry.core.masonry.api":
872 # For poetry backend, "dependencies" section looks like:
873 # [tool.poetry.dependencies]
874 # requests = "^2.13.0"
875 # requests = { version = "^2.13.0", source = "private" }
876 # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details
877 # This class doesn't handle versions anyway, so we just get the dependencies name here and construct a list
878 value = []
879 for k in values.keys():
880 value.append(k)
881 elif isinstance(values, dict):
882 for k, v in values.items():
883 info[k] = v
884 continue
885 else:
886 value = values
887
888 info[field] = value
889
890 # Grab the license value before applying replacements
891 license_str = info.get("license", "").strip()
892
893 if license_str:
894 for i, line in enumerate(lines_before):
895 if line.startswith("##LICENSE_PLACEHOLDER##"):
896 lines_before.insert(
897 i, "# NOTE: License in pyproject.toml is: %s" % license_str
898 )
899 break
900
901 info["requires"] = config["build-system"]["requires"]
902
903 self.apply_info_replacements(info)
904
905 if "classifiers" in info:
906 license = self.handle_classifier_license(
907 info["classifiers"], info.get("license", "")
908 )
909 if license:
910 if licfile:
911 lines = []
912 md5value = bb.utils.md5_file(os.path.join(srctree, licfile))
913 lines.append('LICENSE = "%s"' % license)
914 lines.append(
915 'LIC_FILES_CHKSUM = "file://%s;md5=%s"'
916 % (licfile, md5value)
917 )
918 lines.append("")
919
920 # Replace the placeholder so we get the values in the right place in the recipe file
921 try:
922 pos = lines_before.index("##LICENSE_PLACEHOLDER##")
923 except ValueError:
924 pos = -1
925 if pos == -1:
926 lines_before.extend(lines)
927 else:
928 lines_before[pos : pos + 1] = lines
929
930 handled.append(("license", [license, licfile, md5value]))
931 else:
932 info["license"] = license
933
934 provided_packages = self.parse_pkgdata_for_python_packages()
935 provided_packages.update(self.known_deps_map)
936 native_mapped_deps, native_unmapped_deps = set(), set()
937 mapped_deps, unmapped_deps = set(), set()
938
939 if "requires" in info:
940 for require in info["requires"]:
941 mapped = provided_packages.get(require)
942
943 if mapped:
944 logger.debug("Mapped %s to %s" % (require, mapped))
945 native_mapped_deps.add(mapped)
946 else:
947 logger.debug("Could not map %s" % require)
948 native_unmapped_deps.add(require)
949
950 info.pop("requires")
951
952 if native_mapped_deps:
953 native_mapped_deps = {
954 item + "-native" for item in native_mapped_deps
955 }
956 native_mapped_deps -= set(self.excluded_native_pkgdeps)
957 if native_mapped_deps:
958 info["requires"] = " ".join(sorted(native_mapped_deps))
959
960 if native_unmapped_deps:
961 lines_after.append("")
962 lines_after.append(
963 "# WARNING: We were unable to map the following python package/module"
964 )
965 lines_after.append(
966 "# dependencies to the bitbake packages which include them:"
967 )
968 lines_after.extend(
969 "# {}".format(d) for d in sorted(native_unmapped_deps)
970 )
971
972 if "dependencies" in info:
973 for dependency in info["dependencies"]:
974 mapped = provided_packages.get(dependency)
975 if mapped:
976 logger.debug("Mapped %s to %s" % (dependency, mapped))
977 mapped_deps.add(mapped)
978 else:
979 logger.debug("Could not map %s" % dependency)
980 unmapped_deps.add(dependency)
981
982 info.pop("dependencies")
983
984 if mapped_deps:
985 info["dependencies"] = " ".join(sorted(mapped_deps))
987
988 if unmapped_deps:
989 lines_after.append("")
990 lines_after.append(
991 "# WARNING: We were unable to map the following python package/module"
992 )
993 lines_after.append(
994 "# runtime dependencies to the bitbake packages which include them:"
995 )
996 lines_after.extend(
997 "# {}".format(d) for d in sorted(unmapped_deps)
998 )
999
1000 self.map_info_to_bbvar(info, extravalues)
1001
1002 handled.append("buildsystem")
1003 except Exception:
1004 logger.exception("Failed to correctly handle pyproject.toml, falling back to another method")
1005 return False
1006
1007
1008def gather_setup_info(fileobj):
1009 parsed = ast.parse(fileobj.read(), fileobj.name)
1010 visitor = SetupScriptVisitor()
1011 visitor.visit(parsed)
1012
1013 non_literals, extensions = {}, []
1014 for key, value in list(visitor.keywords.items()):
1015 if key == 'ext_modules':
1016 if isinstance(value, list):
1017 for ext in value:
1018 if (isinstance(ext, ast.Call) and
1019 isinstance(ext.func, ast.Name) and
1020 ext.func.id == 'Extension' and
1021 not has_non_literals(ext.args)):
1022 extensions.append(ext.args[0])
1023 elif has_non_literals(value):
1024 non_literals[key] = value
1025 del visitor.keywords[key]
1026
1027 return visitor.keywords, visitor.imported_modules, non_literals, extensions
1028
1029
1030class SetupScriptVisitor(ast.NodeVisitor):
1031 def __init__(self):
1032 ast.NodeVisitor.__init__(self)
1033 self.keywords = {}
1034 self.non_literals = []
1035 self.imported_modules = set()
1036
1037 def visit_Expr(self, node):
1038 if isinstance(node.value, ast.Call) and \
1039 isinstance(node.value.func, ast.Name) and \
1040 node.value.func.id == 'setup':
1041 self.visit_setup(node.value)
1042
1043 def visit_setup(self, node):
1044 call = LiteralAstTransform().visit(node)
1045 self.keywords = call.keywords
1046 for k, v in self.keywords.items():
1047 if has_non_literals(v):
1048 self.non_literals.append(k)
1049
1050 def visit_Import(self, node):
1051 for alias in node.names:
1052 self.imported_modules.add(alias.name)
1053
1054 def visit_ImportFrom(self, node):
1055 self.imported_modules.add(node.module)
1056
1057
1058class LiteralAstTransform(ast.NodeTransformer):
1059 """Simplify the ast through evaluation of literals."""
1060 excluded_fields = ['ctx']
1061
1062 def visit(self, node):
1063 if not isinstance(node, ast.AST):
1064 return node
1065 else:
1066 return ast.NodeTransformer.visit(self, node)
1067
1068 def generic_visit(self, node):
1069 try:
1070 return ast.literal_eval(node)
1071 except ValueError:
1072 for field, value in ast.iter_fields(node):
1073 if field in self.excluded_fields:
1074 delattr(node, field)
1075 if value is None:
1076 continue
1077
1078 if isinstance(value, list):
1079 if field in ('keywords', 'kwargs'):
1080 new_value = dict((kw.arg, self.visit(kw.value)) for kw in value)
1081 else:
1082 new_value = [self.visit(i) for i in value]
1083 else:
1084 new_value = self.visit(value)
1085 setattr(node, field, new_value)
1086 return node
1087
1088 def visit_Name(self, node):
1089 if hasattr(__builtins__, node.id):
1090 return getattr(__builtins__, node.id)
1091 else:
1092 return self.generic_visit(node)
1093
1094 def visit_Tuple(self, node):
1095 return tuple(self.visit(v) for v in node.elts)
1096
1097 def visit_List(self, node):
1098 return [self.visit(v) for v in node.elts]
1099
1100 def visit_Set(self, node):
1101 return set(self.visit(v) for v in node.elts)
1102
1103 def visit_Dict(self, node):
1104 keys = (self.visit(k) for k in node.keys)
1105 values = (self.visit(v) for v in node.values)
1106 return dict(zip(keys, values))
1107
1108
1109def has_non_literals(value):
1110 if isinstance(value, ast.AST):
1111 return True
1112 elif isinstance(value, str):
1113 return False
1114 elif hasattr(value, 'values'):
1115 return any(has_non_literals(v) for v in value.values())
1116 elif hasattr(value, '__iter__'):
1117 return any(has_non_literals(v) for v in value)
1118
1119
1120def register_recipe_handlers(handlers):
1121 # We need to make sure these are ahead of the makefile fallback handler,
1122 # and the pyproject.toml handler ahead of the setup.py handler
1123 handlers.append((PythonPyprojectTomlRecipeHandler(), 75))
1124 handlers.append((PythonSetupPyRecipeHandler(), 70))
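For reference, a minimal self-contained sketch (not part of the handler) of how the declared PEP 517 build-backend picks the bbclass to inherit. The map below is an assumed subset of the handler's build_backend_map, reconstructed from the bbclass names mentioned in the excluded_native_pkgdeps comments above:

import tomllib  # Python >= 3.11; the handler falls back to 'tomli'

sample = '''
[build-system]
requires = ["setuptools>=61", "wheel"]
build-backend = "setuptools.build_meta"
'''

# Assumed subset of the handler's build_backend_map
build_backend_map = {
    "setuptools.build_meta": "python_setuptools_build_meta",
    "poetry.core.masonry.api": "python_poetry_core",
    "flit_core.buildapi": "python_flit_core",
    "mesonpy": "python_mesonpy",
}

config = tomllib.loads(sample)
print(build_backend_map.get(config["build-system"]["build-backend"]))
# -> python_setuptools_build_meta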
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
deleted file mode 100644
index 5cc53931f0..0000000000
--- a/scripts/lib/recipetool/create_go.py
+++ /dev/null
@@ -1,777 +0,0 @@
1# Recipe creation tool - go support plugin
2#
3# The code is based on golang internals. See the affected
4# methods for further reference and information.
5#
6# Copyright (C) 2023 Weidmueller GmbH & Co KG
7# Author: Lukas Funke <lukas.funke@weidmueller.com>
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12
13from collections import namedtuple
14from enum import Enum
15from html.parser import HTMLParser
16from recipetool.create import RecipeHandler, handle_license_vars
17from recipetool.create import find_licenses, tidy_licenses, fixup_license
18from recipetool.create import determine_from_url
19from urllib.error import URLError, HTTPError
20
21import bb.utils
22import json
23import logging
24import os
25import re
26import subprocess
27import sys
28import shutil
29import tempfile
30import urllib.parse
31import urllib.request
32
33
34GoImport = namedtuple('GoImport', 'root vcs url suffix')
35logger = logging.getLogger('recipetool')
36CodeRepo = namedtuple(
37 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
38
39tinfoil = None
40
41# Regular expression to parse pseudo semantic version
42# see https://go.dev/ref/mod#pseudo-versions
43re_pseudo_semver = re.compile(
44 r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
45# Regular expression to parse semantic version
46re_semver = re.compile(
47 r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
48
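As a quick illustration of what the two patterns above accept (version strings invented): a pseudo-version encodes a commit timestamp and an abbreviated hash, while tagged releases match the plain semantic-version pattern instead.

import re

re_pseudo_semver = re.compile(
    r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")

m = re_pseudo_semver.match("v0.0.0-20230101120000-c0ffee123456")
print(m.group("utc"), m.group("commithash"))
# -> 20230101120000 c0ffee123456

# "v1.2.3" or "v1.2.3-rc.1" would instead match re_semver, whose named
# groups (major, minor, patch, prerelease) are used further below.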
49
50def tinfoil_init(instance):
51 global tinfoil
52 tinfoil = instance
53
54
55class GoRecipeHandler(RecipeHandler):
56 """Class to handle the go recipe creation"""
57
58 @staticmethod
59 def __ensure_go():
60 """Check if the 'go' command is available in the recipes"""
61 recipe = "go-native"
62 if not tinfoil.recipes_parsed:
63 tinfoil.parse_recipes()
64 try:
65 rd = tinfoil.parse_recipe(recipe)
66 except bb.providers.NoProvider:
67 bb.error(
68 "Nothing provides '%s' which is required for the build" % (recipe))
69 bb.note(
70 "You will likely need to add a layer that provides '%s'" % (recipe))
71 return None
72
73 bindir = rd.getVar('STAGING_BINDIR_NATIVE')
74 gopath = os.path.join(bindir, 'go')
75
76 if not os.path.exists(gopath):
77 tinfoil.build_targets(recipe, 'addto_recipe_sysroot')
78
79 if not os.path.exists(gopath):
80 logger.error(
81 '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe))
82 return None
83
84 return bindir
85
86 def __resolve_repository_static(self, modulepath):
87 """Resolve the repository in a static manner
88
89 The method is based on the go implementation of
90 `repoRootFromVCSPaths` in
91 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
92 """
93
94 url = urllib.parse.urlparse("https://" + modulepath)
95 req = urllib.request.Request(url.geturl())
96
97 try:
98 resp = urllib.request.urlopen(req)
99 # Some modulepath are just redirects to github (or some other vcs
100 # hoster). Therefore, we check if this modulepath redirects to
101 # somewhere else
102 if resp.geturl() != url.geturl():
103 bb.debug(1, "%s is redirectred to %s" %
104 (url.geturl(), resp.geturl()))
105 url = urllib.parse.urlparse(resp.geturl())
106 modulepath = url.netloc + url.path
107
108 except URLError as url_err:
109 # This is probably because the module path
110 # contains the subdir and major path. Thus,
111 # we ignore this error for now
112 logger.debug(
113 "Failed to fetch page from [%s]: %s" % (url, str(url_err)))
114
115 host, _, _ = modulepath.partition('/')
116
117 class vcs(Enum):
118 pathprefix = "pathprefix"
119 regexp = "regexp"
120 type = "type"
121 repo = "repo"
122 check = "check"
123 schemelessRepo = "schemelessRepo"
124
125 # GitHub
126 vcsGitHub = {}
127 vcsGitHub[vcs.pathprefix] = "github.com"
128 vcsGitHub[vcs.regexp] = re.compile(
129 r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
130 vcsGitHub[vcs.type] = "git"
131 vcsGitHub[vcs.repo] = "https://\\g<root>"
132
133 # Bitbucket
134 vcsBitbucket = {}
135 vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
136 vcsBitbucket[vcs.regexp] = re.compile(
137 r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
138 vcsBitbucket[vcs.type] = "git"
139 vcsBitbucket[vcs.repo] = "https://\\g<root>"
140
141 # IBM DevOps Services (JazzHub)
142 vcsIBMDevOps = {}
143 vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
144 vcsIBMDevOps[vcs.regexp] = re.compile(
145 r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
146 vcsIBMDevOps[vcs.type] = "git"
147 vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
148
149 # Git at Apache
150 vcsApacheGit = {}
151 vcsApacheGit[vcs.pathprefix] = "git.apache.org"
152 vcsApacheGit[vcs.regexp] = re.compile(
153 r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
154 vcsApacheGit[vcs.type] = "git"
155 vcsApacheGit[vcs.repo] = "https://\\g<root>"
156
157 # Git at OpenStack
158 vcsOpenStackGit = {}
159 vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
160 vcsOpenStackGit[vcs.regexp] = re.compile(
161 r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
162 vcsOpenStackGit[vcs.type] = "git"
163 vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
164
165 # chiselapp.com for fossil
166 vcsChiselapp = {}
167 vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
168 vcsChiselapp[vcs.regexp] = re.compile(
169 r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
170 vcsChiselapp[vcs.type] = "fossil"
171 vcsChiselapp[vcs.repo] = "https://\\g<root>"
172
173 # General syntax for any server.
174 # Must be last.
175 vcsGeneralServer = {}
176 vcsGeneralServer[vcs.regexp] = re.compile(
177 "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
178 vcsGeneralServer[vcs.schemelessRepo] = True
179
180 vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
181 vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
182 vcsGeneralServer]
183
184 if modulepath.startswith("example.net") or modulepath == "rsc.io":
185 logger.warning("Suspicious module path %s" % modulepath)
186 return None
187 if modulepath.startswith("http:") or modulepath.startswith("https:"):
188 logger.warning("Import path should not start with %s %s" %
189 ("http", "https"))
190 return None
191
192 rootpath = None
193 vcstype = None
194 repourl = None
195 suffix = None
196
197 for srv in vcsPaths:
198 m = srv[vcs.regexp].match(modulepath)
199 if vcs.pathprefix in srv:
200 if m and host == srv[vcs.pathprefix]:
201 rootpath = m.group('root')
202 vcstype = srv[vcs.type]
203 repourl = m.expand(srv[vcs.repo])
204 suffix = m.group('suffix')
205 break
206 elif m and srv[vcs.schemelessRepo]:
207 rootpath = m.group('root')
208 vcstype = m.group('vcs')
209 repourl = m.group('repo')
210 suffix = m.group('suffix')
211 break
212
213 return GoImport(rootpath, vcstype, repourl, suffix)
214
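For example (hypothetical module path), the vcsGitHub entry above decomposes a module path into repository root, clone URL and subdirectory suffix:

import re

vcsGitHub_regexp = re.compile(
    r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')

m = vcsGitHub_regexp.match("github.com/example/mod/util")
print(m.group("root"))                # github.com/example/mod
print(m.group("suffix"))              # util
print(m.expand(r"https://\g<root>"))  # https://github.com/example/mod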
215 def __resolve_repository_dynamic(self, modulepath):
216 """Resolve the repository root in a dynamic manner.
217
218 The method is based on the go implementation of
219 `repoRootForImportDynamic` in
220 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
221 """
222 url = urllib.parse.urlparse("https://" + modulepath)
223
224 class GoImportHTMLParser(HTMLParser):
225
226 def __init__(self):
227 super().__init__()
228 self.__srv = {}
229
230 def handle_starttag(self, tag, attrs):
231 if tag == 'meta' and list(
232 filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
233 content = list(
234 filter(lambda a: (a[0] == 'content'), attrs))
235 if content:
236 srv = content[0][1].split()
237 self.__srv[srv[0]] = srv
238
239 def go_import(self, modulepath):
240 if modulepath in self.__srv:
241 srv = self.__srv[modulepath]
242 return GoImport(srv[0], srv[1], srv[2], None)
243 return None
244
245 url = url.geturl() + "?go-get=1"
246 req = urllib.request.Request(url)
247
248 try:
249 body = urllib.request.urlopen(req).read()
250 except HTTPError as http_err:
251 logger.warning(
252 "Unclean status when fetching page from [%s]: %s", url, str(http_err))
253 body = http_err.fp.read()
254 except URLError as url_err:
255 logger.warning(
256 "Failed to fetch page from [%s]: %s", url, str(url_err))
257 return None
258
259 parser = GoImportHTMLParser()
260 parser.feed(body.decode('utf-8'))
261 parser.close()
262
263 return parser.go_import(modulepath)
264
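The page fetched with '?go-get=1' carries a go-import meta tag of the form "<import-prefix> <vcs> <repo-url>"; a self-contained sketch of the extraction, using the well-known golang.org/x/text tag as the example input:

from html.parser import HTMLParser

page = ('<html><head><meta name="go-import" '
        'content="golang.org/x/text git https://go.googlesource.com/text">'
        '</head></html>')

class MiniGoImportParser(HTMLParser):
    """Minimal stand-in for GoImportHTMLParser above."""
    def __init__(self):
        super().__init__()
        self.srv = {}

    def handle_starttag(self, tag, attrs):
        a = dict(attrs)
        if tag == 'meta' and a.get('name') == 'go-import':
            root, vcs, url = a['content'].split()
            self.srv[root] = (root, vcs, url)

p = MiniGoImportParser()
p.feed(page)
print(p.srv["golang.org/x/text"])
# -> ('golang.org/x/text', 'git', 'https://go.googlesource.com/text')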
265 def __resolve_from_golang_proxy(self, modulepath, version):
266 """
267 Resolves repository data from golang proxy
268 """
269 url = urllib.parse.urlparse("https://proxy.golang.org/"
270 + modulepath
271 + "/@v/"
272 + version
273 + ".info")
274
275 # Transform url to lower case, golang proxy doesn't like mixed case
276 req = urllib.request.Request(url.geturl().lower())
277
278 try:
279 resp = urllib.request.urlopen(req)
280 except URLError as url_err:
281 logger.warning(
282 "Failed to fetch page from [%s]: %s", url, str(url_err))
283 return None
284
285 golang_proxy_res = resp.read().decode('utf-8')
286 modinfo = json.loads(golang_proxy_res)
287
288 if modinfo and 'Origin' in modinfo:
289 origin = modinfo['Origin']
290 _root_url = urllib.parse.urlparse(origin['URL'])
291
292 # We normalize the repo URL since we don't want the scheme in it
293 _subdir = origin['Subdir'] if 'Subdir' in origin else None
294 _root, _, _ = self.__split_path_version(modulepath)
295 if _subdir:
296 _root = _root[:-len(_subdir)].strip('/')
297
298 _commit = origin['Hash']
299 _vcs = origin['VCS']
300 return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
301
302 return None
303
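The '.info' endpoint replies with JSON whose Origin block provides the VCS type, repository URL and commit hash consumed above; an illustrative response (field names per https://go.dev/ref/mod#goproxy, values invented):

import json

info = json.loads('''
{
  "Version": "v1.2.3",
  "Time": "2023-01-01T00:00:00Z",
  "Origin": {
    "VCS": "git",
    "URL": "https://github.com/example/mod",
    "Hash": "0123456789abcdef0123456789abcdef01234567"
  }
}
''')

origin = info["Origin"]
print(origin["VCS"], origin["URL"], origin["Hash"])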
304 def __resolve_repository(self, modulepath):
305 """
306 Resolves src uri from go module-path
307 """
308 repodata = self.__resolve_repository_static(modulepath)
309 if not repodata or not repodata.url:
310 repodata = self.__resolve_repository_dynamic(modulepath)
311 if not repodata or not repodata.url:
312 logger.error(
313 "Could not resolve repository for module path '%s'" % modulepath)
314 # There is no way to recover from this
315 sys.exit(14)
316 if repodata:
317 logger.debug(1, "Resolved download path for import '%s' => %s" % (
318 modulepath, repodata.url))
319 return repodata
320
321 def __split_path_version(self, path):
322 i = len(path)
323 dot = False
324 for j in range(i, 0, -1):
325 if path[j - 1] < '0' or path[j - 1] > '9':
326 break
327 if path[j - 1] == '.':
328 dot = True
329 break
330 i = j - 1
331
332 if i <= 1 or i == len(
333 path) or path[i - 1] != 'v' or path[i - 2] != '/':
334 return path, "", True
335
336 prefix, pathMajor = path[:i - 2], path[i - 2:]
337 if dot or len(
338 pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
339 return path, "", False
340
341 return prefix, pathMajor, True
342
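A compact sketch of the happy path of __split_path_version (module paths invented; it omits the validity flag the full implementation returns for malformed version suffixes):

import re

def split_path_version(path):
    """Split a trailing major-version element (/vN, N >= 2) off a module path."""
    m = re.match(r"^(.+?)(/v(?:[2-9]|[1-9]\d+))$", path)
    return (m.group(1), m.group(2)) if m else (path, "")

print(split_path_version("github.com/example/mod/v2"))
# -> ('github.com/example/mod', '/v2')
print(split_path_version("github.com/example/mod"))
# -> ('github.com/example/mod', '')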
343 def __get_path_major(self, pathMajor):
344 if not pathMajor:
345 return ""
346
347 if pathMajor[0] != '/' and pathMajor[0] != '.':
348 logger.error(
349 "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
350
351 if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
352 pathMajor = pathMajor[:len("-unstable") - 2]
353
354 return pathMajor[1:]
355
356 def __build_coderepo(self, repo, path):
357 codedir = ""
358 pathprefix, pathMajor, _ = self.__split_path_version(path)
359 if repo.root == path:
360 pathprefix = path
361 elif path.startswith(repo.root):
362 codedir = pathprefix[len(repo.root):].strip('/')
363
364 pseudoMajor = self.__get_path_major(pathMajor)
365
366 logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
367 repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
368
369 return CodeRepo(path, repo.root, codedir,
370 pathMajor, pathprefix, pseudoMajor)
371
372 def __resolve_version(self, repo, path, version):
373 hash = None
374 coderoot = self.__build_coderepo(repo, path)
375
376 def vcs_fetch_all():
377 tmpdir = tempfile.mkdtemp()
378 clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
379 bb.process.run(clone_cmd)
380 log_cmd = "git log --all --pretty='%H %d' --decorate=short"
381 output, _ = bb.process.run(
382 log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
383 bb.utils.prunedir(tmpdir)
384 return output.strip().split('\n')
385
386 def vcs_fetch_remote(tag):
387 # add * to grab ^{}
388 refs = {}
389 ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
390 repo.url, tag)
391 output, _ = bb.process.run(ls_remote_cmd)
392 output = output.strip().split('\n')
393 for line in output:
394 f = line.split(maxsplit=1)
395 if len(f) != 2:
396 continue
397
398 for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
399 if f[1].startswith(prefix):
400 refs[f[1][len(prefix):]] = f[0]
401
402 for key, hash in list(refs.items()):
403 if key.endswith(r"^{}"):
404 refs[key.strip(r"^{}")] = hash
405
406 return refs.get(tag)
407
408 m_pseudo_semver = re_pseudo_semver.match(version)
409
410 if m_pseudo_semver:
411 remote_refs = vcs_fetch_all()
412 short_commit = m_pseudo_semver.group('commithash')
413 for l in remote_refs:
414 r = l.split(maxsplit=1)
415 sha1 = r[0] if len(r) else None
416 if not sha1:
417 logger.error(
418 "Oops: could not resolve abbreviated commit for %s" % short_commit)
419
420 elif sha1.startswith(short_commit):
421 hash = sha1
422 break
423 else:
424 m_semver = re_semver.match(version)
425 if m_semver:
426
427 def get_sha1_remote(regex):
428 rsha1 = None
429 for line in remote_refs:
430 # Split lines of the following format:
431 # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
432 lineparts = line.split(maxsplit=1)
433 sha1 = lineparts[0] if len(lineparts) else None
434 refstring = lineparts[1] if len(
435 lineparts) == 2 else None
436 if refstring:
437 # Normalize tag string and split in case of multiple
438 # refs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
439 refs = refstring.strip('(), ').split(',')
440 for ref in refs:
441 if regex.match(ref.strip()):
442 rsha1 = sha1
443 return rsha1
444
445 semver = "v" + m_semver.group('major') + "."\
446 + m_semver.group('minor') + "."\
447 + m_semver.group('patch') \
448 + (("-" + m_semver.group('prerelease'))
449 if m_semver.group('prerelease') else "")
450
451 tag = os.path.join(
452 coderoot.codeDir, semver) if coderoot.codeDir else semver
453
454 # probe tag using 'ls-remote', which is faster than fetching
455 # complete history
456 hash = vcs_fetch_remote(tag)
457 if not hash:
458 # backup: fetch complete history
459 remote_refs = vcs_fetch_all()
460 hash = get_sha1_remote(
461 re.compile(fr"(tag:|HEAD ->) ({tag})"))
462
463 logger.debug(
464 "Resolving commit for tag '%s' -> '%s'", tag, hash)
465 return hash
466
467 def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
468 """Generate SRC_URI functions for go imports"""
469
470 logger.info("Resolving repository for module %s", path)
471 # First try to resolve repo and commit from golang proxy
472 # Most info is already there and we don't have to go through the
473 # repository or even perform the version resolve magic
474 golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
475 if golang_proxy_info:
476 repo = golang_proxy_info[0]
477 commit = golang_proxy_info[1]
478 else:
479 # Fallback
480 # Resolve repository by 'hand'
481 repo = self.__resolve_repository(path)
482 commit = self.__resolve_version(repo, path, version)
483
484 url = urllib.parse.urlparse(repo.url)
485 repo_url = url.netloc + url.path
486
487 coderoot = self.__build_coderepo(repo, path)
488
489 inline_fcn = "${@go_src_uri("
490 inline_fcn += f"'{repo_url}','{version}'"
491 if repo_url != path:
492 inline_fcn += f",path='{path}'"
493 if coderoot.codeDir:
494 inline_fcn += f",subdir='{coderoot.codeDir}'"
495 if repo.vcs != 'git':
496 inline_fcn += f",vcs='{repo.vcs}'"
497 if replaces:
498 inline_fcn += f",replaces='{replaces}'"
499 if coderoot.pathMajor:
500 inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
501 inline_fcn += ")}"
502
503 return inline_fcn, commit
504
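__go_handle_dependencies below walks the JSON emitted by 'go mod edit -json'; this is the subset of that structure the handler actually relies on (example values invented):

import json

go_mod = json.loads('''
{
  "Module": { "Path": "github.com/example/app/v2" },
  "Go": "1.21",
  "Require": [
    { "Path": "github.com/example/dep", "Version": "v1.0.0" }
  ],
  "Replace": [
    {
      "Old": { "Path": "github.com/example/old" },
      "New": { "Path": "github.com/example/new", "Version": "v0.2.0" }
    }
  ]
}
''')

print(go_mod["Module"]["Path"], go_mod["Go"])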
505 def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
506
507 import re
508 src_uris = []
509 src_revs = []
510
511 def generate_src_rev(path, version, commithash):
512 src_rev = f"# {path}@{version} => {commithash}\n"
513 # Oops... maybe someone manipulated the source repository and the
514 # version or commit could not be resolved. This is a sign of
515 # a) the supply chain was manipulated (bad)
516 # b) the implementation for the version resolving didn't work
517 # anymore (less bad)
518 if not commithash:
519 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
520 src_rev += f"#!!! Could not resolve version !!!\n"
521 src_rev += f"#!!! Possible supply chain attack !!!\n"
522 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
523 src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
524
525 return src_rev
526
527 # we first go over the replacement list, because we are essentially
528 # interested only in the replaced path
529 if go_mod['Replace']:
530 for replacement in go_mod['Replace']:
531 oldpath = replacement['Old']['Path']
532 path = replacement['New']['Path']
533 version = ''
534 if 'Version' in replacement['New']:
535 version = replacement['New']['Version']
536
537 if os.path.exists(os.path.join(srctree, path)):
538 # the module refers to the local path, remove it from requirement list
539 # because it's a local module
540 go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
541 else:
542 # Replace the path and the version, so we don't iterate replacement list anymore
543 for require in go_mod['Require']:
544 if require['Path'] == oldpath:
545 require.update({'Path': path, 'Version': version})
546 break
547
548 for require in go_mod['Require']:
549 path = require['Path']
550 version = require['Version']
551
552 inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
553 path, version)
554 src_uris.append(inline_fcn)
555 src_revs.append(generate_src_rev(path, version, commithash))
556
557 # strip version part from module URL /vXX
558 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
559 pn, _ = determine_from_url(baseurl)
560 go_mods_basename = "%s-modules.inc" % pn
561
562 go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
563 with open(go_mods_filename, "w") as f:
564 # We introduce this indirection to make the tests a little easier
565 f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
566 f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
567 for uri in src_uris:
568 f.write(" " + uri + " \\\n")
569 f.write("\"\n\n")
570 for rev in src_revs:
571 f.write(rev + "\n")
572
573 extravalues['extrafiles'][go_mods_basename] = go_mods_filename
574
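Put together, the generated <pn>-modules.inc ends up looking roughly like this (module path, version and hash invented for illustration):

SRC_URI += "${GO_DEPENDENCIES_SRC_URI}"
GO_DEPENDENCIES_SRC_URI = "\
    ${@go_src_uri('github.com/example/dep','v1.0.0')} \
"

# github.com/example/dep@v1.0.0 => 0123456789abcdef0123456789abcdef01234567
SRCREV_github.com.example.dep = "0123456789abcdef0123456789abcdef01234567"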
575 def __go_run_cmd(self, cmd, cwd, d):
576 return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
577 shell=True, cwd=cwd)
578
579 def __go_native_version(self, d):
580 stdout, _ = self.__go_run_cmd("go version", None, d)
581 m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
582 major = int(m.group(2))
583 minor = int(m.group(3))
584 patch = int(m.group(4))
585
586 return major, minor, patch
587
588 def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
589
590 patchfilename = "go.mod.patch"
591 go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
592 d)
593 self.__go_run_cmd("go mod tidy -go=%d.%d" %
594 (go_native_version_major, go_native_version_minor), srctree, d)
595 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
596
597 # Create patch in order to upgrade go version
598 self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
599 # Restore original state
600 self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
601
602 go_mod = json.loads(stdout)
603 tmpfile = os.path.join(localfilesdir, patchfilename)
604 shutil.move(os.path.join(srctree, patchfilename), tmpfile)
605
606 extravalues['extrafiles'][patchfilename] = tmpfile
607
608 return go_mod, patchfilename
609
610 def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
611 # Perform vendoring to retrieve the correct modules.txt
612 tmp_vendor_dir = tempfile.mkdtemp()
613
614 # -v causes go to print modules.txt to stderr
615 _, stderr = self.__go_run_cmd(
616 "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
617
618 modules_txt_basename = "modules.txt"
619 modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
620 with open(modules_txt_filename, "w") as f:
621 f.write(stderr)
622
623 extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
624
625 licenses = []
626 lic_files_chksum = []
627 licvalues = find_licenses(tmp_vendor_dir, d)
628 shutil.rmtree(tmp_vendor_dir)
629
630 if licvalues:
631 for licvalue in licvalues:
632 license = licvalue[0]
633 lics = tidy_licenses(fixup_license(license))
634 lics = [lic for lic in lics if lic not in licenses]
635 if len(lics):
636 licenses.extend(lics)
637 lic_files_chksum.append(
638 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
639
640 # strip version part from module URL /vXX
641 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
642 pn, _ = determine_from_url(baseurl)
643 licenses_basename = "%s-licenses.inc" % pn
644
645 licenses_filename = os.path.join(localfilesdir, licenses_basename)
646 with open(licenses_filename, "w") as f:
647 f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
648 ' & '.join(sorted(licenses, key=str.casefold)))
649 # We introduce this indirection to make the tests a little easier
650 f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
651 f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
652 for lic in lic_files_chksum:
653 f.write(" " + lic + " \\\n")
654 f.write("\"\n")
655
656 extravalues['extrafiles'][licenses_basename] = licenses_filename
657
658 def process(self, srctree, classes, lines_before,
659 lines_after, handled, extravalues):
660
661 if 'buildsystem' in handled:
662 return False
663
664 files = RecipeHandler.checkfiles(srctree, ['go.mod'])
665 if not files:
666 return False
667
668 d = bb.data.createCopy(tinfoil.config_data)
669 go_bindir = self.__ensure_go()
670 if not go_bindir:
671 sys.exit(14)
672
673 d.prependVar('PATH', '%s:' % go_bindir)
674 handled.append('buildsystem')
675 classes.append("go-vendor")
676
677 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
678
679 go_mod = json.loads(stdout)
680 go_import = go_mod['Module']['Path']
681 go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go'])
682 go_version_major = int(go_version_match.group(1))
683 go_version_minor = int(go_version_match.group(2))
684 src_uris = []
685
686 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
687 extravalues.setdefault('extrafiles', {})
688
689 # Use an explicit name determined from the module name because it
690 # might differ from the actual URL for replaced modules
691 # strip version part from module URL /vXX
692 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
693 pn, _ = determine_from_url(baseurl)
694
695 # go.mod files with version < 1.17 may not include all indirect
696 # dependencies. Thus, we have to upgrade the go version.
697 if go_version_major == 1 and go_version_minor < 17:
698 logger.warning(
699 "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.")
700 go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir,
701 extravalues, d)
702 src_uris.append(
703 "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
704
705 # Check whether the module is vendored. If so, we have nothing to do.
706 # Otherwise we gather all dependencies and add them to the recipe
707 if not os.path.exists(os.path.join(srctree, "vendor")):
708
709 # Write additional $BPN-modules.inc file
710 self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d)
711 lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"")
712 lines_before.append("require %s-licenses.inc" % (pn))
713
714 self.__rewrite_src_uri(lines_before, ["file://modules.txt"])
715
716 self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d)
717 lines_before.append("require %s-modules.inc" % (pn))
718
719 # Do generic license handling
720 handle_license_vars(srctree, lines_before, handled, extravalues, d)
721 self.__rewrite_lic_uri(lines_before)
722
723 lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
724 lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
725
726 def __update_lines_before(self, updated, newlines, lines_before):
727 if updated:
728 del lines_before[:]
729 for line in newlines:
730 # Hack to avoid newlines that edit_metadata inserts
731 if line.endswith('\n'):
732 line = line[:-1]
733 lines_before.append(line)
734 return updated
735
736 def __rewrite_lic_uri(self, lines_before):
737
738 def varfunc(varname, origvalue, op, newlines):
739 if varname == 'LIC_FILES_CHKSUM':
740 new_licenses = []
741 licenses = origvalue.split('\\')
742 for license in licenses:
743 if not license:
744 logger.warning("No license file was detected for the main module!")
745 # the license list of the main recipe must be empty
746 # this can happen for example in case of CLOSED license
747 # Fall through to complete recipe generation
748 continue
749 license = license.strip()
750 uri, chksum = license.split(';', 1)
751 url = urllib.parse.urlparse(uri)
752 new_uri = os.path.join(
753 url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum
754 new_licenses.append(new_uri)
755
756 return new_licenses, None, -1, True
757 return origvalue, None, 0, True
758
759 updated, newlines = bb.utils.edit_metadata(
760 lines_before, ['LIC_FILES_CHKSUM'], varfunc)
761 return self.__update_lines_before(updated, newlines, lines_before)
762
763 def __rewrite_src_uri(self, lines_before, additional_uris=None):
764
765 def varfunc(varname, origvalue, op, newlines):
766 if varname == 'SRC_URI':
767 src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"]
768 src_uri.extend(additional_uris or [])
769 return src_uri, None, -1, True
770 return origvalue, None, 0, True
771
772 updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
773 return self.__update_lines_before(updated, newlines, lines_before)
774
775
776def register_recipe_handlers(handlers):
777 handlers.append((GoRecipeHandler(), 60))
diff --git a/scripts/lib/recipetool/create_kernel.py b/scripts/lib/recipetool/create_kernel.py
deleted file mode 100644
index 5740589a68..0000000000
--- a/scripts/lib/recipetool/create_kernel.py
+++ /dev/null
@@ -1,89 +0,0 @@
1# Recipe creation tool - kernel support plugin
2#
3# Copyright (C) 2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import logging
10from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
11
12logger = logging.getLogger('recipetool')
13
14tinfoil = None
15
16def tinfoil_init(instance):
17 global tinfoil
18 tinfoil = instance
19
20
21class KernelRecipeHandler(RecipeHandler):
22 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
23 import bb.process
24 if 'buildsystem' in handled:
25 return False
26
27 for tell in ['arch', 'firmware', 'Kbuild', 'Kconfig']:
28 if not os.path.exists(os.path.join(srctree, tell)):
29 return False
30
31 handled.append('buildsystem')
32 del lines_after[:]
33 del classes[:]
34 template = os.path.join(tinfoil.config_data.getVar('COREBASE'), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb')
35 def handle_var(varname, origvalue, op, newlines):
36 if varname in ['SRCREV', 'SRCREV_machine']:
37 while newlines[-1].startswith('#'):
38 del newlines[-1]
39 try:
40 stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree, shell=True)
41 except bb.process.ExecutionError as e:
42 stdout = None
43 if stdout:
44 return stdout.strip(), op, 0, True
45 elif varname == 'LINUX_VERSION':
46 makefile = os.path.join(srctree, 'Makefile')
47 if os.path.exists(makefile):
48 kversion = -1
49 kpatchlevel = -1
50 ksublevel = -1
51 kextraversion = ''
52 with open(makefile, 'r', errors='surrogateescape') as f:
53 for i, line in enumerate(f):
54 if i > 10:
55 break
56 if line.startswith('VERSION ='):
57 kversion = int(line.split('=')[1].strip())
58 elif line.startswith('PATCHLEVEL ='):
59 kpatchlevel = int(line.split('=')[1].strip())
60 elif line.startswith('SUBLEVEL ='):
61 ksublevel = int(line.split('=')[1].strip())
62 elif line.startswith('EXTRAVERSION ='):
63 kextraversion = line.split('=')[1].strip()
64 version = ''
65 if kversion > -1 and kpatchlevel > -1:
66 version = '%d.%d' % (kversion, kpatchlevel)
67 if ksublevel > -1:
68 version += '.%d' % ksublevel
69 version += kextraversion
70 if version:
71 return version, op, 0, True
72 elif varname == 'SRC_URI':
73 while newlines[-1].startswith('#'):
74 del newlines[-1]
75 elif varname == 'COMPATIBLE_MACHINE':
76 while newlines[-1].startswith('#'):
77 del newlines[-1]
78 machine = tinfoil.config_data.getVar('MACHINE')
79 return machine, op, 0, True
80 return origvalue, op, 0, True
81 with open(template, 'r') as f:
82 varlist = ['SRCREV', 'SRCREV_machine', 'SRC_URI', 'LINUX_VERSION', 'COMPATIBLE_MACHINE']
83 (_, newlines) = bb.utils.edit_metadata(f, varlist, handle_var)
84 lines_before[:] = [line.rstrip('\n') for line in newlines]
85
86 return True
87
88def register_recipe_handlers(handlers):
89 handlers.append((KernelRecipeHandler(), 100))
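As a worked example of the LINUX_VERSION derivation above, a kernel Makefile beginning with (values invented)

    VERSION = 6
    PATCHLEVEL = 6
    SUBLEVEL = 23
    EXTRAVERSION = -rc1

produces LINUX_VERSION = "6.6.23-rc1" in the generated recipe; SRCREV and COMPATIBLE_MACHINE are filled in from 'git rev-parse HEAD' and the configured MACHINE respectively.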
diff --git a/scripts/lib/recipetool/create_kmod.py b/scripts/lib/recipetool/create_kmod.py
deleted file mode 100644
index cc00106961..0000000000
--- a/scripts/lib/recipetool/create_kmod.py
+++ /dev/null
@@ -1,142 +0,0 @@
1# Recipe creation tool - kernel module support plugin
2#
3# Copyright (C) 2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os, re
9import logging
10from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
11
12logger = logging.getLogger('recipetool')
13
14tinfoil = None
15
16def tinfoil_init(instance):
17 global tinfoil
18 tinfoil = instance
19
20
21class KernelModuleRecipeHandler(RecipeHandler):
22 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
23 import bb.process
24 if 'buildsystem' in handled:
25 return False
26
27 module_inc_re = re.compile(r'^#include\s+<linux/module\.h>$')
28 makefiles = []
29 is_module = False
30
32
33 files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True, excludedirs=['contrib', 'test', 'examples'])
34 if files:
35 for cfile in files:
36 # Look in same dir or parent for Makefile
37 for makefile in [os.path.join(os.path.dirname(cfile), 'Makefile'), os.path.join(os.path.dirname(os.path.dirname(cfile)), 'Makefile')]:
38 if makefile in makefiles:
39 break
40 else:
41 if os.path.exists(makefile):
42 makefiles.append(makefile)
43 break
44 else:
45 continue
46 with open(cfile, 'r', errors='surrogateescape') as f:
47 for line in f:
48 if module_inc_re.match(line.strip()):
49 is_module = True
50 break
51 if is_module:
52 break
53
54 if is_module:
55 classes.append('module')
56 handled.append('buildsystem')
57 # module.bbclass and the classes it inherits do most of the hard
58 # work, but we need to tweak it slightly depending on what the
59 # Makefile does (and there is a range of those)
60 # Check the makefile for the appropriate install target
61 install_lines = []
62 compile_lines = []
63 in_install = False
64 in_compile = False
65 install_target = None
66 with open(makefile, 'r', errors='surrogateescape') as f:
67 for line in f:
68 if line.startswith('install:'):
69 if not install_lines:
70 in_install = True
71 install_target = 'install'
72 elif line.startswith('modules_install:'):
73 install_lines = []
74 in_install = True
75 install_target = 'modules_install'
76 elif line.startswith('modules:'):
77 compile_lines = []
78 in_compile = True
79 elif line.startswith(('all:', 'default:')):
80 if not compile_lines:
81 in_compile = True
82 elif line:
83 if line[0] == '\t':
84 if in_install:
85 install_lines.append(line)
86 elif in_compile:
87 compile_lines.append(line)
88 elif ':' in line:
89 in_install = False
90 in_compile = False
91
92 def check_target(lines, install):
93 kdirpath = ''
94 manual_install = False
95 for line in lines:
96 splitline = line.split()
97 if splitline and splitline[0] in ['make', 'gmake', '$(MAKE)']:
98 if '-C' in splitline:
99 idx = splitline.index('-C') + 1
100 if idx < len(splitline):
101 kdirpath = splitline[idx]
102 break
103 elif install and splitline[0] == 'install':
104 if '.ko' in line:
105 manual_install = True
106 return kdirpath, manual_install
107
108 kdirpath = None
109 manual_install = False
110 if install_lines:
111 kdirpath, manual_install = check_target(install_lines, install=True)
112 if compile_lines and not kdirpath:
113 kdirpath, _ = check_target(compile_lines, install=False)
114
115 if manual_install or not install_lines:
116 lines_after.append('EXTRA_OEMAKE:append:task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"')
117 elif install_target and install_target != 'modules_install':
118 lines_after.append('MODULES_INSTALL_TARGET = "install"')
119
120 warnmsg = None
121 kdirvar = None
122 if kdirpath:
123 res = re.match(r'\$\(([^$)]+)\)', kdirpath)
124 if res:
125 kdirvar = res.group(1)
126 if kdirvar != 'KERNEL_SRC':
127 lines_after.append('EXTRA_OEMAKE += "%s=${STAGING_KERNEL_DIR}"' % kdirvar)
128 elif kdirpath.startswith('/lib/'):
129 warnmsg = 'Kernel path in install makefile is hardcoded - you will need to patch the makefile'
130 if not kdirvar and not warnmsg:
131 warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile'
132 if warnmsg:
133 warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.'
134 logger.warning(warnmsg)
135 lines_after.append('# %s' % warnmsg)
136
137 return True
138
139 return False
140
141def register_recipe_handlers(handlers):
142 handlers.append((KernelModuleRecipeHandler(), 15))
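For instance, a module Makefile containing rules such as (hypothetical)

    modules:
    	$(MAKE) -C $(KERNEL_SRC) M=$(PWD) modules

    modules_install:
    	$(MAKE) -C $(KERNEL_SRC) M=$(PWD) modules_install

yields kdirpath '$(KERNEL_SRC)' and kdirvar 'KERNEL_SRC', so neither an EXTRA_OEMAKE tweak nor a warning is emitted; a variable name other than KERNEL_SRC would instead be mapped to ${STAGING_KERNEL_DIR} via EXTRA_OEMAKE.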
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
deleted file mode 100644
index 3363a0e7ee..0000000000
--- a/scripts/lib/recipetool/create_npm.py
+++ /dev/null
@@ -1,299 +0,0 @@
1# Copyright (C) 2016 Intel Corporation
2# Copyright (C) 2020 Savoir-Faire Linux
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Recipe creation tool - npm module support plugin"""
7
8import json
9import logging
10import os
11import re
12import sys
13import tempfile
14import bb
15from bb.fetch2.npm import NpmEnvironment
16from bb.fetch2.npm import npm_package
17from bb.fetch2.npmsw import foreach_dependencies
18from recipetool.create import RecipeHandler
19from recipetool.create import match_licenses, find_license_files, generate_common_licenses_chksums
20from recipetool.create import split_pkg_licenses
21logger = logging.getLogger('recipetool')
22
23TINFOIL = None
24
25def tinfoil_init(instance):
26 """Initialize tinfoil"""
27 global TINFOIL
28 TINFOIL = instance
29
30class NpmRecipeHandler(RecipeHandler):
31 """Class to handle the npm recipe creation"""
32
33 @staticmethod
34 def _get_registry(lines):
35 """Get the registry value from the 'npm://registry' url"""
36 registry = None
37
38 def _handle_registry(varname, origvalue, op, newlines):
39 nonlocal registry
40 if origvalue.startswith("npm://"):
41 registry = re.sub(r"^npm://", "http://", origvalue.split(";")[0])
42 return origvalue, None, 0, True
43
44 bb.utils.edit_metadata(lines, ["SRC_URI"], _handle_registry)
45
46 return registry
47
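For instance (hypothetical recipe line), an 'npm://' SRC_URI entry yields the registry with its scheme rewritten, mirroring the substitution in _handle_registry above:

import re

origvalue = "npm://registry.npmjs.org;package=mypkg;version=${PV}"
registry = re.sub(r"^npm://", "http://", origvalue.split(";")[0])
print(registry)  # -> http://registry.npmjs.org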
48 @staticmethod
49 def _ensure_npm():
50 """Check if the 'npm' command is available in the recipes"""
51 if not TINFOIL.recipes_parsed:
52 TINFOIL.parse_recipes()
53
54 try:
55 d = TINFOIL.parse_recipe("nodejs-native")
56 except bb.providers.NoProvider:
57 bb.error("Nothing provides 'nodejs-native' which is required for the build")
58 bb.note("You will likely need to add a layer that provides nodejs")
59 sys.exit(14)
60
61 bindir = d.getVar("STAGING_BINDIR_NATIVE")
62 npmpath = os.path.join(bindir, "npm")
63
64 if not os.path.exists(npmpath):
65 TINFOIL.build_targets("nodejs-native", "addto_recipe_sysroot")
66
67 if not os.path.exists(npmpath):
68 bb.error("Failed to add 'npm' to sysroot")
69 sys.exit(14)
70
71 return bindir
72
73 @staticmethod
74 def _npm_global_configs(dev):
75 """Get the npm global configuration"""
76 configs = []
77
78 if dev:
79 configs.append(("also", "development"))
80 else:
81 configs.append(("only", "production"))
82
83 configs.append(("save", "false"))
84 configs.append(("package-lock", "false"))
85 configs.append(("shrinkwrap", "false"))
86 return configs
87
88 def _run_npm_install(self, d, srctree, registry, dev):
89 """Run the 'npm install' command without building the addons"""
90 configs = self._npm_global_configs(dev)
91 configs.append(("ignore-scripts", "true"))
92
93 if registry:
94 configs.append(("registry", registry))
95
96 bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
97
98 env = NpmEnvironment(d, configs=configs)
99 env.run("npm install", workdir=srctree)
100
101 def _generate_shrinkwrap(self, d, srctree, dev):
102 """Check and generate the 'npm-shrinkwrap.json' file if needed"""
103 configs = self._npm_global_configs(dev)
104
105 env = NpmEnvironment(d, configs=configs)
106 env.run("npm shrinkwrap", workdir=srctree)
107
108 return os.path.join(srctree, "npm-shrinkwrap.json")
109
110 def _handle_licenses(self, srctree, shrinkwrap_file, dev):
111 """Return the extra license files and the list of packages"""
112 licfiles = []
113 packages = {}
114 # Licenses from package.json will point to COMMON_LICENSE_DIR so we need
115 # to associate them explicitly to packages for split_pkg_licenses()
116 fallback_licenses = dict()
117
118 def _find_package_licenses(destdir):
119 """Either find license files, or use package.json metadata"""
120 def _get_licenses_from_package_json(package_json):
121 with open(os.path.join(srctree, package_json), "r") as f:
122 data = json.load(f)
123 if "license" in data:
124 licenses = data["license"].split(" ")
125 licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"]
126 return [], licenses
127 else:
128 return [package_json], None
129
130 basedir = os.path.join(srctree, destdir)
131 licfiles = find_license_files(basedir)
132 if len(licfiles) > 0:
133 return licfiles, None
134 else:
135 # A license wasn't found in the package directory, so we'll use the package.json metadata
136 pkg_json = os.path.join(basedir, "package.json")
137 return _get_licenses_from_package_json(pkg_json)
138
139 def _get_package_licenses(destdir, package):
140 (package_licfiles, package_licenses) = _find_package_licenses(destdir)
141 if package_licfiles:
142 licfiles.extend(package_licfiles)
143 else:
144 fallback_licenses[package] = package_licenses
145
146 # Handle the dependencies
147 def _handle_dependency(name, params, destdir):
148 deptree = destdir.split('node_modules/')
149 suffix = "-".join([npm_package(dep) for dep in deptree])
150 packages["${PN}" + suffix] = destdir
151 _get_package_licenses(destdir, "${PN}" + suffix)
152
153 with open(shrinkwrap_file, "r") as f:
154 shrinkwrap = json.load(f)
155 foreach_dependencies(shrinkwrap, _handle_dependency, dev)
156
157 # Handle the parent package
158 packages["${PN}"] = ""
159 _get_package_licenses(srctree, "${PN}")
160
161 return licfiles, packages, fallback_licenses
162
163 # Handle the peer dependencies
164 def _handle_peer_dependency(self, shrinkwrap_file):
165 """Check if package has peer dependencies and show warning if it is the case"""
166 with open(shrinkwrap_file, "r") as f:
167 shrinkwrap = json.load(f)
168
169 packages = shrinkwrap.get("packages", {})
170 peer_deps = packages.get("", {}).get("peerDependencies", {})
171
172 for peer_dep in peer_deps:
173 peer_dep_yocto_name = npm_package(peer_dep)
174 bb.warn(peer_dep + " is a peer dependencie of the actual package. " +
175 "Please add this peer dependencie to the RDEPENDS variable as %s and generate its recipe with devtool"
176 % peer_dep_yocto_name)
177
178
179
180 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
181 """Handle the npm recipe creation"""
182
183 if "buildsystem" in handled:
184 return False
185
186 files = RecipeHandler.checkfiles(srctree, ["package.json"])
187
188 if not files:
189 return False
190
191 with open(files[0], "r") as f:
192 data = json.load(f)
193
194 if "name" not in data or "version" not in data:
195 return False
196
197 extravalues["PN"] = npm_package(data["name"])
198 extravalues["PV"] = data["version"]
199
200 if "description" in data:
201 extravalues["SUMMARY"] = data["description"]
202
203 if "homepage" in data:
204 extravalues["HOMEPAGE"] = data["homepage"]
205
206 dev = bb.utils.to_boolean(str(extravalues.get("NPM_INSTALL_DEV", "0")), False)
207 registry = self._get_registry(lines_before)
208
209 bb.note("Checking if npm is available ...")
210 # The native npm is used here (and not the host one) to ensure that the
211 # npm version is high enough to ensure an efficient dependency tree
212 # resolution and avoid issue with the shrinkwrap file format.
213 # Moreover the native npm is mandatory for the build.
214 bindir = self._ensure_npm()
215
216 d = bb.data.createCopy(TINFOIL.config_data)
217 d.prependVar("PATH", bindir + ":")
218 d.setVar("S", srctree)
219
220 bb.note("Generating shrinkwrap file ...")
221 # To generate the shrinkwrap file the dependencies have to be installed
222 # first. During the generation process some files may be updated /
223 # deleted. By default devtool tracks the diffs in the srctree and raises
224 # errors when finishing the recipe if some diffs are found.
225 git_exclude_file = os.path.join(srctree, ".git", "info", "exclude")
226 if os.path.exists(git_exclude_file):
227 with open(git_exclude_file, "r+") as f:
228 lines = f.readlines()
229 for line in ["/node_modules/", "/npm-shrinkwrap.json"]:
230 if line + "\n" not in lines:
231 f.write(line + "\n")
232
233 lock_file = os.path.join(srctree, "package-lock.json")
234 lock_copy = lock_file + ".copy"
235 if os.path.exists(lock_file):
236 bb.utils.copyfile(lock_file, lock_copy)
237
238 self._run_npm_install(d, srctree, registry, dev)
239 shrinkwrap_file = self._generate_shrinkwrap(d, srctree, dev)
240
241 with open(shrinkwrap_file, "r") as f:
242 shrinkwrap = json.load(f)
243
244 if os.path.exists(lock_copy):
245 bb.utils.movefile(lock_copy, lock_file)
246
247 # Add the shrinkwrap file as 'extrafiles'
248 shrinkwrap_copy = shrinkwrap_file + ".copy"
249 bb.utils.copyfile(shrinkwrap_file, shrinkwrap_copy)
250 extravalues.setdefault("extrafiles", {})
251 extravalues["extrafiles"]["npm-shrinkwrap.json"] = shrinkwrap_copy
252
253 url_local = "npmsw://%s" % shrinkwrap_file
254 url_recipe= "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json"
255
256 if dev:
257 url_local += ";dev=1"
258 url_recipe += ";dev=1"
259
260 # Add the npmsw url in the SRC_URI of the generated recipe
261 def _handle_srcuri(varname, origvalue, op, newlines):
262 """Update the version value and add the 'npmsw://' url"""
263 value = origvalue.replace("version=" + data["version"], "version=${PV}")
264 value = value.replace("version=latest", "version=${PV}")
265 values = [line.strip() for line in value.strip('\n').splitlines()]
266 if "dependencies" in shrinkwrap.get("packages", {}).get("", {}):
267 values.append(url_recipe)
268 return values, None, 4, False
269
270 (_, newlines) = bb.utils.edit_metadata(lines_before, ["SRC_URI"], _handle_srcuri)
271 lines_before[:] = [line.rstrip('\n') for line in newlines]
272
273 # In order to generate correct license checksums in the recipe the
274 # dependencies have to be fetched again using the npmsw url
275 bb.note("Fetching npm dependencies ...")
276 bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
277 fetcher = bb.fetch2.Fetch([url_local], d)
278 fetcher.download()
279 fetcher.unpack(srctree)
280
281 bb.note("Handling licences ...")
282 (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev)
283 licvalues = match_licenses(licfiles, srctree, d)
284 split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses)
285 fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist]
286 extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d)
287 extravalues["LICENSE"] = fallback_licenses_flat
288
289 classes.append("npm")
290 handled.append("buildsystem")
291
292 # Check if package has peer dependencies and inform the user
293 self._handle_peer_dependency(shrinkwrap_file)
294
295 return True
296
297def register_recipe_handlers(handlers):
298 """Register the npm handler"""
299 handlers.append((NpmRecipeHandler(), 60))
diff --git a/scripts/lib/recipetool/edit.py b/scripts/lib/recipetool/edit.py
deleted file mode 100644
index d5b980a1c0..0000000000
--- a/scripts/lib/recipetool/edit.py
+++ /dev/null
@@ -1,44 +0,0 @@
1# Recipe creation tool - edit plugin
2#
3# This sub-command edits the recipe and appends for the specified target
4#
5# Example: recipetool edit busybox
6#
7# Copyright (C) 2018 Mentor Graphics Corporation
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import argparse
13import errno
14import logging
15import os
16import re
17import subprocess
18import sys
19import scriptutils
20
21
22logger = logging.getLogger('recipetool')
23tinfoil = None
24
25
26def tinfoil_init(instance):
27 global tinfoil
28 tinfoil = instance
29
30
31def edit(args):
32 import oe.recipeutils
33
34 recipe_path = tinfoil.get_recipe_file(args.target)
35 appends = tinfoil.get_file_appends(recipe_path)
36
37 return scriptutils.run_editor([recipe_path] + list(appends), logger)
38
39
40def register_commands(subparsers):
41 parser = subparsers.add_parser('edit',
42 help='Edit the recipe and appends for the specified target. This obeys $VISUAL if set, otherwise $EDITOR, otherwise vi.')
43 parser.add_argument('target', help='Target recipe/provide to edit')
44 parser.set_defaults(func=edit, parserecipes=True)
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv
deleted file mode 100644
index 80851111b3..0000000000
--- a/scripts/lib/recipetool/licenses.csv
+++ /dev/null
@@ -1,37 +0,0 @@
10636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only
212f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only
318810669f13b87348459e611d31ab760,GPL-2.0-only
4252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only
52d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only
63214f080875748938ba060314b4f727d,LGPL-2.0-only
7385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only
8393a5ca445f6965873eca0259a17f833,GPL-2.0-only
93b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
103bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only
114325afd396febcb659c36b49533135d4,GPL-2.0-only
124fbd65380cdd255951079008b364516c,LGPL-2.1-only
1354c7042be62e169199200bc6477f04d1,BSD-3-Clause
1455ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only
1559530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only
165f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only
176a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only
18751419260aa954499f7abaabaa882bbe,GPL-2.0-only
197fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only
208ca43cbc842c2336e835926c2166c28b,GPL-2.0-only
2194d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only
229ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only
239f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only
24a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only
25b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only
26bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only
27bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only
28c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only
29d32239bcb673463ab874e80d47fae504,GPL-3.0-only
30d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only
31d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only
32db979804f025cf55aabec7129cb671ed,LGPL-2.0-only
33eb723b61539feef013de476e68b5c50a,GPL-2.0-only
34ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only
35f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only
36fad9b3332be894bab9bc501572864b29,LGPL-2.1-only
37fbc093901857fcd118f065f900982c24,LGPL-2.1-only
diff --git a/scripts/lib/recipetool/newappend.py b/scripts/lib/recipetool/newappend.py
deleted file mode 100644
index 08e2474dc4..0000000000
--- a/scripts/lib/recipetool/newappend.py
+++ /dev/null
@@ -1,79 +0,0 @@
1# Recipe creation tool - newappend plugin
2#
3# This sub-command creates a bbappend for the specified target and prints the
4# path to the bbappend.
5#
6# Example: recipetool newappend meta-mylayer busybox
7#
8# Copyright (C) 2015 Christopher Larson <kergoth@gmail.com>
9#
10# SPDX-License-Identifier: GPL-2.0-only
11#
12
13import argparse
14import bb
15import logging
16import os
17import re
18import subprocess
19import sys
20import scriptutils
21
22
23logger = logging.getLogger('recipetool')
24tinfoil = None
25
26
27def tinfoil_init(instance):
28 global tinfoil
29 tinfoil = instance
30
31
32def layer(layerpath):
33 if not os.path.exists(os.path.join(layerpath, 'conf', 'layer.conf')):
34 raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath))
35 return layerpath
36
37
38def newappend(args):
39 import oe.recipeutils
40
41 recipe_path = tinfoil.get_recipe_file(args.target)
42
43 rd = tinfoil.config_data.createCopy()
44 rd.setVar('FILE', recipe_path)
45 append_path, path_ok = oe.recipeutils.get_bbappend_path(rd, args.destlayer, args.wildcard_version)
46 if not append_path:
47 logger.error('Unable to determine layer directory containing %s', recipe_path)
48 return 1
49
50 if not path_ok:
51 logger.warning('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.', os.path.join(args.destlayer, 'conf', 'layer.conf'), os.path.dirname(append_path))
52
53 layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
54 if not os.path.abspath(args.destlayer) in layerdirs:
55 logger.warning('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active')
56
57 if not os.path.exists(append_path):
58 bb.utils.mkdirhier(os.path.dirname(append_path))
59
60 try:
61 open(append_path, 'a').close()
62 except (OSError, IOError) as exc:
63 logger.critical(str(exc))
64 return 1
65
66 if args.edit:
67 return scriptutils.run_editor([append_path, recipe_path], logger)
68 else:
69 print(append_path)
70
71
72def register_commands(subparsers):
73 parser = subparsers.add_parser('newappend',
74 help='Create a bbappend for the specified target in the specified layer')
75 parser.add_argument('-e', '--edit', help='Edit the new append. This obeys $VISUAL if set, otherwise $EDITOR, otherwise vi.', action='store_true')
76 parser.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
77 parser.add_argument('destlayer', help='Base directory of the destination layer to write the bbappend to', type=layer)
78 parser.add_argument('target', help='Target recipe/provide to append')
79 parser.set_defaults(func=newappend, parserecipes=True)
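The append path computation is delegated to oe.recipeutils.get_bbappend_path; the file-naming convention it follows can be sketched as below (illustrative only, with a hypothetical recipe name):

import os

def bbappend_name_sketch(recipe_path, wildcard_version=False):
    # e.g. busybox_1.36.1.bb -> busybox_%.bbappend with -w/--wildcard-version,
    # otherwise busybox_1.36.1.bbappend (recipe name is hypothetical)
    base = os.path.splitext(os.path.basename(recipe_path))[0]
    if wildcard_version and '_' in base:
        base = base.split('_')[0] + '_%'
    return base + '.bbappend'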
diff --git a/scripts/lib/recipetool/setvar.py b/scripts/lib/recipetool/setvar.py
deleted file mode 100644
index b5ad335cae..0000000000
--- a/scripts/lib/recipetool/setvar.py
+++ /dev/null
@@ -1,66 +0,0 @@
1# Recipe creation tool - set variable plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import sys
9import os
10import argparse
11import glob
12import fnmatch
13import re
14import logging
15import scriptutils
16
17logger = logging.getLogger('recipetool')
18
19tinfoil = None
20plugins = None
21
22def tinfoil_init(instance):
23 global tinfoil
24 tinfoil = instance
25
26def setvar(args):
27 import oe.recipeutils
28
29 if args.delete:
30 if args.value:
31 logger.error('-D/--delete and specifying a value are mutually exclusive')
32 return 1
33 value = None
34 else:
35 if args.value is None:
36 logger.error('You must specify a value if not using -D/--delete')
37 return 1
38 value = args.value
39 varvalues = {args.varname: value}
40
41 if args.recipe_only:
42 patches = [oe.recipeutils.patch_recipe_file(args.recipefile, varvalues, patch=args.patch)]
43 else:
44 rd = tinfoil.parse_recipe_file(args.recipefile, False)
45 if not rd:
46 return 1
47 patches = oe.recipeutils.patch_recipe(rd, args.recipefile, varvalues, patch=args.patch)
48 if args.patch:
49 for patch in patches:
50 for line in patch:
51 sys.stdout.write(line)
52 tinfoil.modified_files()
53 return 0
54
55
56def register_commands(subparsers):
57 parser_setvar = subparsers.add_parser('setvar',
58 help='Set a variable within a recipe',
59 description='Adds/updates the value a variable is set to in a recipe')
60 parser_setvar.add_argument('recipefile', help='Recipe file to update')
61 parser_setvar.add_argument('varname', help='Variable name to set')
62 parser_setvar.add_argument('value', nargs='?', help='New value to set the variable to')
63 parser_setvar.add_argument('--recipe-only', '-r', help='Do not set variable in any include file if present', action='store_true')
64 parser_setvar.add_argument('--patch', '-p', help='Create a patch to make the change instead of modifying the recipe', action='store_true')
65 parser_setvar.add_argument('--delete', '-D', help='Delete the specified value instead of setting it', action='store_true')
66 parser_setvar.set_defaults(func=setvar)
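Typical invocations, with hypothetical layer and recipe paths:

recipetool setvar meta-mylayer/recipes-example/example/example_0.1.bb EXTRA_OECONF "--enable-foo"
recipetool setvar --patch meta-mylayer/recipes-example/example/example_0.1.bb SUMMARY "An example recipe"
recipetool setvar -D meta-mylayer/recipes-example/example/example_0.1.bb BBCLASSEXTEND

The second form prints a patch instead of modifying the recipe in place; the third deletes the variable.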
diff --git a/scripts/lib/resulttool/__init__.py b/scripts/lib/resulttool/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/resulttool/__init__.py
+++ /dev/null
diff --git a/scripts/lib/resulttool/junit.py b/scripts/lib/resulttool/junit.py
deleted file mode 100644
index c7a53dc550..0000000000
--- a/scripts/lib/resulttool/junit.py
+++ /dev/null
@@ -1,77 +0,0 @@
1# resulttool - report test results in JUnit XML format
2#
3# Copyright (c) 2024, Siemens AG.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import re
10import xml.etree.ElementTree as ET
11import resulttool.resultutils as resultutils
12
13def junit(args, logger):
14 testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map)
15
16 total_time = 0
17 skipped = 0
18 failures = 0
19 errors = 0
20
21 for tests in testresults.values():
22 results = tests[next(reversed(tests))].get("result", {})
23
24 for result_id, result in results.items():
25 # filter out ptestresult.rawlogs and ptestresult.sections
26 if re.search(r'\.test_', result_id):
27 total_time += result.get("duration", 0)
28
29 if result['status'] == "FAILED":
30 failures += 1
31 elif result['status'] == "ERROR":
32 errors += 1
33 elif result['status'] == "SKIPPED":
34 skipped += 1
35
36 testsuites_node = ET.Element("testsuites")
37 testsuites_node.set("time", "%s" % total_time)
38 testsuite_node = ET.SubElement(testsuites_node, "testsuite")
39 testsuite_node.set("name", "Testimage")
40 testsuite_node.set("time", "%s" % total_time)
41 testsuite_node.set("tests", "%s" % len(results))
42 testsuite_node.set("failures", "%s" % failures)
43 testsuite_node.set("errors", "%s" % errors)
44 testsuite_node.set("skipped", "%s" % skipped)
45
46 for result_id, result in results.items():
47 if re.search(r'\.test_', result_id):
48 testcase_node = ET.SubElement(testsuite_node, "testcase", {
49 "name": result_id,
50 "classname": "Testimage",
51 "time": str(result['duration'])
52 })
53 if result['status'] == "SKIPPED":
54 ET.SubElement(testcase_node, "skipped", message=result['log'])
55 elif result['status'] == "FAILED":
56 ET.SubElement(testcase_node, "failure", message=result['log'])
57 elif result['status'] == "ERROR":
58 ET.SubElement(testcase_node, "error", message=result['log'])
59
60 tree = ET.ElementTree(testsuites_node)
61
62 if args.junit_xml_path is None:
63 args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml'
64 tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True)
65
66 logger.info('Saved JUnit XML report as %s' % args.junit_xml_path)
67
68def register_commands(subparsers):
69 """Register subcommands from this plugin"""
70 parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format',
71 description='generate unit test report in JUnit XML format based on the latest test results in the testresults.json.',
72 group='analysis')
73 parser_build.set_defaults(func=junit)
74 parser_build.add_argument('json_file',
75 help='path to the testresults.json file to report on')
76 parser_build.add_argument('-j', '--junit_xml_path',
77 help='path of the generated JUnit XML test report. The default location is <build_dir>/tmp/log/oeqa/junit.xml')
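Based on the element construction above, the generated report has the following shape (test names, times and messages are hypothetical):

# <?xml version='1.0' encoding='UTF-8'?>
# <testsuites time="12.3">
#   <testsuite name="Testimage" time="12.3" tests="2" failures="1" errors="0" skipped="0">
#     <testcase name="ping.PingTest.test_ping" classname="Testimage" time="0.2"/>
#     <testcase name="ssh.SSHTest.test_ssh" classname="Testimage" time="12.1">
#       <failure message="..."/>
#     </testcase>
#   </testsuite>
# </testsuites>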
diff --git a/scripts/lib/resulttool/log.py b/scripts/lib/resulttool/log.py
deleted file mode 100644
index 15148ca288..0000000000
--- a/scripts/lib/resulttool/log.py
+++ /dev/null
@@ -1,107 +0,0 @@
1# resulttool - Show logs
2#
3# Copyright (c) 2019 Garmin International
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7import os
8import resulttool.resultutils as resultutils
9
10def show_ptest(result, ptest, logger):
11 logdata = resultutils.ptestresult_get_log(result, ptest)
12 if logdata is not None:
13 print(logdata)
14 return 0
15
16 print("ptest '%s' log not found" % ptest)
17 return 1
18
19def show_reproducible(result, reproducible, logger):
20 try:
21 print(result['reproducible'][reproducible]['diffoscope.text'])
22 return 0
23
24 except KeyError:
25 print("reproducible '%s' not found" % reproducible)
26 return 1
27
28def log(args, logger):
29 results = resultutils.load_resultsdata(args.source)
30
31 for _, run_name, _, r in resultutils.test_run_results(results):
32 if args.list_ptest:
33 print('\n'.join(sorted(r['ptestresult.sections'].keys())))
34
35 if args.dump_ptest:
36 for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']:
37 if sectname in r:
38 for name, ptest in r[sectname].items():
39 logdata = resultutils.generic_get_log(sectname, r, name)
40 if logdata is not None:
41 dest_dir = args.dump_ptest
42 if args.prepend_run:
43 dest_dir = os.path.join(dest_dir, run_name)
44 if not sectname.startswith("ptest"):
45 dest_dir = os.path.join(dest_dir, sectname.split(".")[0])
46
47 os.makedirs(dest_dir, exist_ok=True)
48 dest = os.path.join(dest_dir, '%s.log' % name)
49 if os.path.exists(dest):
50 print("Overlapping ptest logs found, skipping %s. The '--prepend-run' option would avoid this" % name)
51 continue
52 print(dest)
53 with open(dest, 'w') as f:
54 f.write(logdata)
55
56 if args.raw_ptest:
57 found = False
58 for sectname in ['ptestresult.rawlogs', 'ltpposixresult.rawlogs', 'ltpresult.rawlogs']:
59 rawlog = resultutils.generic_get_rawlogs(sectname, r)
60 if rawlog is not None:
61 print(rawlog)
62 found = True
63 if not found:
64 print('Raw ptest logs not found')
65 return 1
66
67 if args.raw_reproducible:
68 if 'reproducible.rawlogs' in r:
69 print(r['reproducible.rawlogs']['log'])
70 else:
71 print('Raw reproducible logs not found')
72 return 1
73
74 for ptest in args.ptest:
75 if show_ptest(r, ptest, logger): # non-zero return means the log was not found
76 return 1
77
78 for reproducible in args.reproducible:
79 if show_reproducible(r, reproducible, logger): # non-zero return means not found
80 return 1
81
82def register_commands(subparsers):
83 """Register subcommands from this plugin"""
84 parser = subparsers.add_parser('log', help='show logs',
85 description='show the logs from test results',
86 group='analysis')
87 parser.set_defaults(func=log)
88 parser.add_argument('source',
89 help='the results file/directory/URL to import')
90 parser.add_argument('--list-ptest', action='store_true',
91 help='list the ptest test names')
92 parser.add_argument('--ptest', action='append', default=[],
93 help='show logs for a ptest')
94 parser.add_argument('--dump-ptest', metavar='DIR',
95 help='Dump all ptest log files to the specified directory.')
96 parser.add_argument('--reproducible', action='append', default=[],
97 help='show logs for a reproducible test')
98 parser.add_argument('--prepend-run', action='store_true',
99 help='''Dump ptest results to a subdirectory named after the test run when using --dump-ptest.
100 Required if more than one test run is present in the result file''')
101 parser.add_argument('--raw', action='store_true',
102 help='show raw (ptest) logs. Deprecated. Alias for "--raw-ptest"', dest='raw_ptest')
103 parser.add_argument('--raw-ptest', action='store_true',
104 help='show raw ptest log')
105 parser.add_argument('--raw-reproducible', action='store_true',
106 help='show raw reproducible build logs')
107
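Example invocations against the options registered above (file and ptest names hypothetical):

resulttool log testresults.json --list-ptest
resulttool log testresults.json --dump-ptest ./ptest-logs --prepend-run
resulttool log testresults.json --ptest glibc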
diff --git a/scripts/lib/resulttool/manualexecution.py b/scripts/lib/resulttool/manualexecution.py
deleted file mode 100755
index ae0861ac6b..0000000000
--- a/scripts/lib/resulttool/manualexecution.py
+++ /dev/null
@@ -1,235 +0,0 @@
1# test case management tool - manual execution from testopia test cases
2#
3# Copyright (c) 2018, Intel Corporation.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import argparse
9import json
10import os
11import sys
12import datetime
13import re
14import copy
15from oeqa.core.runner import OETestResultJSONHelper
16
17
18def load_json_file(f):
19 with open(f, "r") as filedata:
20 return json.load(filedata)
21
22def write_json_file(f, json_data):
23 os.makedirs(os.path.dirname(f), exist_ok=True)
24 with open(f, 'w') as filedata:
25 filedata.write(json.dumps(json_data, sort_keys=True, indent=1))
26
27class ManualTestRunner(object):
28
29 def _get_test_module(self, case_file):
30 return os.path.basename(case_file).split('.')[0]
31
32 def _get_input(self, config):
33 while True:
34 output = input('{} = '.format(config))
35 if re.match('^[a-z0-9-.]+$', output):
36 break
37 print('Only lowercase alphanumeric characters, hyphens and dots are allowed. Please try again.')
38 return output
39
40 def _get_available_config_options(self, config_options, test_module, target_config):
41 avail_config_options = None
42 if test_module in config_options:
43 avail_config_options = config_options[test_module].get(target_config)
44 return avail_config_options
45
46 def _choose_config_option(self, options):
47 while True:
48 output = input('{} = '.format('Option index number'))
49 if output in options:
50 break
51 print('Only an integer index from the configuration options listed above is allowed. Please try again.')
52 return options[output]
53
54 def _get_config(self, config_options, test_module):
55 from oeqa.utils.metadata import get_layers
56 from oeqa.utils.commands import get_bb_var
57 from resulttool.resultutils import store_map
58
59 layers = get_layers(get_bb_var('BBLAYERS'))
60 configurations = {}
61 configurations['LAYERS'] = layers
62 configurations['STARTTIME'] = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
63 configurations['TEST_TYPE'] = 'manual'
64 configurations['TEST_MODULE'] = test_module
65
66 extra_config = set(store_map['manual']) - set(configurations)
67 for config in sorted(extra_config):
68 avail_config_options = self._get_available_config_options(config_options, test_module, config)
69 if avail_config_options:
70 print('---------------------------------------------')
71 print('These are the available options for configuration #%s:' % config)
72 print('---------------------------------------------')
73 for option, _ in sorted(avail_config_options.items(), key=lambda x: int(x[0])):
74 print('%s: %s' % (option, avail_config_options[option]))
75 print('Please select a configuration option by entering its integer index number.')
76 value_conf = self._choose_config_option(avail_config_options)
77 print('---------------------------------------------\n')
78 else:
79 print('---------------------------------------------')
80 print('This is configuration #%s. Please provide a configuration value (use "None" if not applicable).' % config)
81 print('---------------------------------------------')
82 value_conf = self._get_input('Configuration Value')
83 print('---------------------------------------------\n')
84 configurations[config] = value_conf
85 return configurations
86
87 def _execute_test_steps(self, case):
88 test_result = {}
89 print('------------------------------------------------------------------------')
90 print('Executing test case: %s' % case['test']['@alias'])
91 print('------------------------------------------------------------------------')
92 print('You have a total of %s test steps to be executed.' % len(case['test']['execution']))
93 print('------------------------------------------------------------------------\n')
94 for step, _ in sorted(case['test']['execution'].items(), key=lambda x: int(x[0])):
95 print('Step %s: %s' % (step, case['test']['execution'][step]['action']))
96 expected_output = case['test']['execution'][step]['expected_results']
97 if expected_output:
98 print('Expected output: %s' % expected_output)
99 while True:
100 done = input('\nPlease provide test results: (P)assed/(F)ailed/(B)locked/(S)kipped? \n').lower()
101 result_types = {'p':'PASSED',
102 'f':'FAILED',
103 'b':'BLOCKED',
104 's':'SKIPPED'}
105 if done in result_types:
106 for r in result_types:
107 if done == r:
108 res = result_types[r]
109 if res == 'FAILED':
110 log_input = input('\nPlease enter the error and a description of the log (e.g. log:211 Error Bitbake)\n')
111 test_result.update({case['test']['@alias']: {'status': '%s' % res, 'log': '%s' % log_input}})
112 else:
113 test_result.update({case['test']['@alias']: {'status': '%s' % res}})
114 break
115 print('Invalid input!')
116 return test_result
117
118 def _get_write_dir(self):
119 return os.environ['BUILDDIR'] + '/tmp/log/manual/'
120
121 def run_test(self, case_file, config_options_file, testcase_config_file):
122 test_module = self._get_test_module(case_file)
123 cases = load_json_file(case_file)
124 config_options = {}
125 if config_options_file:
126 config_options = load_json_file(config_options_file)
127 configurations = self._get_config(config_options, test_module)
128 result_id = 'manual_%s_%s' % (test_module, configurations['STARTTIME'])
129 test_results = {}
130 if testcase_config_file:
131 test_case_config = load_json_file(testcase_config_file)
132 test_case_to_execute = test_case_config['testcases']
133 for case in copy.deepcopy(cases):
134 if case['test']['@alias'] not in test_case_to_execute:
135 cases.remove(case)
136
137 print('\nTotal number of test cases in this test suite: %s\n' % len(cases))
138 for c in cases:
139 test_result = self._execute_test_steps(c)
140 test_results.update(test_result)
141 return configurations, result_id, self._get_write_dir(), test_results
142
143 def _get_true_false_input(self, input_message):
144 yes_list = ['Y', 'YES']
145 no_list = ['N', 'NO']
146 while True:
147 more_config_option = input(input_message).upper()
148 if more_config_option in yes_list or more_config_option in no_list:
149 break
150 print('Invalid input!')
151 if more_config_option in no_list:
152 return False
153 return True
154
155 def make_config_option_file(self, logger, case_file, config_options_file):
156 config_options = {}
157 if config_options_file:
158 config_options = load_json_file(config_options_file)
159 new_test_module = self._get_test_module(case_file)
160 print('Creating configuration options file for test module: %s' % new_test_module)
161 new_config_options = {}
162
163 while True:
164 config_name = input('\nPlease provide test configuration to create:\n').upper()
165 new_config_options[config_name] = {}
166 while True:
167 config_value = self._get_input('Configuration possible option value')
168 config_option_index = len(new_config_options[config_name]) + 1
169 new_config_options[config_name][config_option_index] = config_value
170 more_config_option = self._get_true_false_input('\nIs there another configuration option to add: (Y)es/(N)o\n')
171 if not more_config_option:
172 break
173 more_config = self._get_true_false_input('\nIs there another configuration to create: (Y)es/(N)o\n')
174 if not more_config:
175 break
176
177 if new_config_options:
178 config_options[new_test_module] = new_config_options
179 if not config_options_file:
180 config_options_file = os.path.join(self._get_write_dir(), 'manual_config_options.json')
181 write_json_file(config_options_file, config_options)
182 logger.info('Configuration option file created at %s' % config_options_file)
183
184 def make_testcase_config_file(self, logger, case_file, testcase_config_file):
185 if testcase_config_file:
186 if os.path.exists(testcase_config_file):
187 print('\nTest configuration file with name %s already exists. Please provide a unique file name.' % (testcase_config_file))
188 return 0
189
190 if not testcase_config_file:
191 testcase_config_file = os.path.join(self._get_write_dir(), "testconfig_new.json")
192
193 testcase_config = {}
194 cases = load_json_file(case_file)
195 new_test_module = self._get_test_module(case_file)
196 new_testcase_config = {}
197 new_testcase_config['testcases'] = []
198
199 print('\nAdd testcases for this configuration file:')
200 for case in cases:
201 print('\n' + case['test']['@alias'])
202 add_tc_config = self._get_true_false_input('\nDo you want to add this test case to the test configuration: (Y)es/(N)o\n')
203 if add_tc_config:
204 new_testcase_config['testcases'].append(case['test']['@alias'])
205 write_json_file(testcase_config_file, new_testcase_config)
206 logger.info('Testcase Configuration file created at %s' % testcase_config_file)
207
208def manualexecution(args, logger):
209 testrunner = ManualTestRunner()
210 if args.make_config_options_file:
211 testrunner.make_config_option_file(logger, args.file, args.config_options_file)
212 return 0
213 if args.make_testcase_config_file:
214 testrunner.make_testcase_config_file(logger, args.file, args.testcase_config_file)
215 return 0
216 configurations, result_id, write_dir, test_results = testrunner.run_test(args.file, args.config_options_file, args.testcase_config_file)
217 resultjsonhelper = OETestResultJSONHelper()
218 resultjsonhelper.dump_testresult_file(write_dir, configurations, result_id, test_results)
219 return 0
220
221def register_commands(subparsers):
222 """Register subcommands from this plugin"""
223 parser_build = subparsers.add_parser('manualexecution', help='helper script for populating results during manual test execution.',
224 description='helper script for populating results during manual test execution. You can find manual test case JSON files in meta/lib/oeqa/manual/',
225 group='manualexecution')
226 parser_build.set_defaults(func=manualexecution)
227 parser_build.add_argument('file', help='specify the path to the manual test case JSON file. Note: please use \"\" to quote the file path.')
228 parser_build.add_argument('-c', '--config-options-file', default='',
229 help='the config options file to import and use for configuration option selection, or to extend when making a config options file')
230 parser_build.add_argument('-m', '--make-config-options-file', action='store_true',
231 help='make the configuration options file based on provided inputs')
232 parser_build.add_argument('-t', '--testcase-config-file', default='',
233 help='the testcase configuration file used to run a selected set of test cases, or the file name to use when making a testcase configuration file')
234 parser_build.add_argument('-d', '--make-testcase-config-file', action='store_true',
235 help='make the testcase configuration file to run a set of test cases based on user selection') \ No newline at end of file
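The per-case dict built by _execute_test_steps() and handed to OETestResultJSONHelper.dump_testresult_file() has this shape (aliases and log text hypothetical):

test_results = {
    'manual.bsp-hw.some_case': {'status': 'FAILED', 'log': 'log:211 Error Bitbake'},
    'manual.bsp-hw.other_case': {'status': 'PASSED'},
}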
diff --git a/scripts/lib/resulttool/merge.py b/scripts/lib/resulttool/merge.py
deleted file mode 100644
index 18b4825a18..0000000000
--- a/scripts/lib/resulttool/merge.py
+++ /dev/null
@@ -1,46 +0,0 @@
1# resulttool - merge multiple testresults.json files into a file or directory
2#
3# Copyright (c) 2019, Intel Corporation.
4# Copyright (c) 2019, Linux Foundation
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import os
10import json
11import resulttool.resultutils as resultutils
12
13def merge(args, logger):
14 configvars = {}
15 if not args.not_add_testseries:
16 configvars = resultutils.extra_configvars.copy()
17 if args.executed_by:
18 configvars['EXECUTED_BY'] = args.executed_by
19 if resultutils.is_url(args.target_results) or os.path.isdir(args.target_results):
20 results = resultutils.load_resultsdata(args.target_results, configmap=resultutils.store_map, configvars=configvars)
21 resultutils.append_resultsdata(results, args.base_results, configmap=resultutils.store_map, configvars=configvars)
22 resultutils.save_resultsdata(results, args.target_results)
23 else:
24 results = resultutils.load_resultsdata(args.base_results, configmap=resultutils.flatten_map, configvars=configvars)
25 if os.path.exists(args.target_results):
26 resultutils.append_resultsdata(results, args.target_results, configmap=resultutils.flatten_map, configvars=configvars)
27 resultutils.save_resultsdata(results, os.path.dirname(args.target_results), fn=os.path.basename(args.target_results))
28
29 logger.info('Merged results into %s' % args.target_results)
30
31 return 0
32
33def register_commands(subparsers):
34 """Register subcommands from this plugin"""
35 parser_build = subparsers.add_parser('merge', help='merge test result files/directories/URLs',
36 description='merge the results from multiple files/directories/URLs into the target file or directory',
37 group='setup')
38 parser_build.set_defaults(func=merge)
39 parser_build.add_argument('base_results',
40 help='the results file/directory/URL to import')
41 parser_build.add_argument('target_results',
42 help='the target file or directory to merge the base_results with')
43 parser_build.add_argument('-t', '--not-add-testseries', action='store_true',
44 help='do not add testseries configuration to results')
45 parser_build.add_argument('-x', '--executed-by', default='',
46 help='add executed-by configuration to each result file')
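Both branches above merge base_results into target_results; the distinction is only the on-disk layout. Illustrative invocations (paths hypothetical):

# target is a directory (or URL): results are appended into the store layout
resulttool merge new/testresults.json results-store/
# target is a single file: results are flattened into one combined file
resulttool merge new/testresults.json merged-testresults.json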
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py
deleted file mode 100644
index 33b3119c54..0000000000
--- a/scripts/lib/resulttool/regression.py
+++ /dev/null
@@ -1,450 +0,0 @@
1# resulttool - regression analysis
2#
3# Copyright (c) 2019, Intel Corporation.
4# Copyright (c) 2019, Linux Foundation
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import resulttool.resultutils as resultutils
10
11from oeqa.utils.git import GitRepo
12import oeqa.utils.gitarchive as gitarchive
13
14METADATA_MATCH_TABLE = {
15 "oeselftest": "OESELFTEST_METADATA"
16}
17
18OESELFTEST_METADATA_GUESS_TABLE={
19 "trigger-build-posttrigger": {
20 "run_all_tests": False,
21 "run_tests":["buildoptions.SourceMirroring.test_yocto_source_mirror"],
22 "skips": None,
23 "machine": None,
24 "select_tags":None,
25 "exclude_tags": None
26 },
27 "reproducible": {
28 "run_all_tests": False,
29 "run_tests":["reproducible"],
30 "skips": None,
31 "machine": None,
32 "select_tags":None,
33 "exclude_tags": None
34 },
35 "arch-qemu-quick": {
36 "run_all_tests": True,
37 "run_tests":None,
38 "skips": None,
39 "machine": None,
40 "select_tags":["machine"],
41 "exclude_tags": None
42 },
43 "arch-qemu-full-x86-or-x86_64": {
44 "run_all_tests": True,
45 "run_tests":None,
46 "skips": None,
47 "machine": None,
48 "select_tags":["machine", "toolchain-system"],
49 "exclude_tags": None
50 },
51 "arch-qemu-full-others": {
52 "run_all_tests": True,
53 "run_tests":None,
54 "skips": None,
55 "machine": None,
56 "select_tags":["machine", "toolchain-user"],
57 "exclude_tags": None
58 },
59 "selftest": {
60 "run_all_tests": True,
61 "run_tests":None,
62 "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"],
63 "machine": None,
64 "select_tags":None,
65 "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
66 },
67 "bringup": {
68 "run_all_tests": True,
69 "run_tests":None,
70 "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"],
71 "machine": None,
72 "select_tags":None,
73 "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
74 }
75}
76
77STATUS_STRINGS = {
78 "None": "No matching test result"
79}
80
81REGRESSIONS_DISPLAY_LIMIT=50
82
83MISSING_TESTS_BANNER = "-------------------------- Missing tests --------------------------"
84ADDITIONAL_DATA_BANNER = "--------------------- Matches and improvements --------------------"
85
86def test_has_at_least_one_matching_tag(test, tag_list):
87 return "oetags" in test and any(oetag in tag_list for oetag in test["oetags"])
88
89def all_tests_have_at_least_one_matching_tag(results, tag_list):
90 return all(test_has_at_least_one_matching_tag(test_result, tag_list) or test_name.startswith("ptestresult") for (test_name, test_result) in results.items())
91
92def any_test_have_any_matching_tag(results, tag_list):
93 return any(test_has_at_least_one_matching_tag(test, tag_list) for test in results.values())
94
95def have_skipped_test(result, test_prefix):
96 return all( result[test]['status'] == "SKIPPED" for test in result if test.startswith(test_prefix))
97
98def have_all_tests_skipped(result, test_prefixes_list):
99 return all(have_skipped_test(result, test_prefix) for test_prefix in test_prefixes_list)
100
101def guess_oeselftest_metadata(results):
102 """
103 When an oeselftest test result is lacking OESELFTEST_METADATA, we can try to guess it based on results content.
104 Check results for specific values (absence/presence of oetags, number and name of executed tests...),
105 and if they match one of the known configurations from the autobuilder, apply the guessed OESELFTEST_METADATA
106 to the result to allow proper test filtering.
107 This guessing process is tightly coupled to config.json in autobuilder. It should trigger less and less,
108 as new tests will have OESELFTEST_METADATA properly appended at test reporting time
109 """
110
111 if len(results) == 1 and "buildoptions.SourceMirroring.test_yocto_source_mirror" in results:
112 return OESELFTEST_METADATA_GUESS_TABLE['trigger-build-posttrigger']
113 elif all(result.startswith("reproducible") for result in results):
114 return OESELFTEST_METADATA_GUESS_TABLE['reproducible']
115 elif all_tests_have_at_least_one_matching_tag(results, ["machine"]):
116 return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-quick']
117 elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-system"]):
118 return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-x86-or-x86_64']
119 elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-user"]):
120 return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-others']
121 elif not any_test_have_any_matching_tag(results, ["machine", "toolchain-user", "toolchain-system"]):
122 if have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"]):
123 return OESELFTEST_METADATA_GUESS_TABLE['selftest']
124 elif have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"]):
125 return OESELFTEST_METADATA_GUESS_TABLE['bringup']
126
127 return None
128
129
130def metadata_matches(base_configuration, target_configuration):
131 """
132 For passed base and target, check test type. If test type matches one of
133 properties described in METADATA_MATCH_TABLE, compare metadata if it is
134 present in base. Return true if metadata matches, or if base lacks some
135 data (either TEST_TYPE or the corresponding metadata)
136 """
137 test_type = base_configuration.get('TEST_TYPE')
138 if test_type not in METADATA_MATCH_TABLE:
139 return True
140
141 metadata_key = METADATA_MATCH_TABLE.get(test_type)
142 if target_configuration.get(metadata_key) != base_configuration.get(metadata_key):
143 return False
144
145 return True
146
147
148def machine_matches(base_configuration, target_configuration):
149 return base_configuration.get('MACHINE') == target_configuration.get('MACHINE')
150
151
152def can_be_compared(logger, base, target):
153 """
154 Some tests are not relevant for comparison, for example oeselftest
155 runs with different test sets or parameters. Return true if the tests can
156 be compared
157 """
158 ret = True
159 base_configuration = base['configuration']
160 target_configuration = target['configuration']
161
162 # Older test results lack proper OESELFTEST_METADATA: if not present, try to guess it based on tests results.
163 if base_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in base_configuration:
164 guess = guess_oeselftest_metadata(base['result'])
165 if guess is None:
166 logger.error(f"ERROR: did not manage to guess oeselftest metadata for {base_configuration['STARTTIME']}")
167 else:
168 logger.debug(f"Enriching {base_configuration['STARTTIME']} with {guess}")
169 base_configuration['OESELFTEST_METADATA'] = guess
170 if target_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in target_configuration:
171 guess = guess_oeselftest_metadata(target['result'])
172 if guess is None:
173 logger.error(f"ERROR: did not manage to guess oeselftest metadata for {target_configuration['STARTTIME']}")
174 else:
175 logger.debug(f"Enriching {target_configuration['STARTTIME']} with {guess}")
176 target_configuration['OESELFTEST_METADATA'] = guess
177
178 # Test runs with LTP results in them should only be compared with other runs that also have LTP tests in them
179 if base_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in base['result']):
180 ret = target_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in target['result'])
181
182 return ret and metadata_matches(base_configuration, target_configuration) \
183 and machine_matches(base_configuration, target_configuration)
184
185def get_status_str(raw_status):
186 raw_status_lower = raw_status.lower() if raw_status else "None"
187 return STATUS_STRINGS.get(raw_status_lower, raw_status)
188
189def get_additional_info_line(new_pass_count, new_tests):
190 result=[]
191 if new_tests:
192 result.append(f'+{new_tests} test(s) present')
193 if new_pass_count:
194 result.append(f'+{new_pass_count} test(s) now passing')
195
196 if not result:
197 return ""
198
199 return ' -> ' + ', '.join(result) + '\n'
200
201def compare_result(logger, base_name, target_name, base_result, target_result, display_limit=None):
202 base_result = base_result.get('result')
203 target_result = target_result.get('result')
204 result = {}
205 new_tests = 0
206 regressions = {}
207 resultstring = ""
208 new_tests = 0
209 new_pass_count = 0
210
211 display_limit = int(display_limit) if display_limit else REGRESSIONS_DISPLAY_LIMIT
212
213 if base_result and target_result:
214 for k in base_result:
215 if k in ['ptestresult.rawlogs', 'ptestresult.sections']:
216 continue
217 base_testcase = base_result[k]
218 base_status = base_testcase.get('status')
219 if base_status:
220 target_testcase = target_result.get(k, {})
221 target_status = target_testcase.get('status')
222 if base_status != target_status:
223 result[k] = {'base': base_status, 'target': target_status}
224 else:
225 logger.error('Failed to retrieve base test case status: %s' % k)
226
227 # Also count new tests that were not present in the base results: these
228 # could be newly added tests, but they could also highlight test
229 # renames or fixed faulty ptests
230 for k in target_result:
231 if k not in base_result:
232 new_tests += 1
233 if result:
234 new_pass_count = sum(test['target'] is not None and test['target'].startswith("PASS") for test in result.values())
235 # Print a regression report only if at least one test has a regression status (FAIL, SKIPPED, absent...)
236 if new_pass_count < len(result):
237 resultstring = "Regression: %s\n %s\n" % (base_name, target_name)
238 for k in sorted(result):
239 if not result[k]['target'] or not result[k]['target'].startswith("PASS"):
240 # Differentiate each ptest kind when listing regressions
241 key_parts = k.split('.')
242 key = '.'.join(key_parts[:2]) if k.startswith('ptest') else key_parts[0]
243 # Append new regression to corresponding test family
244 regressions[key] = regressions.setdefault(key, []) + [' %s: %s -> %s\n' % (k, get_status_str(result[k]['base']), get_status_str(result[k]['target']))]
245 resultstring += f" Total: {sum([len(regressions[r]) for r in regressions])} new regression(s):\n"
246 for k in regressions:
247 resultstring += f" {len(regressions[k])} regression(s) for {k}\n"
248 count_to_print=min([display_limit, len(regressions[k])]) if display_limit > 0 else len(regressions[k])
249 resultstring += ''.join(regressions[k][:count_to_print])
250 if count_to_print < len(regressions[k]):
251 resultstring+=' [...]\n'
252 if new_pass_count > 0:
253 resultstring += f' Additionally, {new_pass_count} previously failing test(s) is/are now passing\n'
254 if new_tests > 0:
255 resultstring += f' Additionally, {new_tests} new test(s) is/are present\n'
256 else:
257 resultstring = "%s\n%s\n" % (base_name, target_name)
258 result = None
259 else:
260 resultstring = "%s\n%s\n" % (base_name, target_name)
261
262 if not result:
263 additional_info = get_additional_info_line(new_pass_count, new_tests)
264 if additional_info:
265 resultstring += additional_info
266
267 return result, resultstring
268
269def get_results(logger, source):
270 return resultutils.load_resultsdata(source, configmap=resultutils.regression_map)
271
272def regression(args, logger):
273 base_results = get_results(logger, args.base_result)
274 target_results = get_results(logger, args.target_result)
275
276 regression_common(args, logger, base_results, target_results)
277
278# Some test case naming is poor and contains random strings, particularly lttng/babeltrace.
279# Truncating the test names works since they contain file and line number identifiers
280# which allow us to match them without the random components.
281def fixup_ptest_names(results, logger):
282 for r in results:
283 for i in results[r]:
284 tests = list(results[r][i]['result'].keys())
285 for test in tests:
286 new = None
287 if test.startswith(("ptestresult.lttng-tools.", "ptestresult.babeltrace.", "ptestresult.babeltrace2")) and "_-_" in test:
288 new = test.split("_-_")[0]
289 elif test.startswith("ptestresult.curl.") and "__" in test:
290 new = test.split("__")[0]
291 elif test.startswith("ptestresult.dbus.") and "__" in test:
292 new = test.split("__")[0]
293 elif test.startswith("ptestresult.binutils") and "build-st-" in test:
294 new = test.split(" ")[0]
295 elif test.startswith("ptestresult.gcc") and "/tmp/runtest." in test:
296 new = ".".join(test.split(".")[:2])
297 if new:
298 results[r][i]['result'][new] = results[r][i]['result'][test]
299 del results[r][i]['result'][test]
300
301def regression_common(args, logger, base_results, target_results):
302 if args.base_result_id:
303 base_results = resultutils.filter_resultsdata(base_results, args.base_result_id)
304 if args.target_result_id:
305 target_results = resultutils.filter_resultsdata(target_results, args.target_result_id)
306
307 fixup_ptest_names(base_results, logger)
308 fixup_ptest_names(target_results, logger)
309
310 matches = []
311 regressions = []
312 notfound = []
313
314 for a in base_results:
315 if a in target_results:
316 base = list(base_results[a].keys())
317 target = list(target_results[a].keys())
318 # We may have multiple base/targets which are for different configurations. Start by
319 # removing any pairs which match
320 for c in base.copy():
321 for b in target.copy():
322 if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
323 continue
324 res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit)
325 if not res:
326 matches.append(resstr)
327 base.remove(c)
328 target.remove(b)
329 break
330 # Should only now see regressions, we may not be able to match multiple pairs directly
331 for c in base:
332 for b in target:
333 if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
334 continue
335 res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit)
336 if res:
337 regressions.append(resstr)
338 else:
339 notfound.append("%s not found in target" % a)
340 print("\n".join(sorted(regressions)))
341 print("\n" + MISSING_TESTS_BANNER + "\n")
342 print("\n".join(sorted(notfound)))
343 print("\n" + ADDITIONAL_DATA_BANNER + "\n")
344 print("\n".join(sorted(matches)))
345 return 0
346
347def regression_git(args, logger):
348 base_results = {}
349 target_results = {}
350
351 tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
352 repo = GitRepo(args.repo)
353
354 revs = gitarchive.get_test_revs(logger, repo, tag_name, branch=args.branch)
355
356 if args.branch2:
357 revs2 = gitarchive.get_test_revs(logger, repo, tag_name, branch=args.branch2)
358 if not len(revs2):
359 logger.error("No revisions found to compare against")
360 return 1
361 if not len(revs):
362 logger.error("No revision to report on found")
363 return 1
364 else:
365 if len(revs) < 2:
366 logger.error("Only %d tester revisions found, unable to generate report" % len(revs))
367 return 1
368
369 # Pick revisions
370 if args.commit:
371 if args.commit_number:
372 logger.warning("Ignoring --commit-number as --commit was specified")
373 index1 = gitarchive.rev_find(revs, 'commit', args.commit)
374 elif args.commit_number:
375 index1 = gitarchive.rev_find(revs, 'commit_number', args.commit_number)
376 else:
377 index1 = len(revs) - 1
378
379 if args.branch2:
380 revs2.append(revs[index1])
381 index1 = len(revs2) - 1
382 revs = revs2
383
384 if args.commit2:
385 if args.commit_number2:
386 logger.warning("Ignoring --commit-number2 as --commit2 was specified")
387 index2 = gitarchive.rev_find(revs, 'commit', args.commit2)
388 elif args.commit_number2:
389 index2 = gitarchive.rev_find(revs, 'commit_number', args.commit_number2)
390 else:
391 if index1 > 0:
392 index2 = index1 - 1
393 # Find the closest matching commit number for comparison
394 # In future we could check that the commit is a common ancestor and
395 # continue back if not, but this is good enough for now
396 while index2 > 0 and revs[index2].commit_number > revs[index1].commit_number:
397 index2 = index2 - 1
398 else:
399 logger.error("Unable to determine the other commit, use "
400 "--commit2 or --commit-number2 to specify it")
401 return 1
402
403 logger.info("Comparing:\n%s\nto\n%s\n" % (revs[index1], revs[index2]))
404
405 base_results = resultutils.git_get_result(repo, revs[index1][2])
406 target_results = resultutils.git_get_result(repo, revs[index2][2])
407
408 regression_common(args, logger, base_results, target_results)
409
410 return 0
411
412def register_commands(subparsers):
413 """Register subcommands from this plugin"""
414
415 parser_build = subparsers.add_parser('regression', help='regression file/directory analysis',
416 description='regression analysis comparing the base set of results to the target results',
417 group='analysis')
418 parser_build.set_defaults(func=regression)
419 parser_build.add_argument('base_result',
420 help='base result file/directory/URL for the comparison')
421 parser_build.add_argument('target_result',
422 help='target result file/directory/URL to compare with')
423 parser_build.add_argument('-b', '--base-result-id', default='',
424 help='(optional) filter the base results to this result ID')
425 parser_build.add_argument('-t', '--target-result-id', default='',
426 help='(optional) filter the target results to this result ID')
427 parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes")
428
429 parser_build = subparsers.add_parser('regression-git', help='regression git analysis',
430 description='regression analysis comparing base result set to target '
431 'result set',
432 group='analysis')
433 parser_build.set_defaults(func=regression_git)
434 parser_build.add_argument('repo',
435 help='the git repository containing the data')
436 parser_build.add_argument('-b', '--base-result-id', default='',
437 help='(optional) filter the base results to this result ID; by default, results '
438 'are matched based on their configurations')
439 parser_build.add_argument('-t', '--target-result-id', default='',
440 help='(optional) filter the target results to this result ID; by default, results '
441 'are matched based on their configurations')
442
443 parser_build.add_argument('--branch', '-B', default='master', help="Branch to find commit in")
444 parser_build.add_argument('--branch2', help="Branch to find comparison revisions in")
445 parser_build.add_argument('--commit', help="Revision to search for")
446 parser_build.add_argument('--commit-number', help="Revision number to search for, redundant if --commit is specified")
447 parser_build.add_argument('--commit2', help="Revision to compare with")
448 parser_build.add_argument('--commit-number2', help="Revision number to compare with, redundant if --commit2 is specified")
449 parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes")
450
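fixup_ptest_names() above truncates the random components so that the same test can be matched across runs; for example (test names hypothetical):

# 'ptestresult.lttng-tools.ust/foo.py_-_a1b2'  -> 'ptestresult.lttng-tools.ust/foo.py'
# 'ptestresult.curl.test_0042__9f3e'           -> 'ptestresult.curl.test_0042'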
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py
deleted file mode 100644
index 1c100b00ab..0000000000
--- a/scripts/lib/resulttool/report.py
+++ /dev/null
@@ -1,315 +0,0 @@
1# test result tool - report text based test results
2#
3# Copyright (c) 2019, Intel Corporation.
4# Copyright (c) 2019, Linux Foundation
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import os
10import glob
11import json
12import resulttool.resultutils as resultutils
13from oeqa.utils.git import GitRepo
14import oeqa.utils.gitarchive as gitarchive
15
16
17class ResultsTextReport(object):
18 def __init__(self):
19 self.ptests = {}
20 self.ltptests = {}
21 self.ltpposixtests = {}
22 self.result_types = {'passed': ['PASSED', 'passed', 'PASS', 'XFAIL'],
23 'failed': ['FAILED', 'failed', 'FAIL', 'ERROR', 'error', 'UNKNOWN', 'XPASS'],
24 'skipped': ['SKIPPED', 'skipped', 'UNSUPPORTED', 'UNTESTED', 'UNRESOLVED']}
25
26
27 def handle_ptest_result(self, k, status, result, machine):
28 if machine not in self.ptests:
29 self.ptests[machine] = {}
30
31 if k == 'ptestresult.sections':
32 # Ensure tests without any test results still show up on the report
33 for suite in result['ptestresult.sections']:
34 if suite not in self.ptests[machine]:
35 self.ptests[machine][suite] = {
36 'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
37 'failed_testcases': [], "testcases": set(),
38 }
39 if 'duration' in result['ptestresult.sections'][suite]:
40 self.ptests[machine][suite]['duration'] = result['ptestresult.sections'][suite]['duration']
41 if 'timeout' in result['ptestresult.sections'][suite]:
42 self.ptests[machine][suite]['duration'] += " T"
43 return True
44
45 # process test result
46 try:
47 _, suite, test = k.split(".", 2)
48 except ValueError:
49 return True
50
51 # Handle suite names containing a dot, e.g. 'glib-2.0'
52 if 'ptestresult.sections' in result and suite not in result['ptestresult.sections']:
53 try:
54 _, suite, suite1, test = k.split(".", 3)
55 if suite + "." + suite1 in result['ptestresult.sections']:
56 suite = suite + "." + suite1
57 except ValueError:
58 pass
59
60 if suite not in self.ptests[machine]:
61 self.ptests[machine][suite] = {
62 'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
63 'failed_testcases': [], "testcases": set(),
64 }
65
66 # do not process duplicate results
67 if test in self.ptests[machine][suite]["testcases"]:
68 print("Warning duplicate ptest result '{}.{}' for {}".format(suite, test, machine))
69 return False
70
71 for tk in self.result_types:
72 if status in self.result_types[tk]:
73 self.ptests[machine][suite][tk] += 1
74 self.ptests[machine][suite]["testcases"].add(test)
75 return True
76
77 def handle_ltptest_result(self, k, status, result, machine):
78 if machine not in self.ltptests:
79 self.ltptests[machine] = {}
80
81 if k == 'ltpresult.sections':
82 # Ensure tests without any test results still show up on the report
83 for suite in result['ltpresult.sections']:
84 if suite not in self.ltptests[machine]:
85 self.ltptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
86 if 'duration' in result['ltpresult.sections'][suite]:
87 self.ltptests[machine][suite]['duration'] = result['ltpresult.sections'][suite]['duration']
88 if 'timeout' in result['ltpresult.sections'][suite]:
89 self.ltptests[machine][suite]['duration'] += " T"
90 return
91 try:
92 _, suite, test = k.split(".", 2)
93 except ValueError:
94 return
95 # Handle suite names containing a dot, e.g. 'glib-2.0'
96 if 'ltpresult.sections' in result and suite not in result['ltpresult.sections']:
97 try:
98 _, suite, suite1, test = k.split(".", 3)
99 if suite + "." + suite1 in result['ltpresult.sections']:
100 suite = suite + "." + suite1
101 except ValueError:
102 pass
103 if suite not in self.ltptests[machine]:
104 self.ltptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
105 for tk in self.result_types:
106 if status in self.result_types[tk]:
107 self.ltptests[machine][suite][tk] += 1
108
109 def handle_ltpposixtest_result(self, k, status, result, machine):
110 if machine not in self.ltpposixtests:
111 self.ltpposixtests[machine] = {}
112
113 if k == 'ltpposixresult.sections':
114 # Ensure tests without any test results still show up on the report
115 for suite in result['ltpposixresult.sections']:
116 if suite not in self.ltpposixtests[machine]:
117 self.ltpposixtests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
118 if 'duration' in result['ltpposixresult.sections'][suite]:
119 self.ltpposixtests[machine][suite]['duration'] = result['ltpposixresult.sections'][suite]['duration']
120 return
121 try:
122 _, suite, test = k.split(".", 2)
123 except ValueError:
124 return
125 # Handle suite names containing a dot, e.g. 'glib-2.0'
126 if 'ltpposixresult.sections' in result and suite not in result['ltpposixresult.sections']:
127 try:
128 _, suite, suite1, test = k.split(".", 3)
129 if suite + "." + suite1 in result['ltpposixresult.sections']:
130 suite = suite + "." + suite1
131 except ValueError:
132 pass
133 if suite not in self.ltpposixtests[machine]:
134 self.ltpposixtests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
135 for tk in self.result_types:
136 if status in self.result_types[tk]:
137 self.ltpposixtests[machine][suite][tk] += 1
138
139 def get_aggregated_test_result(self, logger, testresult, machine):
140 test_count_report = {'passed': 0, 'failed': 0, 'skipped': 0, 'failed_testcases': []}
141 result = testresult.get('result', [])
142 for k in result:
143 test_status = result[k].get('status', [])
144 if k.startswith("ptestresult."):
145 if not self.handle_ptest_result(k, test_status, result, machine):
146 continue
147 elif k.startswith("ltpresult."):
148 self.handle_ltptest_result(k, test_status, result, machine)
149 elif k.startswith("ltpposixresult."):
150 self.handle_ltpposixtest_result(k, test_status, result, machine)
151
152 # process result if it was not skipped by a handler
153 for tk in self.result_types:
154 if test_status in self.result_types[tk]:
155 test_count_report[tk] += 1
156 if test_status in self.result_types['failed']:
157 test_count_report['failed_testcases'].append(k)
158 return test_count_report
159
160 def print_test_report(self, template_file_name, test_count_reports):
161 from jinja2 import Environment, FileSystemLoader
162 script_path = os.path.dirname(os.path.realpath(__file__))
163 file_loader = FileSystemLoader(script_path + '/template')
164 env = Environment(loader=file_loader, trim_blocks=True)
165 template = env.get_template(template_file_name)
166 havefailed = False
167 reportvalues = []
168 machines = []
169 cols = ['passed', 'failed', 'skipped']
170 maxlen = {'passed' : 0, 'failed' : 0, 'skipped' : 0, 'result_id': 0, 'testseries' : 0, 'ptest' : 0 ,'ltptest': 0, 'ltpposixtest': 0}
171 for line in test_count_reports:
172 total_tested = line['passed'] + line['failed'] + line['skipped']
173 vals = {}
174 vals['result_id'] = line['result_id']
175 vals['testseries'] = line['testseries']
176 vals['sort'] = line['testseries'] + "_" + line['result_id']
177 vals['failed_testcases'] = line['failed_testcases']
178 for k in cols:
179 if total_tested:
180 vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f'))
181 else:
182 vals[k] = "0 (0%)"
183 for k in maxlen:
184 if k in vals and len(vals[k]) > maxlen[k]:
185 maxlen[k] = len(vals[k])
186 reportvalues.append(vals)
187 if line['failed_testcases']:
188 havefailed = True
189 if line['machine'] not in machines:
190 machines.append(line['machine'])
191 reporttotalvalues = {}
192 for k in cols:
193 reporttotalvalues[k] = '%s' % sum([line[k] for line in test_count_reports])
194 reporttotalvalues['count'] = '%s' % len(test_count_reports)
195 for (machine, report) in self.ptests.items():
196 for ptest in self.ptests[machine]:
197 if len(ptest) > maxlen['ptest']:
198 maxlen['ptest'] = len(ptest)
199 for (machine, report) in self.ltptests.items():
200 for ltptest in self.ltptests[machine]:
201 if len(ltptest) > maxlen['ltptest']:
202 maxlen['ltptest'] = len(ltptest)
203 for (machine, report) in self.ltpposixtests.items():
204 for ltpposixtest in self.ltpposixtests[machine]:
205 if len(ltpposixtest) > maxlen['ltpposixtest']:
206 maxlen['ltpposixtest'] = len(ltpposixtest)
207 output = template.render(reportvalues=reportvalues,
208 reporttotalvalues=reporttotalvalues,
209 havefailed=havefailed,
210 machines=machines,
211 ptests=self.ptests,
212 ltptests=self.ltptests,
213 ltpposixtests=self.ltpposixtests,
214 maxlen=maxlen)
215 print(output)
216
217 def view_test_report(self, logger, source_dir, branch, commit, tag, use_regression_map, raw_test, selected_test_case_only):
218 def print_selected_testcase_result(testresults, selected_test_case_only):
219 for testsuite in testresults:
220 for resultid in testresults[testsuite]:
221 result = testresults[testsuite][resultid]['result']
222 test_case_result = result.get(selected_test_case_only, {})
223 if test_case_result.get('status'):
224 print('Found selected test case result for %s from %s' % (selected_test_case_only,
225 resultid))
226 print(test_case_result['status'])
227 else:
228 print('Could not find selected test case result for %s from %s' % (selected_test_case_only,
229 resultid))
230 if test_case_result.get('log'):
231 print(test_case_result['log'])
232 test_count_reports = []
233 configmap = resultutils.store_map
234 if use_regression_map:
235 configmap = resultutils.regression_map
236 if commit:
237 if tag:
238 logger.warning("Ignoring --tag as --commit was specified")
239 tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
240 repo = GitRepo(source_dir)
241 revs = gitarchive.get_test_revs(logger, repo, tag_name, branch=branch)
242 rev_index = gitarchive.rev_find(revs, 'commit', commit)
243 testresults = resultutils.git_get_result(repo, revs[rev_index][2], configmap=configmap)
244 elif tag:
245 repo = GitRepo(source_dir)
246 testresults = resultutils.git_get_result(repo, [tag], configmap=configmap)
247 else:
248 testresults = resultutils.load_resultsdata(source_dir, configmap=configmap)
249 if raw_test:
250 raw_results = {}
251 for testsuite in testresults:
252 result = testresults[testsuite].get(raw_test, {})
253 if result:
254 raw_results[testsuite] = {raw_test: result}
255 if raw_results:
256 if selected_test_case_only:
257 print_selected_testcase_result(raw_results, selected_test_case_only)
258 else:
259 print(json.dumps(raw_results, sort_keys=True, indent=1))
260 else:
261 print('Could not find raw test result for %s' % raw_test)
262 return 0
263 if selected_test_case_only:
264 print_selected_testcase_result(testresults, selected_test_case_only)
265 return 0
266 for testsuite in testresults:
267 for resultid in testresults[testsuite]:
268 skip = False
269 result = testresults[testsuite][resultid]
270 machine = result['configuration']['MACHINE']
271
272 # Check to see if there are already results for these kinds of tests for the machine
273 for key in result['result'].keys():
274 testtype = str(key).split('.')[0]
275 if ((machine in self.ltptests and testtype == "ltpresult" and self.ltptests[machine]) or
276 (machine in self.ltpposixtests and testtype == "ltpposixresult" and self.ltpposixtests[machine])):
277 print("Already have test results for %s on %s, skipping %s" %(str(key).split('.')[0], machine, resultid))
278 skip = True
279 break
280 if skip:
281 break
282
283 test_count_report = self.get_aggregated_test_result(logger, result, machine)
284 test_count_report['machine'] = machine
285 test_count_report['testseries'] = result['configuration']['TESTSERIES']
286 test_count_report['result_id'] = resultid
287 test_count_reports.append(test_count_report)
288 self.print_test_report('test_report_full_text.txt', test_count_reports)
289
290def report(args, logger):
291 report = ResultsTextReport()
292 report.view_test_report(logger, args.source_dir, args.branch, args.commit, args.tag, args.use_regression_map,
293 args.raw_test_only, args.selected_test_case_only)
294 return 0
295
296def register_commands(subparsers):
297 """Register subcommands from this plugin"""
298 parser_build = subparsers.add_parser('report', help='summarise test results',
299 description='print a text-based summary of the test results',
300 group='analysis')
301 parser_build.set_defaults(func=report)
302 parser_build.add_argument('source_dir',
303 help='source file/directory/URL that contain the test result files to summarise')
304 parser_build.add_argument('--branch', '-B', default='master', help="Branch to find commit in")
305 parser_build.add_argument('--commit', help="Revision to report")
 306 parser_build.add_argument('-t', '--tag', default='',
 307 help='treat source_dir as a git repository and report on the tag specified in that repository')
 308 parser_build.add_argument('-m', '--use_regression_map', action='store_true',
 309 help='instead of the default "store_map", use the "regression_map" for the report')
 310 parser_build.add_argument('-r', '--raw_test_only', default='',
 311 help='output only the raw test result for the user-provided test result id')
 312 parser_build.add_argument('-s', '--selected_test_case_only', default='',
 313 help='output the selected test case result for the user-provided test case id; if both a test '
 314 'result id and a test case id are provided, output the selected test case result '
 315 'from the provided test result id')
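For orientation (an illustrative note, not part of the deleted file): given the arguments registered above, typical invocations of this subcommand would have looked like

    resulttool report path/to/results-dir
    resulttool report -t TAG path/to/results-repo

where the directory paths and TAG are placeholders.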
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py
deleted file mode 100644
index b8fc79a6ac..0000000000
--- a/scripts/lib/resulttool/resultutils.py
+++ /dev/null
@@ -1,274 +0,0 @@
1# resulttool - common library/utility functions
2#
3# Copyright (c) 2019, Intel Corporation.
4# Copyright (c) 2019, Linux Foundation
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import os
10import base64
11import zlib
12import json
13import scriptpath
14import copy
15import urllib.request
16import posixpath
17import logging
18scriptpath.add_oe_lib_path()
19
20logger = logging.getLogger('resulttool')
21
22flatten_map = {
23 "oeselftest": [],
24 "runtime": [],
25 "sdk": [],
26 "sdkext": [],
27 "manual": []
28}
29regression_map = {
30 "oeselftest": ['TEST_TYPE', 'MACHINE'],
31 "runtime": ['TESTSERIES', 'TEST_TYPE', 'IMAGE_BASENAME', 'MACHINE', 'IMAGE_PKGTYPE', 'DISTRO'],
32 "sdk": ['TESTSERIES', 'TEST_TYPE', 'IMAGE_BASENAME', 'MACHINE', 'SDKMACHINE'],
33 "sdkext": ['TESTSERIES', 'TEST_TYPE', 'IMAGE_BASENAME', 'MACHINE', 'SDKMACHINE'],
34 "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE']
35}
36store_map = {
37 "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'],
38 "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'],
39 "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
40 "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
41 "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME']
42}
43
44rawlog_sections = {
45 "ptestresult.rawlogs": "ptest",
46 "ltpresult.rawlogs": "ltp",
47 "ltpposixresult.rawlogs": "ltpposix"
48}
49
50def is_url(p):
51 """
52 Helper for determining if the given path is a URL
53 """
54 return p.startswith('http://') or p.startswith('https://')
55
56extra_configvars = {'TESTSERIES': ''}
57
58#
59# Load the json file and append the results data into the provided results dict
60#
61def append_resultsdata(results, f, configmap=store_map, configvars=extra_configvars):
62 if type(f) is str:
63 if is_url(f):
64 with urllib.request.urlopen(f) as response:
65 data = json.loads(response.read().decode('utf-8'))
66 url = urllib.parse.urlparse(f)
67 testseries = posixpath.basename(posixpath.dirname(url.path))
68 else:
69 with open(f, "r") as filedata:
70 try:
71 data = json.load(filedata)
72 except json.decoder.JSONDecodeError:
73 print("Cannot decode {}. Possible corruption. Skipping.".format(f))
74 data = ""
75 testseries = os.path.basename(os.path.dirname(f))
76 else:
77 data = f
78 for res in data:
79 if "configuration" not in data[res] or "result" not in data[res]:
80 raise ValueError("Test results data without configuration or result section?")
81 for config in configvars:
82 if config == "TESTSERIES" and "TESTSERIES" not in data[res]["configuration"]:
83 data[res]["configuration"]["TESTSERIES"] = testseries
84 continue
85 if config not in data[res]["configuration"]:
86 data[res]["configuration"][config] = configvars[config]
87 testtype = data[res]["configuration"].get("TEST_TYPE")
88 if testtype not in configmap:
89 raise ValueError("Unknown test type %s" % testtype)
90 testpath = "/".join(data[res]["configuration"].get(i) for i in configmap[testtype])
91 if testpath not in results:
92 results[testpath] = {}
93 results[testpath][res] = data[res]
94
95#
96# Walk a directory and find/load results data
97# or load directly from a file
98#
99def load_resultsdata(source, configmap=store_map, configvars=extra_configvars):
100 results = {}
101 if is_url(source) or os.path.isfile(source):
102 append_resultsdata(results, source, configmap, configvars)
103 return results
104 for root, dirs, files in os.walk(source):
105 for name in files:
106 f = os.path.join(root, name)
107 if name == "testresults.json":
108 append_resultsdata(results, f, configmap, configvars)
109 return results
110
111def filter_resultsdata(results, resultid):
112 newresults = {}
113 for r in results:
114 for i in results[r]:
 115 if i == resultid:
116 newresults[r] = {}
117 newresults[r][i] = results[r][i]
118 return newresults
119
120def strip_logs(results):
121 newresults = copy.deepcopy(results)
122 for res in newresults:
123 if 'result' not in newresults[res]:
124 continue
125 for logtype in rawlog_sections:
126 if logtype in newresults[res]['result']:
127 del newresults[res]['result'][logtype]
128 if 'ptestresult.sections' in newresults[res]['result']:
129 for i in newresults[res]['result']['ptestresult.sections']:
130 if 'log' in newresults[res]['result']['ptestresult.sections'][i]:
131 del newresults[res]['result']['ptestresult.sections'][i]['log']
132 return newresults
133
134# For timing numbers, excessive precision isn't meaningful and just clutters the
135# logs. For numbers over 1, trim to 3 decimal places; for numbers under 1, trim to 4
136# significant digits (e.g. 12.3456789 becomes 12.346 and 0.000123456 becomes 0.0001235)
137def trim_durations(results):
138 for res in results:
139 if 'result' not in results[res]:
140 continue
141 for entry in results[res]['result']:
142 if 'duration' in results[res]['result'][entry]:
143 duration = results[res]['result'][entry]['duration']
144 if duration > 1:
145 results[res]['result'][entry]['duration'] = float("%.3f" % duration)
146 elif duration < 1:
147 results[res]['result'][entry]['duration'] = float("%.4g" % duration)
148 return results
149
150def handle_cleanups(results):
151 # Remove pointless path duplication from old format reproducibility results
152 for res2 in results:
153 try:
154 section = results[res2]['result']['reproducible']['files']
155 for pkgtype in section:
156 for filelist in section[pkgtype].copy():
157 if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict:
158 newlist = []
159 for entry in section[pkgtype][filelist]:
160 newlist.append(entry["reference"].split("/./")[1])
161 section[pkgtype][filelist] = newlist
162
163 except KeyError:
164 pass
165 # Remove pointless duplicate rawlogs data
166 try:
167 del results[res2]['result']['reproducible.rawlogs']
168 except KeyError:
169 pass
170
171def decode_log(logdata):
172 if isinstance(logdata, str):
173 return logdata
174 elif isinstance(logdata, dict):
175 if "compressed" in logdata:
176 data = logdata.get("compressed")
177 data = base64.b64decode(data.encode("utf-8"))
178 data = zlib.decompress(data)
179 return data.decode("utf-8", errors='ignore')
180 return None
181
182def generic_get_log(sectionname, results, section):
183 if sectionname not in results:
184 return None
185 if section not in results[sectionname]:
186 return None
187
188 ptest = results[sectionname][section]
189 if 'log' not in ptest:
190 return None
191 return decode_log(ptest['log'])
192
193def ptestresult_get_log(results, section):
194 return generic_get_log('ptestresult.sections', results, section)
195
196def generic_get_rawlogs(sectname, results):
197 if sectname not in results:
198 return None
199 if 'log' not in results[sectname]:
200 return None
201 return decode_log(results[sectname]['log'])
202
203def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
204 for res in results:
205 if res:
206 dst = destdir + "/" + res + "/" + fn
207 else:
208 dst = destdir + "/" + fn
209 os.makedirs(os.path.dirname(dst), exist_ok=True)
210 resultsout = results[res]
211 if not ptestjson:
212 resultsout = strip_logs(results[res])
213 trim_durations(resultsout)
214 handle_cleanups(resultsout)
215 with open(dst, 'w') as f:
216 f.write(json.dumps(resultsout, sort_keys=True, indent=1))
217 for res2 in results[res]:
218 if ptestlogs and 'result' in results[res][res2]:
219 seriesresults = results[res][res2]['result']
220 for logtype in rawlog_sections:
221 logdata = generic_get_rawlogs(logtype, seriesresults)
222 if logdata is not None:
223 logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log")
224 with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f:
225 f.write(logdata)
226 if 'ptestresult.sections' in seriesresults:
227 for i in seriesresults['ptestresult.sections']:
228 sectionlog = ptestresult_get_log(seriesresults, i)
229 if sectionlog is not None:
230 with open(dst.replace(fn, "ptest-%s.log" % i), "w+") as f:
231 f.write(sectionlog)
232
233def git_get_result(repo, tags, configmap=store_map):
234 git_objs = []
235 for tag in tags:
236 files = repo.run_cmd(['ls-tree', "--name-only", "-r", tag]).splitlines()
237 git_objs.extend([tag + ':' + f for f in files if f.endswith("testresults.json")])
238
239 def parse_json_stream(data):
240 """Parse multiple concatenated JSON objects"""
241 objs = []
242 json_d = ""
243 for line in data.splitlines():
244 if line == '}{':
245 json_d += '}'
246 objs.append(json.loads(json_d))
247 json_d = '{'
248 else:
249 json_d += line
250 objs.append(json.loads(json_d))
251 return objs
252
253 # Optimize by reading all data with one git command
254 results = {}
255 for obj in parse_json_stream(repo.run_cmd(['show'] + git_objs + ['--'])):
256 append_resultsdata(results, obj, configmap=configmap)
257
258 return results
259
260def test_run_results(results):
261 """
262 Convenient generator function that iterates over all test runs that have a
263 result section.
264
265 Generates a tuple of:
266 (result json file path, test run name, test run (dict), test run "results" (dict))
267 for each test run that has a "result" section
268 """
269 for path in results:
270 for run_name, test_run in results[path].items():
 271 if 'result' not in test_run:
272 continue
273 yield path, run_name, test_run, test_run['result']
274
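As an aside (an illustrative sketch, not part of the deleted file): the "compressed" log convention that decode_log() above unpacks is just zlib plus base64 inside JSON. A minimal standalone round trip, using a made-up log string:

    import base64, zlib

    def encode_log(text):
        # inverse of decode_log()'s "compressed" branch
        payload = base64.b64encode(zlib.compress(text.encode("utf-8")))
        return {"compressed": payload.decode("utf-8")}

    entry = encode_log("ptest line 1\nptest line 2\n")
    raw = zlib.decompress(base64.b64decode(entry["compressed"].encode("utf-8")))
    assert raw.decode("utf-8", errors="ignore") == "ptest line 1\nptest line 2\n"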
diff --git a/scripts/lib/resulttool/store.py b/scripts/lib/resulttool/store.py
deleted file mode 100644
index b143334e69..0000000000
--- a/scripts/lib/resulttool/store.py
+++ /dev/null
@@ -1,125 +0,0 @@
1# resulttool - store test results
2#
3# Copyright (c) 2019, Intel Corporation.
4# Copyright (c) 2019, Linux Foundation
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import tempfile
10import os
11import subprocess
12import json
13import shutil
14import scriptpath
15scriptpath.add_bitbake_lib_path()
16scriptpath.add_oe_lib_path()
17import resulttool.resultutils as resultutils
18import oeqa.utils.gitarchive as gitarchive
19
20
21def store(args, logger):
22 tempdir = tempfile.mkdtemp(prefix='testresults.')
23 try:
24 configvars = resultutils.extra_configvars.copy()
25 if args.executed_by:
26 configvars['EXECUTED_BY'] = args.executed_by
27 if args.extra_test_env:
28 configvars['EXTRA_TEST_ENV'] = args.extra_test_env
29 results = {}
30 logger.info('Reading files from %s' % args.source)
31 if resultutils.is_url(args.source) or os.path.isfile(args.source):
32 resultutils.append_resultsdata(results, args.source, configvars=configvars)
33 else:
34 for root, dirs, files in os.walk(args.source):
35 for name in files:
36 f = os.path.join(root, name)
37 if name == "testresults.json":
38 resultutils.append_resultsdata(results, f, configvars=configvars)
39 elif args.all:
40 dst = f.replace(args.source, tempdir + "/")
41 os.makedirs(os.path.dirname(dst), exist_ok=True)
42 shutil.copyfile(f, dst)
43
44 revisions = {}
45
46 if not results and not args.all:
47 if args.allow_empty:
48 logger.info("No results found to store")
49 return 0
50 logger.error("No results found to store")
51 return 1
52
53 # Find the branch/commit/commit_count and ensure they all match
54 for suite in results:
55 for result in results[suite]:
56 config = results[suite][result]['configuration']['LAYERS']['meta']
57 revision = (config['commit'], config['branch'], str(config['commit_count']))
58 if revision not in revisions:
59 revisions[revision] = {}
60 if suite not in revisions[revision]:
61 revisions[revision][suite] = {}
62 revisions[revision][suite][result] = results[suite][result]
63
64 logger.info("Found %d revisions to store" % len(revisions))
65
66 for r in revisions:
67 results = revisions[r]
68 if args.revision and r[0] != args.revision:
69 logger.info('skipping %s as non-matching' % r[0])
70 continue
71 keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]}
72 subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"])
73 resultutils.save_resultsdata(results, tempdir, ptestlogs=True)
74
75 logger.info('Storing test result into git repository %s' % args.git_dir)
76
77 excludes = []
78 if args.logfile_archive:
79 excludes = ['*.log', "*.log.zst"]
80
81 tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False,
82 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
83 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
84 'Test run #{tag_number} of {branch}:{commit}', '',
85 excludes, [], False, keywords, logger)
86
87 if args.logfile_archive:
88 logdir = args.logfile_archive + "/" + tagname
89 shutil.copytree(tempdir, logdir)
90 os.chmod(logdir, 0o755)
91 for root, dirs, files in os.walk(logdir):
92 for name in files:
93 if not name.endswith(".log"):
94 continue
95 f = os.path.join(root, name)
96 subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True)
97 finally:
98 subprocess.check_call(["rm", "-rf", tempdir])
99
100 return 0
101
102def register_commands(subparsers):
103 """Register subcommands from this plugin"""
104 parser_build = subparsers.add_parser('store', help='store test results into a git repository',
105 description='takes a results file or directory of results files and stores '
106 'them into the destination git repository, splitting out the results '
107 'files as configured',
108 group='setup')
109 parser_build.set_defaults(func=store)
110 parser_build.add_argument('source',
 111 help='source file/directory/URL that contains the test result files to be stored')
112 parser_build.add_argument('git_dir',
113 help='the location of the git repository to store the results in')
114 parser_build.add_argument('-a', '--all', action='store_true',
115 help='include all files, not just testresults.json files')
116 parser_build.add_argument('-e', '--allow-empty', action='store_true',
117 help='don\'t error if no results to store are found')
118 parser_build.add_argument('-x', '--executed-by', default='',
119 help='add executed-by configuration to each result file')
120 parser_build.add_argument('-t', '--extra-test-env', default='',
121 help='add extra test environment data to each result file configuration')
122 parser_build.add_argument('-r', '--revision', default='',
123 help='only store data for the specified revision')
124 parser_build.add_argument('-l', '--logfile-archive', default='',
125 help='directory to separately archive log files along with a copy of the results')
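One detail worth spelling out (an illustrative sketch, not from the deleted file): the tag name handed to gitarchive.gitarchive() above is a plain str.format() template over the keywords dict built earlier; {tag_number} is not in that dict, so gitarchive presumably fills it in itself to keep tags unique. For example, with made-up values:

    keywords = {'commit': 'abc123', 'branch': 'master', 'commit_count': '4242'}
    tag_name = "{branch}/{commit_count}-g{commit}/{tag_number}"
    print(tag_name.format(tag_number=0, **keywords))  # master/4242-gabc123/0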
diff --git a/scripts/lib/resulttool/template/test_report_full_text.txt b/scripts/lib/resulttool/template/test_report_full_text.txt
deleted file mode 100644
index 2efba2ef6f..0000000000
--- a/scripts/lib/resulttool/template/test_report_full_text.txt
+++ /dev/null
@@ -1,79 +0,0 @@
1==============================================================================================================
2Test Result Status Summary (Counts/Percentages sorted by testseries, ID)
3==============================================================================================================
4--------------------------------------------------------------------------------------------------------------
5{{ 'Test Series'.ljust(maxlen['testseries']) }} | {{ 'ID'.ljust(maxlen['result_id']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }}
6--------------------------------------------------------------------------------------------------------------
7{% for report in reportvalues |sort(attribute='sort') %}
8{{ report.testseries.ljust(maxlen['testseries']) }} | {{ report.result_id.ljust(maxlen['result_id']) }} | {{ (report.passed|string).ljust(maxlen['passed']) }} | {{ (report.failed|string).ljust(maxlen['failed']) }} | {{ (report.skipped|string).ljust(maxlen['skipped']) }}
9{% endfor %}
10--------------------------------------------------------------------------------------------------------------
11{{ 'Total'.ljust(maxlen['testseries']) }} | {{ reporttotalvalues['count'].ljust(maxlen['result_id']) }} | {{ reporttotalvalues['passed'].ljust(maxlen['passed']) }} | {{ reporttotalvalues['failed'].ljust(maxlen['failed']) }} | {{ reporttotalvalues['skipped'].ljust(maxlen['skipped']) }}
12--------------------------------------------------------------------------------------------------------------
13
14{% for machine in machines %}
15{% if ptests[machine] %}
16==============================================================================================================
17{{ machine }} PTest Result Summary
18==============================================================================================================
19--------------------------------------------------------------------------------------------------------------
20{{ 'Recipe'.ljust(maxlen['ptest']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }} | {{ 'Time(s)'.ljust(10) }}
21--------------------------------------------------------------------------------------------------------------
22{% for ptest in ptests[machine] |sort %}
23{{ ptest.ljust(maxlen['ptest']) }} | {{ (ptests[machine][ptest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ptests[machine][ptest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ptests[machine][ptest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ptests[machine][ptest]['duration']|string) }}
24{% endfor %}
25--------------------------------------------------------------------------------------------------------------
26
27{% endif %}
28{% endfor %}
29
30{% for machine in machines %}
31{% if ltptests[machine] %}
32==============================================================================================================
33{{ machine }} Ltp Test Result Summary
34==============================================================================================================
35--------------------------------------------------------------------------------------------------------------
36{{ 'Recipe'.ljust(maxlen['ltptest']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }} | {{ 'Time(s)'.ljust(10) }}
37--------------------------------------------------------------------------------------------------------------
38{% for ltptest in ltptests[machine] |sort %}
39{{ ltptest.ljust(maxlen['ltptest']) }} | {{ (ltptests[machine][ltptest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ltptests[machine][ltptest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ltptests[machine][ltptest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ltptests[machine][ltptest]['duration']|string) }}
40{% endfor %}
41--------------------------------------------------------------------------------------------------------------
42
43{% endif %}
44{% endfor %}
45
46{% for machine in machines %}
47{% if ltpposixtests[machine] %}
48==============================================================================================================
49{{ machine }} Ltp Posix Result Summary
50==============================================================================================================
51--------------------------------------------------------------------------------------------------------------
52{{ 'Recipe'.ljust(maxlen['ltpposixtest']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }} | {{ 'Time(s)'.ljust(10) }}
53--------------------------------------------------------------------------------------------------------------
54{% for ltpposixtest in ltpposixtests[machine] |sort %}
55{{ ltpposixtest.ljust(maxlen['ltpposixtest']) }} | {{ (ltpposixtests[machine][ltpposixtest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ltpposixtests[machine][ltpposixtest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ltpposixtests[machine][ltpposixtest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ltpposixtests[machine][ltpposixtest]['duration']|string) }}
56{% endfor %}
57--------------------------------------------------------------------------------------------------------------
58
59{% endif %}
60{% endfor %}
61
62
63==============================================================================================================
64Failed test cases (sorted by testseries, ID)
65==============================================================================================================
66{% if havefailed %}
67--------------------------------------------------------------------------------------------------------------
68{% for report in reportvalues |sort(attribute='sort') %}
69{% if report.failed_testcases %}
70testseries | result_id : {{ report.testseries }} | {{ report.result_id }}
71{% for testcase in report.failed_testcases %}
72 {{ testcase }}
73{% endfor %}
74{% endif %}
75{% endfor %}
76--------------------------------------------------------------------------------------------------------------
77{% else %}
78There were no test failures
79{% endif %}
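The file above is a Jinja2 template (hence the {{ }} and {% %} markup), rendered by print_test_report() in report.py. A minimal render sketch, assuming the jinja2 package is available and using hypothetical, mostly empty context values:

    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader('scripts/lib/resulttool/template'))
    template = env.get_template('test_report_full_text.txt')
    print(template.render(
        reportvalues=[], havefailed=False, machines=[],
        ptests={}, ltptests={}, ltpposixtests={},
        reporttotalvalues={'count': '0', 'passed': '0', 'failed': '0', 'skipped': '0'},
        maxlen={'testseries': 11, 'result_id': 2, 'passed': 6, 'failed': 6, 'skipped': 7}))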
diff --git a/scripts/lib/scriptpath.py b/scripts/lib/scriptpath.py
deleted file mode 100644
index f32326db3a..0000000000
--- a/scripts/lib/scriptpath.py
+++ /dev/null
@@ -1,32 +0,0 @@
1# Path utility functions for OE python scripts
2#
3# Copyright (C) 2012-2014 Intel Corporation
4# Copyright (C) 2011 Mentor Graphics Corporation
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import sys
10import os
11import os.path
12
13def add_oe_lib_path():
14 basepath = os.path.abspath(os.path.dirname(__file__) + '/../..')
15 newpath = basepath + '/meta/lib'
16 sys.path.insert(0, newpath)
17
18def add_bitbake_lib_path():
19 basepath = os.path.abspath(os.path.dirname(__file__) + '/../..')
20 bitbakepath = None
21 if os.path.exists(basepath + '/bitbake/lib/bb'):
22 bitbakepath = basepath + '/bitbake'
23 else:
24 # look for bitbake/bin dir in PATH
25 for pth in os.environ['PATH'].split(':'):
26 if os.path.exists(os.path.join(pth, '../lib/bb')):
27 bitbakepath = os.path.abspath(os.path.join(pth, '..'))
28 break
29
30 if bitbakepath:
31 sys.path.insert(0, bitbakepath + '/lib')
32 return bitbakepath
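Scripts consumed this module before any oe/bitbake imports, exactly as the resulttool files above do. A typical preamble (illustrative):

    import scriptpath
    scriptpath.add_oe_lib_path()       # prepend <base>/meta/lib to sys.path
    scriptpath.add_bitbake_lib_path()  # prepend bitbake's lib dir, if it can be found

    # only now are the oe/bitbake modules importable, e.g.:
    import oeqa.utils.gitarchive as gitarchive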
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
deleted file mode 100644
index 32e749dbb1..0000000000
--- a/scripts/lib/scriptutils.py
+++ /dev/null
@@ -1,274 +0,0 @@
1# Script utility functions
2#
3# Copyright (C) 2014 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import glob
9import logging
10import os
11import random
12import shlex
13import shutil
14import string
15import subprocess
16import sys
17import tempfile
18import threading
19import importlib
20import importlib.machinery
21import importlib.util
22
23class KeepAliveStreamHandler(logging.StreamHandler):
24 def __init__(self, keepalive=True, **kwargs):
25 super().__init__(**kwargs)
26 if keepalive is True:
27 keepalive = 5000 # default timeout
28 self._timeout = threading.Condition()
29 self._stop = False
30
 31 # background thread waits on the condition; if it is not notified
 32 # within the keepalive timeout, emit a keepalive message
33 def thread():
34 while not self._stop:
35 with self._timeout:
36 if not self._timeout.wait(keepalive):
37 self.emit(logging.LogRecord("keepalive", logging.INFO,
38 None, None, "Keepalive message", None, None))
39
40 self._thread = threading.Thread(target=thread, daemon=True)
41 self._thread.start()
42
43 def close(self):
44 # mark the thread to stop and notify it
45 self._stop = True
46 with self._timeout:
47 self._timeout.notify()
48 # wait for it to join
49 self._thread.join()
50 super().close()
51
52 def emit(self, record):
53 super().emit(record)
54 # trigger timer reset
55 with self._timeout:
56 self._timeout.notify()
57
58def logger_create(name, stream=None, keepalive=None):
59 logger = logging.getLogger(name)
60 if keepalive is not None:
61 loggerhandler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
62 else:
63 loggerhandler = logging.StreamHandler(stream=stream)
64 loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
65 logger.addHandler(loggerhandler)
66 logger.setLevel(logging.INFO)
67 return logger
68
69def logger_setup_color(logger, color='auto'):
70 from bb.msg import BBLogFormatter
71
72 for handler in logger.handlers:
73 if (isinstance(handler, logging.StreamHandler) and
74 isinstance(handler.formatter, BBLogFormatter)):
75 if color == 'always' or (color == 'auto' and handler.stream.isatty()):
76 handler.formatter.enable_color()
77
78
79def load_plugins(logger, plugins, pluginpath):
80 def load_plugin(name):
81 logger.debug('Loading plugin %s' % name)
82 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
83 if spec:
84 mod = importlib.util.module_from_spec(spec)
85 spec.loader.exec_module(mod)
86 return mod
87
88 def plugin_name(filename):
89 return os.path.splitext(os.path.basename(filename))[0]
90
91 known_plugins = [plugin_name(p.__name__) for p in plugins]
92 logger.debug('Loading plugins from %s...' % pluginpath)
93 for fn in glob.glob(os.path.join(pluginpath, '*.py')):
94 name = plugin_name(fn)
95 if name != '__init__' and name not in known_plugins:
96 plugin = load_plugin(name)
97 if hasattr(plugin, 'plugin_init'):
98 plugin.plugin_init(plugins)
99 plugins.append(plugin)
100
101
102def git_convert_standalone_clone(repodir):
103 """If specified directory is a git repository, ensure it's a standalone clone"""
104 import bb.process
105 if os.path.exists(os.path.join(repodir, '.git')):
106 alternatesfile = os.path.join(repodir, '.git', 'objects', 'info', 'alternates')
107 if os.path.exists(alternatesfile):
 108 # This will have been cloned with -s, so we need to convert it so that
 109 # none of its contents are shared
110 bb.process.run('git repack -a', cwd=repodir)
111 os.remove(alternatesfile)
112
113def _get_temp_recipe_dir(d):
114 # This is a little bit hacky but we need to find a place where we can put
115 # the recipe so that bitbake can find it. We're going to delete it at the
116 # end so it doesn't really matter where we put it.
117 bbfiles = d.getVar('BBFILES').split()
118 fetchrecipedir = None
119 for pth in bbfiles:
120 if pth.endswith('.bb'):
121 pthdir = os.path.dirname(pth)
122 if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK):
123 fetchrecipedir = pthdir.replace('*', 'recipetool')
124 if pthdir.endswith('workspace/recipes/*'):
125 # Prefer the workspace
126 break
127 return fetchrecipedir
128
129class FetchUrlFailure(Exception):
130 def __init__(self, url):
131 self.url = url
132 def __str__(self):
133 return "Failed to fetch URL %s" % self.url
134
135def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
136 """
137 Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
138 any dependencies that need to be satisfied in order to support the fetch
139 operation will be taken care of
140 """
141
142 import bb
143
144 checksums = {}
145 fetchrecipepn = None
146
147 # We need to put our temp directory under ${BASE_WORKDIR} otherwise
148 # we may have problems with the recipe-specific sysroot population
149 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
150 bb.utils.mkdirhier(tmpparent)
151 tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
152 try:
153 tmpworkdir = os.path.join(tmpdir, 'work')
154 logger.debug('fetch_url: temp dir is %s' % tmpdir)
155
156 fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
157 if not fetchrecipedir:
158 logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
159 sys.exit(1)
160 fetchrecipe = None
161 bb.utils.mkdirhier(fetchrecipedir)
162 try:
163 # Generate a dummy recipe so we can follow more or less normal paths
164 # for do_fetch and do_unpack
 165 # We'd use tempfile functions here, but they can produce underscores, which
 166 # aren't allowed in recipe file names except to separate the name and version
167 rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
168 fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
169 fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
170 logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
171 with open(fetchrecipe, 'w') as f:
172 # We don't want to have to specify LIC_FILES_CHKSUM
173 f.write('LICENSE = "CLOSED"\n')
174 # We don't need the cross-compiler
175 f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
176 # We don't have the checksums yet so we can't require them
177 f.write('BB_STRICT_CHECKSUM = "ignore"\n')
178 f.write('SRC_URI = "%s"\n' % srcuri)
179 f.write('SRCREV = "%s"\n' % srcrev)
180 f.write('PV = "0.0+"\n')
181 f.write('WORKDIR = "%s"\n' % tmpworkdir)
182 f.write('UNPACKDIR = "%s"\n' % destdir)
183
184 # Set S out of the way so it doesn't get created under the workdir
185 s_dir = os.path.join(tmpdir, 'emptysrc')
186 bb.utils.mkdirhier(s_dir)
187 f.write('S = "%s"\n' % s_dir)
188
189 if not mirrors:
190 # We do not need PREMIRRORS since we are almost certainly
191 # fetching new source rather than something that has already
192 # been fetched. Hence, we disable them by default.
193 # However, we provide an option for users to enable it.
194 f.write('PREMIRRORS = ""\n')
195 f.write('MIRRORS = ""\n')
196
197 logger.info('Fetching %s...' % srcuri)
198
199 # FIXME this is too noisy at the moment
200
201 # Parse recipes so our new recipe gets picked up
202 tinfoil.parse_recipes()
203
204 def eventhandler(event):
205 if isinstance(event, bb.fetch2.MissingChecksumEvent):
206 checksums.update(event.checksums)
207 return True
208 return False
209
210 # Run the fetch + unpack tasks
211 res = tinfoil.build_targets(fetchrecipepn,
212 'do_unpack',
213 handle_events=True,
214 extra_events=['bb.fetch2.MissingChecksumEvent'],
215 event_callback=eventhandler)
216 if not res:
217 raise FetchUrlFailure(srcuri)
218
219 # Remove unneeded directories
220 rd = tinfoil.parse_recipe(fetchrecipepn)
221 if rd:
222 pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
223 for pathvar in pathvars:
224 path = rd.getVar(pathvar)
225 if os.path.exists(path):
226 shutil.rmtree(path)
227 finally:
228 if fetchrecipe:
229 try:
230 os.remove(fetchrecipe)
231 except FileNotFoundError:
232 pass
233 try:
234 os.rmdir(fetchrecipedir)
235 except OSError as e:
236 import errno
237 if e.errno != errno.ENOTEMPTY:
238 raise
239
240 finally:
241 if not preserve_tmp:
242 shutil.rmtree(tmpdir)
243 tmpdir = None
244
245 return checksums, tmpdir
246
247
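# (Illustrative note, not part of the original file: a caller would use
# fetch_url() roughly as
#     checksums, tmpdir = fetch_url(tinfoil, 'https://example.com/foo.tar.gz',
#                                   None, '/tmp/extracted', logger)
# with a live tinfoil instance; 'checksums' collects any sums reported by the
# fetcher via MissingChecksumEvent, and 'tmpdir' is only non-None when
# preserve_tmp=True. The URL and destination path here are hypothetical.)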
248def run_editor(fn, logger=None):
249 if isinstance(fn, str):
250 files = [fn]
251 else:
252 files = fn
253
254 editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
255 try:
256 #print(shlex.split(editor) + files)
257 return subprocess.check_call(shlex.split(editor) + files)
258 except subprocess.CalledProcessError as exc:
259 logger.error("Execution of '%s' failed: %s" % (editor, exc))
260 return 1
261
262def is_src_url(param):
263 """
264 Check if a parameter is a URL and return True if so
265 NOTE: be careful about changing this as it will influence how devtool/recipetool command line handling works
266 """
267 if not param:
268 return False
269 elif '://' in param:
270 return True
271 elif param.startswith('git@') or ('@' in param and param.endswith('.git')):
272 return True
273 return False
274
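To make the is_src_url() heuristic above concrete (illustrative asserts, not part of the file):

    assert is_src_url('https://example.com/foo.tar.gz')   # has a '://' scheme
    assert is_src_url('git@example.com:foo/bar.git')      # git@ shorthand
    assert not is_src_url('foo-1.0.tar.gz')               # plain file name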
diff --git a/scripts/lib/wic/__init__.py b/scripts/lib/wic/__init__.py
deleted file mode 100644
index 85567934ae..0000000000
--- a/scripts/lib/wic/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright (c) 2007 Red Hat, Inc.
4# Copyright (c) 2011 Intel, Inc.
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9class WicError(Exception):
10 pass
diff --git a/scripts/lib/wic/canned-wks/common.wks.inc b/scripts/lib/wic/canned-wks/common.wks.inc
deleted file mode 100644
index 89880b417b..0000000000
--- a/scripts/lib/wic/canned-wks/common.wks.inc
+++ /dev/null
@@ -1,3 +0,0 @@
1# This file is included into 3 canned wks files from this directory
2part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
3part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024
diff --git a/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg b/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg
deleted file mode 100644
index c58e74a853..0000000000
--- a/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg
+++ /dev/null
@@ -1,27 +0,0 @@
1# This is an example configuration file for syslinux.
2TIMEOUT 50
3ALLOWOPTIONS 1
4SERIAL 0 115200
5PROMPT 0
6
7UI vesamenu.c32
8menu title Select boot options
9menu tabmsg Press [Tab] to edit, [Return] to select
10
11DEFAULT Graphics console boot
12
13LABEL Graphics console boot
14KERNEL /vmlinuz
15APPEND label=boot rootwait
16
17LABEL Serial console boot
18KERNEL /vmlinuz
19APPEND label=boot rootwait console=ttyS0,115200
20
21LABEL Graphics console install
22KERNEL /vmlinuz
23APPEND label=install rootwait
24
25LABEL Serial console install
26KERNEL /vmlinuz
27APPEND label=install rootwait console=ttyS0,115200
diff --git a/scripts/lib/wic/canned-wks/directdisk-bootloader-config.wks b/scripts/lib/wic/canned-wks/directdisk-bootloader-config.wks
deleted file mode 100644
index 3529e05c87..0000000000
--- a/scripts/lib/wic/canned-wks/directdisk-bootloader-config.wks
+++ /dev/null
@@ -1,8 +0,0 @@
1# short-description: Create a 'pcbios' direct disk image with custom bootloader config
2# long-description: Creates a partitioned legacy BIOS disk image that the user
3# can directly dd to boot media. The bootloader configuration source is a user file.
4
5include common.wks.inc
6
7bootloader --configfile="directdisk-bootloader-config.cfg"
8
diff --git a/scripts/lib/wic/canned-wks/directdisk-gpt.wks b/scripts/lib/wic/canned-wks/directdisk-gpt.wks
deleted file mode 100644
index 8d7d8de6ea..0000000000
--- a/scripts/lib/wic/canned-wks/directdisk-gpt.wks
+++ /dev/null
@@ -1,10 +0,0 @@
1# short-description: Create a 'pcbios' direct disk image
2# long-description: Creates a partitioned legacy BIOS disk image that the user
3# can directly dd to boot media.
4
5
6part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
8
9bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8"
10
diff --git a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
deleted file mode 100644
index f61d941d6d..0000000000
--- a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
+++ /dev/null
@@ -1,23 +0,0 @@
1# short-description: Create multi rootfs image using rootfs plugin
2# long-description: Creates a partitioned disk image with two rootfs partitions
3# using rootfs plugin.
4#
5# Partitions can use either
6# - indirect rootfs references to image recipe(s):
7# wic create directdisk-multi-indirect-recipes -e core-image-minimal \
8# --rootfs-dir rootfs1=core-image-minimal
9# --rootfs-dir rootfs2=core-image-minimal-dev
10#
11# - or paths to rootfs directories:
12# wic create directdisk-multi-rootfs \
13# --rootfs-dir rootfs1=tmp/work/qemux86_64-poky-linux/core-image-minimal/1.0-r0/rootfs/
14# --rootfs-dir rootfs2=tmp/work/qemux86_64-poky-linux/core-image-minimal-dev/1.0-r0/rootfs/
15#
16# - or any combinations of -r and --rootfs command line options
17
18part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
19part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024
20part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024
21
22bootloader --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8"
23
diff --git a/scripts/lib/wic/canned-wks/directdisk.wks b/scripts/lib/wic/canned-wks/directdisk.wks
deleted file mode 100644
index 8c8e06b02c..0000000000
--- a/scripts/lib/wic/canned-wks/directdisk.wks
+++ /dev/null
@@ -1,8 +0,0 @@
1# short-description: Create a 'pcbios' direct disk image
2# long-description: Creates a partitioned legacy BIOS disk image that the user
3# can directly dd to boot media.
4
5include common.wks.inc
6
7bootloader --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8"
8
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
deleted file mode 100644
index 5211972955..0000000000
--- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
+++ /dev/null
@@ -1,3 +0,0 @@
1bootloader --ptable gpt
2part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.2
3part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in
deleted file mode 100644
index 67cc41a241..0000000000
--- a/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in
+++ /dev/null
@@ -1,3 +0,0 @@
1bootloader --ptable gpt --timeout=5
2part /boot --source bootimg-efi --sourceparams="loader=${EFI_PROVIDER}" --label boot --active --align 1024 --use-uuid --part-name="ESP" --part-type=C12A7328-F81F-11D2-BA4B-00A0C93EC93B --overhead-factor=1
3part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/scripts/lib/wic/canned-wks/mkefidisk.wks b/scripts/lib/wic/canned-wks/mkefidisk.wks
deleted file mode 100644
index 5fa6682a9e..0000000000
--- a/scripts/lib/wic/canned-wks/mkefidisk.wks
+++ /dev/null
@@ -1,11 +0,0 @@
1# short-description: Create an EFI disk image
2# long-description: Creates a partitioned EFI disk image that the user
3# can directly dd to boot media.
4
5part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024
6
7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
8
9part swap --ondisk sda --size 44 --label swap1 --fstype=swap
10
11bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=${KERNEL_CONSOLE} console=tty0"
diff --git a/scripts/lib/wic/canned-wks/mkhybridiso.wks b/scripts/lib/wic/canned-wks/mkhybridiso.wks
deleted file mode 100644
index 48c5ac4791..0000000000
--- a/scripts/lib/wic/canned-wks/mkhybridiso.wks
+++ /dev/null
@@ -1,7 +0,0 @@
1# short-description: Create a hybrid ISO image
2# long-description: Creates an EFI and legacy bootable hybrid ISO image
3# which can be used on optical media as well as USB media.
4
5part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO
6
7bootloader --timeout=15 --append=""
diff --git a/scripts/lib/wic/canned-wks/qemuloongarch.wks b/scripts/lib/wic/canned-wks/qemuloongarch.wks
deleted file mode 100644
index 8465c7a8c0..0000000000
--- a/scripts/lib/wic/canned-wks/qemuloongarch.wks
+++ /dev/null
@@ -1,3 +0,0 @@
1# short-description: Create qcow2 image for LoongArch QEMU machines
2
3part / --source rootfs --fstype=ext4 --label root --align 4096 --size 5G
diff --git a/scripts/lib/wic/canned-wks/qemuriscv.wks b/scripts/lib/wic/canned-wks/qemuriscv.wks
deleted file mode 100644
index 12c68b7069..0000000000
--- a/scripts/lib/wic/canned-wks/qemuriscv.wks
+++ /dev/null
@@ -1,3 +0,0 @@
1# short-description: Create qcow2 image for RISC-V QEMU machines
2
3part / --source rootfs --fstype=ext4 --label root --align 4096 --size 5G
diff --git a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
deleted file mode 100644
index 808997611a..0000000000
--- a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
+++ /dev/null
@@ -1,8 +0,0 @@
1# short-description: Create a qemu machine 'pcbios' direct disk image
2# long-description: Creates a partitioned legacy BIOS disk image that the user
3# can directly use to boot a qemu machine.
4
5include common.wks.inc
6
7bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 console=tty console=ttyS0 "
8
diff --git a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
deleted file mode 100644
index 63bc4dab6a..0000000000
--- a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
+++ /dev/null
@@ -1,6 +0,0 @@
1# short-description: Create SD card image with a boot partition
2# long-description: Creates a partitioned SD card image. Boot files
3# are located in the first vfat partition.
4
5part /boot --source bootimg-partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16
6part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4
diff --git a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
deleted file mode 100644
index 95d7b97a60..0000000000
--- a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
+++ /dev/null
@@ -1,11 +0,0 @@
1# short-description: Create an EFI disk image with systemd-boot
2# long-description: Creates a partitioned EFI disk image that the user
3# can directly dd to boot media. The selected bootloader is systemd-boot.
4
5part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid
6
7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
8
9part swap --ondisk sda --size 44 --label swap1 --fstype=swap --use-uuid
10
11bootloader --ptable gpt --timeout=5 --append="rootwait rootfstype=ext4 console=ttyS0,115200 console=tty0"
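As the comments in directdisk-multi-rootfs.wks above illustrate, each of these canned files could be selected by base name, e.g. (illustrative):

    wic create systemd-bootdisk -e core-image-minimal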
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py
deleted file mode 100644
index 64b1d52882..0000000000
--- a/scripts/lib/wic/engine.py
+++ /dev/null
@@ -1,644 +0,0 @@
1#
2# Copyright (c) 2013, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# DESCRIPTION
7
8# This module implements the image creation engine used by 'wic' to
9# create images. The engine parses through the OpenEmbedded kickstart
10# (wks) file specified and generates images that can then be directly
11# written onto media.
12#
13# AUTHORS
14# Tom Zanussi <tom.zanussi (at] linux.intel.com>
15#
16
17import logging
18import os
19import tempfile
20import json
21import subprocess
22import shutil
23import re
24
25from collections import namedtuple, OrderedDict
26
27from wic import WicError
28from wic.filemap import sparse_copy
29from wic.pluginbase import PluginMgr
30from wic.misc import get_bitbake_var, exec_cmd
31
32logger = logging.getLogger('wic')
33
34def verify_build_env():
35 """
36 Verify that the build environment is sane.
37
 38 Returns True if it is, raises WicError otherwise
39 """
40 if not os.environ.get("BUILDDIR"):
41 raise WicError("BUILDDIR not found, exiting. (Did you forget to source oe-init-build-env?)")
42
43 return True
44
45
46CANNED_IMAGE_DIR = "lib/wic/canned-wks" # relative to scripts
47SCRIPTS_CANNED_IMAGE_DIR = "scripts/" + CANNED_IMAGE_DIR
48WIC_DIR = "wic"
49
50def build_canned_image_list(path):
51 layers_path = get_bitbake_var("BBLAYERS")
52 canned_wks_layer_dirs = []
53
54 if layers_path is not None:
55 for layer_path in layers_path.split():
56 for wks_path in (WIC_DIR, SCRIPTS_CANNED_IMAGE_DIR):
57 cpath = os.path.join(layer_path, wks_path)
58 if os.path.isdir(cpath):
59 canned_wks_layer_dirs.append(cpath)
60
61 cpath = os.path.join(path, CANNED_IMAGE_DIR)
62 canned_wks_layer_dirs.append(cpath)
63
64 return canned_wks_layer_dirs
65
66def find_canned_image(scripts_path, wks_file):
67 """
68 Find a .wks file with the given name in the canned files dir.
69
 70 Return None if not found
71 """
72 layers_canned_wks_dir = build_canned_image_list(scripts_path)
73
74 for canned_wks_dir in layers_canned_wks_dir:
75 for root, dirs, files in os.walk(canned_wks_dir):
76 for fname in files:
77 if fname.endswith("~") or fname.endswith("#"):
78 continue
79 if ((fname.endswith(".wks") and wks_file + ".wks" == fname) or \
80 (fname.endswith(".wks.in") and wks_file + ".wks.in" == fname)):
81 fullpath = os.path.join(canned_wks_dir, fname)
82 return fullpath
83 return None
84
85
86def list_canned_images(scripts_path):
87 """
88 List the .wks files in the canned image dir, minus the extension.
89 """
90 layers_canned_wks_dir = build_canned_image_list(scripts_path)
91
92 for canned_wks_dir in layers_canned_wks_dir:
93 for root, dirs, files in os.walk(canned_wks_dir):
94 for fname in files:
95 if fname.endswith("~") or fname.endswith("#"):
96 continue
97 if fname.endswith(".wks") or fname.endswith(".wks.in"):
98 fullpath = os.path.join(canned_wks_dir, fname)
99 with open(fullpath) as wks:
100 for line in wks:
101 desc = ""
102 idx = line.find("short-description:")
103 if idx != -1:
104 desc = line[idx + len("short-description:"):].strip()
105 break
106 basename = fname.split('.')[0]
107 print(" %s\t\t%s" % (basename.ljust(30), desc))
108
109
110def list_canned_image_help(scripts_path, fullpath):
111 """
112 List the help and params in the specified canned image.
113 """
114 found = False
115 with open(fullpath) as wks:
116 for line in wks:
117 if not found:
118 idx = line.find("long-description:")
119 if idx != -1:
120 print()
121 print(line[idx + len("long-description:"):].strip())
122 found = True
123 continue
124 if not line.strip():
125 break
126 idx = line.find("#")
127 if idx != -1:
128 print(line[idx + len("#:"):].rstrip())
129 else:
130 break
131
132
133def list_source_plugins():
134 """
135 List the available source plugins i.e. plugins available for --source.
136 """
137 plugins = PluginMgr.get_plugins('source')
138
139 for plugin in plugins:
140 print(" %s" % plugin)
141
142
143def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir,
144 native_sysroot, options):
145 """
146 Create image
147
148 wks_file - user-defined OE kickstart file
149 rootfs_dir - absolute path to the build's /rootfs dir
150 bootimg_dir - absolute path to the build's boot artifacts directory
151 kernel_dir - absolute path to the build's kernel directory
152 native_sysroot - absolute path to the build's native sysroots dir
 153 image_output_dir - dirname to create for the image (taken from options.outdir)
154 options - wic command line options (debug, bmap, etc)
155
 156 Normally, the values for the build artifacts are determined by
 157 'wic -e' from the output of the 'bitbake -e' command given an
 158 image name, e.g. 'core-image-minimal', and a given machine set in
159 local.conf. If that's the case, the variables get the following
160 values from the output of 'bitbake -e':
161
162 rootfs_dir: IMAGE_ROOTFS
163 kernel_dir: DEPLOY_DIR_IMAGE
164 native_sysroot: STAGING_DIR_NATIVE
165
166 In the above case, bootimg_dir remains unset and the
167 plugin-specific image creation code is responsible for finding the
168 bootimg artifacts.
169
 170 In the other case, the values are passed in explicitly, i.e. 'wic -e'
 171 is not used and the individual 'wic' options are instead used to
 172 explicitly specify these values.
173 """
174 try:
175 oe_builddir = os.environ["BUILDDIR"]
176 except KeyError:
177 raise WicError("BUILDDIR not found, exiting. (Did you forget to source oe-init-build-env?)")
178
179 if not os.path.exists(options.outdir):
180 os.makedirs(options.outdir)
181
182 pname = options.imager
183 plugin_class = PluginMgr.get_plugins('imager').get(pname)
184 if not plugin_class:
185 raise WicError('Unknown plugin: %s' % pname)
186
187 plugin = plugin_class(wks_file, rootfs_dir, bootimg_dir, kernel_dir,
188 native_sysroot, oe_builddir, options)
189
190 plugin.do_create()
191
192 logger.info("The image(s) were created using OE kickstart file:\n %s", wks_file)
193
194
195def wic_list(args, scripts_path):
196 """
197 Print the list of images or source plugins.
198 """
199 if args.list_type is None:
200 return False
201
202 if args.list_type == "images":
203
204 list_canned_images(scripts_path)
205 return True
206 elif args.list_type == "source-plugins":
207 list_source_plugins()
208 return True
209 elif len(args.help_for) == 1 and args.help_for[0] == 'help':
210 wks_file = args.list_type
211 fullpath = find_canned_image(scripts_path, wks_file)
212 if not fullpath:
213 raise WicError("No image named %s found, exiting. "
214 "(Use 'wic list images' to list available images, "
215 "or specify a fully-qualified OE kickstart (.wks) "
216 "filename)" % wks_file)
217
218 list_canned_image_help(scripts_path, fullpath)
219 return True
220
221 return False
222
223
224class Disk:
225 def __init__(self, imagepath, native_sysroot, fstypes=('fat', 'ext')):
226 self.imagepath = imagepath
227 self.native_sysroot = native_sysroot
228 self.fstypes = fstypes
229 self._partitions = None
230 self._partimages = {}
231 self._lsector_size = None
232 self._psector_size = None
233 self._ptable_format = None
234
235 # define sector size
236 sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE')
237 if sector_size_str is not None:
238 try:
239 self.sector_size = int(sector_size_str)
240 except ValueError:
241 self.sector_size = None
242 else:
243 self.sector_size = None
244
245 # find parted
246 # read paths from $PATH environment variable
247 # if it fails, use hardcoded paths
248 pathlist = "/bin:/usr/bin:/usr/sbin:/sbin/"
249 try:
250 self.paths = os.environ['PATH'] + ":" + pathlist
251 except KeyError:
252 self.paths = pathlist
253
254 if native_sysroot:
255 for path in pathlist.split(':'):
256 self.paths = "%s%s:%s" % (native_sysroot, path, self.paths)
257
258 self.parted = shutil.which("parted", path=self.paths)
259 if not self.parted:
260 raise WicError("Can't find executable parted")
261
262 self.partitions = self.get_partitions()
263
264 def __del__(self):
265 for path in self._partimages.values():
266 os.unlink(path)
267
268 def get_partitions(self):
269 if self._partitions is None:
270 self._partitions = OrderedDict()
271
272 if self.sector_size is not None:
273 out = exec_cmd("export PARTED_SECTOR_SIZE=%d; %s -sm %s unit B print" % \
274 (self.sector_size, self.parted, self.imagepath), True)
275 else:
276 out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath))
277
278 parttype = namedtuple("Part", "pnum start end size fstype")
279 splitted = out.splitlines()
280 # skip over possible errors in exec_cmd output
281 try:
 282 idx = splitted.index("BYT;")
283 except ValueError:
284 raise WicError("Error getting partition information from %s" % (self.parted))
285 lsector_size, psector_size, self._ptable_format = splitted[idx + 1].split(":")[3:6]
286 self._lsector_size = int(lsector_size)
287 self._psector_size = int(psector_size)
288 for line in splitted[idx + 2:]:
289 pnum, start, end, size, fstype = line.split(':')[:5]
290 partition = parttype(int(pnum), int(start[:-1]), int(end[:-1]),
291 int(size[:-1]), fstype)
292 self._partitions[pnum] = partition
293
294 return self._partitions
295
296 def __getattr__(self, name):
297 """Get path to the executable in a lazy way."""
298 if name in ("mdir", "mcopy", "mdel", "mdeltree", "sfdisk", "e2fsck",
 299 "resize2fs", "mkswap", "mkdosfs", "debugfs", "blkid"):
300 aname = "_%s" % name
301 if aname not in self.__dict__:
302 setattr(self, aname, shutil.which(name, path=self.paths))
303 if aname not in self.__dict__ or self.__dict__[aname] is None:
304 raise WicError("Can't find executable '{}'".format(name))
305 return self.__dict__[aname]
306 return self.__dict__[name]
307
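    # (Illustrative note, not in the original file: the lazy lookup above means
    # the first access to e.g. self.mcopy runs shutil.which("mcopy",
    # path=self.paths) and caches the result as self._mcopy, so each external
    # tool is resolved at most once, and only if it is actually used.)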
308 def _get_part_image(self, pnum):
309 if pnum not in self.partitions:
310 raise WicError("Partition %s is not in the image" % pnum)
311 part = self.partitions[pnum]
312 # check if fstype is supported
313 for fstype in self.fstypes:
314 if part.fstype.startswith(fstype):
315 break
316 else:
 317 raise WicError("Unsupported fstype: {}".format(part.fstype))
318 if pnum not in self._partimages:
319 tmpf = tempfile.NamedTemporaryFile(prefix="wic-part")
320 dst_fname = tmpf.name
321 tmpf.close()
322 sparse_copy(self.imagepath, dst_fname, skip=part.start, length=part.size)
323 self._partimages[pnum] = dst_fname
324
325 return self._partimages[pnum]
326
327 def _put_part_image(self, pnum):
328 """Put partition image into partitioned image."""
329 sparse_copy(self._partimages[pnum], self.imagepath,
330 seek=self.partitions[pnum].start)
331
332 def dir(self, pnum, path):
333 if pnum not in self.partitions:
334 raise WicError("Partition %s is not in the image" % pnum)
335
336 if self.partitions[pnum].fstype.startswith('ext'):
337 return exec_cmd("{} {} -R 'ls -l {}'".format(self.debugfs,
338 self._get_part_image(pnum),
339 path), as_shell=True)
340 else: # fat
341 return exec_cmd("{} -i {} ::{}".format(self.mdir,
342 self._get_part_image(pnum),
343 path))
344
345 def copy(self, src, dest):
346 """Copy partition image into wic image."""
347 pnum = dest.part if isinstance(src, str) else src.part
348
349 if self.partitions[pnum].fstype.startswith('ext'):
350 if isinstance(src, str):
351 cmd = "printf 'cd {}\nwrite {} {}\n' | {} -w {}".\
352 format(os.path.dirname(dest.path), src, os.path.basename(src),
353 self.debugfs, self._get_part_image(pnum))
354 else: # copy from wic
355 # run both dump and rdump to support both files and directory
356 cmd = "printf 'cd {}\ndump /{} {}\nrdump /{} {}\n' | {} {}".\
357 format(os.path.dirname(src.path), src.path,
358 dest, src.path, dest, self.debugfs,
359 self._get_part_image(pnum))
360 else: # fat
361 if isinstance(src, str):
362 cmd = "{} -i {} -snop {} ::{}".format(self.mcopy,
363 self._get_part_image(pnum),
364 src, dest.path)
365 else:
366 cmd = "{} -i {} -snop ::{} {}".format(self.mcopy,
367 self._get_part_image(pnum),
368 src.path, dest)
369
370 exec_cmd(cmd, as_shell=True)
371 self._put_part_image(pnum)
372
373 def remove_ext(self, pnum, path, recursive):
374 """
375 Remove files/dirs and their contents from the partition.
376 This only applies to ext* partition.
377 """
378 abs_path = re.sub(r'\/\/+', '/', path)
379 cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs,
380 self._get_part_image(pnum),
381 abs_path)
 382 out = exec_cmd(cmd, as_shell=True)
383 for line in out.splitlines():
384 if line.startswith("rm:"):
385 if "file is a directory" in line:
386 if recursive:
 387 # loop through the contents and delete them one by one if
 388 # flagged with -r
389 subdirs = iter(self.dir(pnum, abs_path).splitlines())
390 next(subdirs)
391 for subdir in subdirs:
 392 entry = subdir.split(':')[1].split(" ", 1)[1]
 393 if entry not in (".", ".."):
 394 self.remove_ext(pnum, "%s/%s" % (abs_path, entry), recursive)
395
396 rmdir_out = exec_cmd("{} {} -wR 'rmdir \"{}\"'".format(self.debugfs,
397 self._get_part_image(pnum),
398 abs_path.rstrip('/'))
399 , as_shell=True)
400
401 for rmdir_line in rmdir_out.splitlines():
402 if "directory not empty" in rmdir_line:
403 raise WicError("Could not complete operation: \n%s \n"
404 "use -r to remove non-empty directory" % rmdir_line)
405 if rmdir_line.startswith("rmdir:"):
406 raise WicError("Could not complete operation: \n%s "
407 "\n%s" % (str(line), rmdir_line))
408
409 else:
410 raise WicError("Could not complete operation: \n%s "
411 "\nUnable to remove %s" % (str(line), abs_path))
412
413 def remove(self, pnum, path, recursive):
414 """Remove files/dirs from the partition."""
415 partimg = self._get_part_image(pnum)
416 if self.partitions[pnum].fstype.startswith('ext'):
417 self.remove_ext(pnum, path, recursive)
418
419 else: # fat
420 cmd = "{} -i {} ::{}".format(self.mdel, partimg, path)
421 try:
422 exec_cmd(cmd)
423 except WicError as err:
424 if "not found" in str(err) or "non empty" in str(err):
 425 # mdel outputs 'File ... not found' or 'directory .. non empty'
426 # try to use mdeltree as path could be a directory
427 cmd = "{} -i {} ::{}".format(self.mdeltree,
428 partimg, path)
429 exec_cmd(cmd)
430 else:
431 raise err
432 self._put_part_image(pnum)
433
434 def write(self, target, expand):
435 """Write disk image to the media or file."""
436 def write_sfdisk_script(outf, parts):
437 for key, val in parts['partitiontable'].items():
438 if key in ("partitions", "device", "firstlba", "lastlba"):
439 continue
440 if key == "id":
441 key = "label-id"
442 outf.write("{}: {}\n".format(key, val))
443 outf.write("\n")
444 for part in parts['partitiontable']['partitions']:
445 line = ''
446 for name in ('attrs', 'name', 'size', 'type', 'uuid'):
447 if name == 'size' and part['type'] == 'f':
448 # don't write size for extended partition
449 continue
450 val = part.get(name)
451 if val:
452 line += '{}={}, '.format(name, val)
453 if line:
454 line = line[:-2] # strip ', '
455 if part.get('bootable'):
456 line += ' ,bootable'
457 outf.write("{}\n".format(line))
458 outf.flush()
459
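For illustration, the script emitted by write_sfdisk_script() for a hypothetical two-partition msdos image looks roughly like the following; partition start offsets are intentionally omitted and left for sfdisk to compute:

    label: dos
    label-id: 0x4cab8040
    unit: sectors

    size=49152, type=c ,bootable
    size=98304, type=83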
460 def read_ptable(path):
461 out = exec_cmd("{} -J {}".format(self.sfdisk, path))
462 return json.loads(out)
463
464 def write_ptable(parts, target):
465 with tempfile.NamedTemporaryFile(prefix="wic-sfdisk-", mode='w') as outf:
466 write_sfdisk_script(outf, parts)
467 cmd = "{} --no-reread {} < {} ".format(self.sfdisk, target, outf.name)
468 exec_cmd(cmd, as_shell=True)
469
470 if expand is None:
471 sparse_copy(self.imagepath, target)
472 else:
473 # copy first sectors that may contain bootloader
474 sparse_copy(self.imagepath, target, length=2048 * self._lsector_size)
475
476 # copy source partition table to the target
477 parts = read_ptable(self.imagepath)
478 write_ptable(parts, target)
479
480 # get size of unpartitioned space
481 free = None
482 for line in exec_cmd("{} -F {}".format(self.sfdisk, target)).splitlines():
483 if line.startswith("Unpartitioned space ") and line.endswith("sectors"):
484 free = int(line.split()[-2])
485 # Align free space to a 2048 sector boundary. YOCTO #12840.
486 free = free - (free % 2048)
487 if free is None:
488 raise WicError("Can't get size of unpartitioned space")
489
490 # calculate expanded partitions sizes
491 sizes = {}
492 num_auto_resize = 0
493 for num, part in enumerate(parts['partitiontable']['partitions'], 1):
494 if num in expand:
495 if expand[num] != 0: # don't resize partition if size is set to 0
496 sectors = expand[num] // self._lsector_size
497 free -= sectors - part['size']
498 part['size'] = sectors
499 sizes[num] = sectors
500 elif part['type'] != 'f':
501 sizes[num] = -1
502 num_auto_resize += 1
503
504 for num, part in enumerate(parts['partitiontable']['partitions'], 1):
505 if sizes.get(num) == -1:
506 part['size'] += free // num_auto_resize
507
508 # write resized partition table to the target
509 write_ptable(parts, target)
510
511 # read resized partition table
512 parts = read_ptable(target)
513
514 # copy partitions content
515 for num, part in enumerate(parts['partitiontable']['partitions'], 1):
516 pnum = str(num)
517 fstype = self.partitions[pnum].fstype
518
519 # copy unchanged partition
520 if part['size'] == self.partitions[pnum].size // self._lsector_size:
521 logger.info("copying unchanged partition {}".format(pnum))
522 sparse_copy(self._get_part_image(pnum), target, seek=part['start'] * self._lsector_size)
523 continue
524
525 # resize or re-create partitions
526 if fstype.startswith('ext') or fstype.startswith('fat') or \
527 fstype.startswith('linux-swap'):
528
529 partfname = None
530 with tempfile.NamedTemporaryFile(prefix="wic-part{}-".format(pnum)) as partf:
531 partfname = partf.name
532
533 if fstype.startswith('ext'):
534 logger.info("resizing ext partition {}".format(pnum))
535 partimg = self._get_part_image(pnum)
536 sparse_copy(partimg, partfname)
537 exec_cmd("{} -pf {}".format(self.e2fsck, partfname))
538 exec_cmd("{} {} {}s".format(\
539 self.resize2fs, partfname, part['size']))
540 elif fstype.startswith('fat'):
541 logger.info("copying content of the fat partition {}".format(pnum))
542 with tempfile.TemporaryDirectory(prefix='wic-fatdir-') as tmpdir:
543 # copy content to the temporary directory
544 cmd = "{} -snompi {} :: {}".format(self.mcopy,
545 self._get_part_image(pnum),
546 tmpdir)
547 exec_cmd(cmd)
548 # create new msdos partition
549 label = part.get("name")
550 label_str = "-n {}".format(label) if label else ''
551
552 cmd = "{} {} -C {} {}".format(self.mkdosfs, label_str, partfname,
553 part['size'])
554 exec_cmd(cmd)
555 # copy content from the temporary directory to the new partition
556 cmd = "{} -snompi {} {}/* ::".format(self.mcopy, partfname, tmpdir)
557 exec_cmd(cmd, as_shell=True)
558 elif fstype.startswith('linux-swap'):
559 logger.info("creating swap partition {}".format(pnum))
560 label = part.get("name")
561 label_str = "-L {}".format(label) if label else ''
562 out = exec_cmd("{} --probe {}".format(self.blkid, self._get_part_image(pnum)))
563 uuid = out[out.index("UUID=\"")+6:out.index("UUID=\"")+42]
564 uuid_str = "-U {}".format(uuid) if uuid else ''
565 with open(partfname, 'w') as sparse:
566 os.ftruncate(sparse.fileno(), part['size'] * self._lsector_size)
567 exec_cmd("{} {} {} {}".format(self.mkswap, label_str, uuid_str, partfname))
568 sparse_copy(partfname, target, seek=part['start'] * self._lsector_size)
569 os.unlink(partfname)
570 elif part['type'] != 'f':
571 logger.warning("skipping partition {}: unsupported fstype {}".format(pnum, fstype))
572
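A minimal usage sketch of the write path (device path, image name and sizes are hypothetical): entries in 'expand' with a non-zero size are resized to that many bytes, entries set to 0 are left untouched, partitions not listed share the remaining free space, and passing expand=None simply sparse-copies the image:

    # sketch: keep partition 1 as-is, grow partition 2 to 300 MiB
    # (native_sysroot: path to a native sysroot providing sfdisk, e2fsck etc.)
    disk = Disk('core-image.wic', native_sysroot, ('fat', 'ext', 'linux-swap'))
    disk.write('/dev/sdb', {1: 0, 2: 300 * 1024 * 1024})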
573def wic_ls(args, native_sysroot):
574 """List contents of partitioned image or vfat partition."""
575 disk = Disk(args.path.image, native_sysroot)
576 if not args.path.part:
577 if disk.partitions:
578 print('Num Start End Size Fstype')
579 for part in disk.partitions.values():
580 print("{:2d} {:12d} {:12d} {:12d} {}".format(\
581 part.pnum, part.start, part.end,
582 part.size, part.fstype))
583 else:
584 path = args.path.path or '/'
585 print(disk.dir(args.path.part, path))
586
587def wic_cp(args, native_sysroot):
588 """
589 Copy file or directory to/from the vfat/ext partition of
590 partitioned image.
591 """
592 if isinstance(args.dest, str):
593 disk = Disk(args.src.image, native_sysroot)
594 else:
595 disk = Disk(args.dest.image, native_sysroot)
596 disk.copy(args.src, args.dest)
597
598
599def wic_rm(args, native_sysroot):
600 """
601 Remove files or directories from the vfat partition of
602 partitioned image.
603 """
604 disk = Disk(args.path.image, native_sysroot)
605 disk.remove(args.path.part, args.path.path, args.recursive_delete)
606
607def wic_write(args, native_sysroot):
608 """
609 Write image to a target device.
610 """
611 disk = Disk(args.image, native_sysroot, ('fat', 'ext', 'linux-swap'))
612 disk.write(args.target, args.expand)
613
614def find_canned(scripts_path, file_name):
615 """
616 Find a file either by its path or by name in the canned files dir.
617
618 Return None if not found
619 """
620 if os.path.exists(file_name):
621 return file_name
622
623 layers_canned_wks_dir = build_canned_image_list(scripts_path)
624 for canned_wks_dir in layers_canned_wks_dir:
625 for root, dirs, files in os.walk(canned_wks_dir):
626 for fname in files:
627 if fname == file_name:
628 fullpath = os.path.join(canned_wks_dir, fname)
629 return fullpath
630
631def get_custom_config(boot_file):
632 """
633 Get the custom configuration to be used for the bootloader.
634
635 Return None if the file can't be found.
636 """
637 # Get the scripts path of poky
638 scripts_path = os.path.abspath("%s/../.." % os.path.dirname(__file__))
639
640 cfg_file = find_canned(scripts_path, boot_file)
641 if cfg_file:
642 with open(cfg_file, "r") as f:
643 config = f.read()
644 return config
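A short usage sketch (the file name is hypothetical); get_custom_config() returns the file contents, or None when find_canned() cannot locate the file:

    # sketch: load a user-supplied bootloader config by name
    config = get_custom_config('custom-grub.cfg')
    if config is None:
        raise WicError('custom bootloader config not found')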
diff --git a/scripts/lib/wic/filemap.py b/scripts/lib/wic/filemap.py
deleted file mode 100644
index 85b39d5d74..0000000000
--- a/scripts/lib/wic/filemap.py
+++ /dev/null
@@ -1,583 +0,0 @@
1#
2# Copyright (c) 2012 Intel, Inc.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7"""
8 This module implements a way to get file block mapping. Two methods
9are supported - the FIEMAP ioctl and the 'SEEK_HOLE / SEEK_DATA' features of
10the file seek syscall. The former is implemented by the 'FilemapFiemap' class,
11the latter is implemented by the 'FilemapSeek' class. Both classes provide the
12same API. The 'filemap' function automatically selects which class can be used
13and returns an instance of the class.
14"""
15
16# Disable the following pylint recommendations:
17# * Too many instance attributes (R0902)
18# pylint: disable=R0902
19
20import errno
21import os
22import struct
23import array
24import fcntl
25import tempfile
26import logging
27
28def get_block_size(file_obj):
29 """
30 Returns block size for file object 'file_obj'. Errors are indicated by the
31 'IOError' exception.
32 """
33 # Get the block size of the host file-system for the image file by calling
34 # the FIGETBSZ ioctl (number 2).
35 try:
36 binary_data = fcntl.ioctl(file_obj, 2, struct.pack('I', 0))
37 bsize = struct.unpack('I', binary_data)[0]
38 except OSError:
39 bsize = None
40
41 # If the ioctl raises OSError or returns a zero bsize, fall back to os.fstat
42 if not bsize:
43 import os
44 stat = os.fstat(file_obj.fileno())
45 if hasattr(stat, 'st_blksize'):
46 bsize = stat.st_blksize
47 else:
48 raise IOError("Unable to determine block size")
49
50 # The logic in this script only supports a maximum of a 4KB
51 # block size
52 max_block_size = 4 * 1024
53 if bsize > max_block_size:
54 bsize = max_block_size
55
56 return bsize
57
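A brief usage sketch (the image path is hypothetical); the helper first tries the FIGETBSZ ioctl, falls back to os.fstat(), and clamps the result to 4 KiB:

    # sketch: query the block size used for mapping an image file
    with open('image.wic', 'rb') as img:
        bsize = get_block_size(img)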
58class ErrorNotSupp(Exception):
59 """
60 An exception of this type is raised when the 'FIEMAP' or 'SEEK_HOLE' feature
61 is not supported either by the kernel or the file-system.
62 """
63 pass
64
65class Error(Exception):
66 """A class for all the other exceptions raised by this module."""
67 pass
68
69
70class _FilemapBase(object):
71 """
72 This is a base class for a couple of other classes in this module. This
73 class simply performs the common parts of the initialization process: opens
74 the image file, gets its size, etc. The 'log' parameter is the logger object
75 to use for printing messages.
76 """
77
78 def __init__(self, image, log=None):
79 """
80 Initialize a class instance. The 'image' argument is the full path to the
81 file or file object to operate on.
82 """
83
84 self._log = log
85 if self._log is None:
86 self._log = logging.getLogger(__name__)
87
88 self._f_image_needs_close = False
89
90 if hasattr(image, "fileno"):
91 self._f_image = image
92 self._image_path = image.name
93 else:
94 self._image_path = image
95 self._open_image_file()
96
97 try:
98 self.image_size = os.fstat(self._f_image.fileno()).st_size
99 except IOError as err:
100 raise Error("cannot get information about file '%s': %s"
101 % (self._f_image.name, err))
102
103 try:
104 self.block_size = get_block_size(self._f_image)
105 except IOError as err:
106 raise Error("cannot get block size for '%s': %s"
107 % (self._image_path, err))
108
109 self.blocks_cnt = self.image_size + self.block_size - 1
110 self.blocks_cnt //= self.block_size
111
112 try:
113 self._f_image.flush()
114 except IOError as err:
115 raise Error("cannot flush image file '%s': %s"
116 % (self._image_path, err))
117
118 try:
119 os.fsync(self._f_image.fileno())
120 except OSError as err:
121 raise Error("cannot synchronize image file '%s': %s "
122 % (self._image_path, err.strerror))
123
124 self._log.debug("opened image \"%s\"" % self._image_path)
125 self._log.debug("block size %d, blocks count %d, image size %d"
126 % (self.block_size, self.blocks_cnt, self.image_size))
127
128 def __del__(self):
129 """The class destructor which just closes the image file."""
130 if self._f_image_needs_close:
131 self._f_image.close()
132
133 def _open_image_file(self):
134 """Open the image file."""
135 try:
136 self._f_image = open(self._image_path, 'rb')
137 except IOError as err:
138 raise Error("cannot open image file '%s': %s"
139 % (self._image_path, err))
140
141 self._f_image_needs_close = True
142
143 def block_is_mapped(self, block): # pylint: disable=W0613,R0201
144 """
145 This method has to be implemented by child classes. It returns
146 'True' if block number 'block' of the image file is mapped and 'False'
147 otherwise.
148 """
149
150 raise Error("the method is not implemented")
151
152 def get_mapped_ranges(self, start, count): # pylint: disable=W0613,R0201
153 """
154 This method has to be implemented by child classes. This is a
155 generator which yields ranges of mapped blocks in the file. The ranges
156 are tuples of 2 elements: [first, last], where 'first' is the first
157 mapped block and 'last' is the last mapped block.
158
159 The ranges are yielded for the area of the file of size 'count' blocks,
160 starting from block 'start'.
161 """
162
163 raise Error("the method is not implemented")
164
165
166# The 'SEEK_HOLE' and 'SEEK_DATA' options of the file seek system call
167_SEEK_DATA = 3
168_SEEK_HOLE = 4
169
170def _lseek(file_obj, offset, whence):
171 """This is a helper function which invokes 'os.lseek' for file object
172 'file_obj' and with specified 'offset' and 'whence'. The 'whence'
173 argument is supposed to be either '_SEEK_DATA' or '_SEEK_HOLE'. When
174 there is no more data or hole starting from 'offset', this function
175 returns '-1'. Otherwise the data or hole position is returned."""
176
177 try:
178 return os.lseek(file_obj.fileno(), offset, whence)
179 except OSError as err:
180 # The 'lseek' system call returns the ENXIO if there is no data or
181 # hole starting from the specified offset.
182 if err.errno == errno.ENXIO:
183 return -1
184 elif err.errno == errno.EINVAL:
185 raise ErrorNotSupp("the kernel or file-system does not support "
186 "\"SEEK_HOLE\" and \"SEEK_DATA\"")
187 else:
188 raise
189
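A small sketch of the helper's semantics (the file path is hypothetical): on a filesystem with real SEEK_HOLE/SEEK_DATA support, a fully sparse file reports -1 for the first data offset, while a stub implementation reports the first hole only at EOF:

    # sketch: probe where data and holes start in a file
    with open('sparse.img', 'rb') as f:
        first_data = _lseek(f, 0, _SEEK_DATA)  # -1 if the file is one big hole
        first_hole = _lseek(f, 0, _SEEK_HOLE)  # EOF offset on stub implementations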
190class FilemapSeek(_FilemapBase):
191 """
192 This class uses the 'SEEK_HOLE' and 'SEEK_DATA' to find file block mapping.
193 Unfortunately, the current implementation requires the caller to have write
194 access to the image file.
195 """
196
197 def __init__(self, image, log=None):
198 """Refer the '_FilemapBase' class for the documentation."""
199
200 # Call the base class constructor first
201 _FilemapBase.__init__(self, image, log)
202 self._log.debug("FilemapSeek: initializing")
203
204 self._probe_seek_hole()
205
206 def _probe_seek_hole(self):
207 """
208 Check whether the system implements 'SEEK_HOLE' and 'SEEK_DATA'.
209 Unfortunately, there seems to be no clean way of detecting this,
210 because often the system just fakes them by assuming that all
211 files are fully mapped, so 'SEEK_HOLE' always returns EOF and
212 'SEEK_DATA' always returns the requested offset.
213
214 I could not invent a better way of detecting the fake 'SEEK_HOLE'
215 implementation than to create a temporary file in the same
216 directory where the image file resides. It would be nice to change this
217 to something better.
218 """
219
220 directory = os.path.dirname(self._image_path)
221
222 try:
223 tmp_obj = tempfile.TemporaryFile("w+", dir=directory)
224 except IOError as err:
225 raise ErrorNotSupp("cannot create a temporary file in \"%s\": %s" \
226 % (directory, err))
227
228 try:
229 os.ftruncate(tmp_obj.fileno(), self.block_size)
230 except OSError as err:
231 raise ErrorNotSupp("cannot truncate temporary file in \"%s\": %s"
232 % (directory, err))
233
234 offs = _lseek(tmp_obj, 0, _SEEK_HOLE)
235 if offs != 0:
236 # We are dealing with the stub 'SEEK_HOLE' implementation which
237 # always returns EOF.
238 self._log.debug("lseek(0, SEEK_HOLE) returned %d" % offs)
239 raise ErrorNotSupp("the file-system does not support "
240 "\"SEEK_HOLE\" and \"SEEK_DATA\" but only "
241 "provides a stub implementation")
242
243 tmp_obj.close()
244
245 def block_is_mapped(self, block):
246 """Refer the '_FilemapBase' class for the documentation."""
247 offs = _lseek(self._f_image, block * self.block_size, _SEEK_DATA)
248 if offs == -1:
249 result = False
250 else:
251 result = (offs // self.block_size == block)
252
253 self._log.debug("FilemapSeek: block_is_mapped(%d) returns %s"
254 % (block, result))
255 return result
256
257 def _get_ranges(self, start, count, whence1, whence2):
258 """
259 This function implements 'get_mapped_ranges()' depending
260 on what is passed in the 'whence1' and 'whence2' arguments.
261 """
262
263 assert whence1 != whence2
264 end = start * self.block_size
265 limit = end + count * self.block_size
266
267 while True:
268 start = _lseek(self._f_image, end, whence1)
269 if start == -1 or start >= limit or start == self.image_size:
270 break
271
272 end = _lseek(self._f_image, start, whence2)
273 if end == -1 or end == self.image_size:
274 end = self.blocks_cnt * self.block_size
275 if end > limit:
276 end = limit
277
278 start_blk = start // self.block_size
279 end_blk = end // self.block_size - 1
280 self._log.debug("FilemapSeek: yielding range (%d, %d)"
281 % (start_blk, end_blk))
282 yield (start_blk, end_blk)
283
284 def get_mapped_ranges(self, start, count):
285 """Refer the '_FilemapBase' class for the documentation."""
286 self._log.debug("FilemapSeek: get_mapped_ranges(%d, %d(%d))"
287 % (start, count, start + count - 1))
288 return self._get_ranges(start, count, _SEEK_DATA, _SEEK_HOLE)
289
290
291# Below goes the FIEMAP ioctl implementation, which is not very readable
292# because it deals with the rather complex FIEMAP ioctl. To understand the
293# code, you need to know the FIEMAP interface, which is documented in the
294# "Documentation/filesystems/fiemap.txt" file in the Linux kernel sources.
295
296# Format string for 'struct fiemap'
297_FIEMAP_FORMAT = "=QQLLLL"
298# sizeof(struct fiemap)
299_FIEMAP_SIZE = struct.calcsize(_FIEMAP_FORMAT)
300# Format string for 'struct fiemap_extent'
301_FIEMAP_EXTENT_FORMAT = "=QQQQQLLLL"
302# sizeof(struct fiemap_extent)
303_FIEMAP_EXTENT_SIZE = struct.calcsize(_FIEMAP_EXTENT_FORMAT)
304# The FIEMAP ioctl number
305_FIEMAP_IOCTL = 0xC020660B
306# This FIEMAP ioctl flag which instructs the kernel to sync the file before
307# reading the block map
308_FIEMAP_FLAG_SYNC = 0x00000001
309# Size of the buffer for 'struct fiemap_extent' elements which will be used
310 # when invoking the FIEMAP ioctl. The larger the buffer, the fewer
311 # times the FIEMAP ioctl will need to be invoked.
312_FIEMAP_BUFFER_SIZE = 256 * 1024
313
314class FilemapFiemap(_FilemapBase):
315 """
316 This class provides an API to the FIEMAP ioctl. Namely, it allows
317 iterating over all mapped blocks and over all holes.
318
319 This class synchronizes the image file every time it invokes the FIEMAP
320 ioctl in order to work-around early FIEMAP implementation kernel bugs.
321 """
322
323 def __init__(self, image, log=None):
324 """
325 Initialize a class instance. The 'image' argument is the full path
326 to the file, or the file object, to operate on.
327 """
328
329 # Call the base class constructor first
330 _FilemapBase.__init__(self, image, log)
331 self._log.debug("FilemapFiemap: initializing")
332
333 self._buf_size = _FIEMAP_BUFFER_SIZE
334
335 # Calculate how many 'struct fiemap_extent' elements fit the buffer
336 self._buf_size -= _FIEMAP_SIZE
337 self._fiemap_extent_cnt = self._buf_size // _FIEMAP_EXTENT_SIZE
338 assert self._fiemap_extent_cnt > 0
339 self._buf_size = self._fiemap_extent_cnt * _FIEMAP_EXTENT_SIZE
340 self._buf_size += _FIEMAP_SIZE
341
342 # Allocate a mutable buffer for the FIEMAP ioctl
343 self._buf = array.array('B', [0] * self._buf_size)
344
345 # Check if the FIEMAP ioctl is supported
346 self.block_is_mapped(0)
347
348 def _invoke_fiemap(self, block, count):
349 """
350 Invoke the FIEMAP ioctl for 'count' blocks of the file starting from
351 block number 'block'.
352
353 The full result of the operation is stored in 'self._buf' on exit.
354 Returns the unpacked 'struct fiemap' data structure in the form of a
355 python tuple (just like 'struct.unpack()').
356 """
357
358 if self.blocks_cnt != 0 and (block < 0 or block >= self.blocks_cnt):
359 raise Error("bad block number %d, should be within [0, %d]"
360 % (block, self.blocks_cnt))
361
362 # Initialize the 'struct fiemap' part of the buffer. We use the
363 # '_FIEMAP_FLAG_SYNC' flag in order to make sure the file is
364 # synchronized. The reason for this is that early FIEMAP
365 # implementations had many bugs related to cached dirty data, and
366 # synchronizing the file is a necessary work-around.
367 struct.pack_into(_FIEMAP_FORMAT, self._buf, 0, block * self.block_size,
368 count * self.block_size, _FIEMAP_FLAG_SYNC, 0,
369 self._fiemap_extent_cnt, 0)
370
371 try:
372 fcntl.ioctl(self._f_image, _FIEMAP_IOCTL, self._buf, 1)
373 except IOError as err:
374 # Note, the FIEMAP ioctl is supported by the Linux kernel starting
375 # from version 2.6.28 (year 2008).
376 if err.errno == errno.EOPNOTSUPP:
377 errstr = "FilemapFiemap: the FIEMAP ioctl is not supported " \
378 "by the file-system"
379 self._log.debug(errstr)
380 raise ErrorNotSupp(errstr)
381 if err.errno == errno.ENOTTY:
382 errstr = "FilemapFiemap: the FIEMAP ioctl is not supported " \
383 "by the kernel"
384 self._log.debug(errstr)
385 raise ErrorNotSupp(errstr)
386 raise Error("the FIEMAP ioctl failed for '%s': %s"
387 % (self._image_path, err))
388
389 return struct.unpack(_FIEMAP_FORMAT, self._buf[:_FIEMAP_SIZE])
390
391 def block_is_mapped(self, block):
392 """Refer the '_FilemapBase' class for the documentation."""
393 struct_fiemap = self._invoke_fiemap(block, 1)
394
395 # The element at index 3 of 'struct_fiemap' is the 'fm_mapped_extents'
396 # field. If it contains zero, the block is not mapped, otherwise it
397 # is mapped.
398 result = bool(struct_fiemap[3])
399 self._log.debug("FilemapFiemap: block_is_mapped(%d) returns %s"
400 % (block, result))
401 return result
402
403 def _unpack_fiemap_extent(self, index):
404 """
405 Unpack a 'struct fiemap_extent' structure object number 'index' from
406 the internal 'self._buf' buffer.
407 """
408
409 offset = _FIEMAP_SIZE + _FIEMAP_EXTENT_SIZE * index
410 return struct.unpack(_FIEMAP_EXTENT_FORMAT,
411 self._buf[offset : offset + _FIEMAP_EXTENT_SIZE])
412
413 def _do_get_mapped_ranges(self, start, count):
414 """
415 Implements most of the functionality for the 'get_mapped_ranges()'
416 generator: invokes the FIEMAP ioctl, walks through the mapped extents
417 and yields mapped block ranges. However, the ranges may be consecutive
418 (e.g., (1, 100), (100, 200)) and 'get_mapped_ranges()' simply merges
419 them.
420 """
421
422 block = start
423 while block < start + count:
424 struct_fiemap = self._invoke_fiemap(block, count)
425
426 mapped_extents = struct_fiemap[3]
427 if mapped_extents == 0:
428 # No more mapped blocks
429 return
430
431 extent = 0
432 while extent < mapped_extents:
433 fiemap_extent = self._unpack_fiemap_extent(extent)
434
435 # Start of the extent
436 extent_start = fiemap_extent[0]
437 # Starting block number of the extent
438 extent_block = extent_start // self.block_size
439 # Length of the extent
440 extent_len = fiemap_extent[2]
441 # Count of blocks in the extent
442 extent_count = extent_len // self.block_size
443
444 # Extent length and offset have to be block-aligned
445 assert extent_start % self.block_size == 0
446 assert extent_len % self.block_size == 0
447
448 if extent_block > start + count - 1:
449 return
450
451 first = max(extent_block, block)
452 last = min(extent_block + extent_count, start + count) - 1
453 yield (first, last)
454
455 extent += 1
456
457 block = extent_block + extent_count
458
459 def get_mapped_ranges(self, start, count):
460 """Refer the '_FilemapBase' class for the documentation."""
461 self._log.debug("FilemapFiemap: get_mapped_ranges(%d, %d(%d))"
462 % (start, count, start + count - 1))
463 iterator = self._do_get_mapped_ranges(start, count)
464 first_prev, last_prev = next(iterator)
465
466 for first, last in iterator:
467 if last_prev == first - 1:
468 last_prev = last
469 else:
470 self._log.debug("FilemapFiemap: yielding range (%d, %d)"
471 % (first_prev, last_prev))
472 yield (first_prev, last_prev)
473 first_prev, last_prev = first, last
474
475 self._log.debug("FilemapFiemap: yielding range (%d, %d)"
476 % (first_prev, last_prev))
477 yield (first_prev, last_prev)
478
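To make the merging concrete: if _do_get_mapped_ranges() produced the ranges (0, 9), (10, 19) and (30, 39), the generator above would yield (0, 19) and (30, 39), since the first two ranges are block-adjacent.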
479class FilemapNobmap(_FilemapBase):
480 """
481 This class is used when neither 'SEEK_DATA/HOLE' nor FIEMAP is
482 supported by the filesystem or kernel.
483 """
484
485 def __init__(self, image, log=None):
486 """Refer the '_FilemapBase' class for the documentation."""
487
488 # Call the base class constructor first
489 _FilemapBase.__init__(self, image, log)
490 self._log.debug("FilemapNobmap: initializing")
491
492 def block_is_mapped(self, block):
493 """Refer the '_FilemapBase' class for the documentation."""
494 return True
495
496 def get_mapped_ranges(self, start, count):
497 """Refer to the '_FilemapBase' class for the documentation."""
498 self._log.debug("FilemapNobmap: get_mapped_ranges(%d, %d(%d))"
499 % (start, count, start + count - 1))
500 yield (start, start + count - 1)
501
502def filemap(image, log=None):
503 """
504 Create and return an instance of a Filemap class - 'FilemapFiemap' or
505 'FilemapSeek', depending on what the system we run on supports. If the
506 FIEMAP ioctl is supported, an instance of the 'FilemapFiemap' class is
507 returned. Otherwise, if 'SEEK_HOLE' is supported an instance of the
508 'FilemapSeek' class is returned. If neither is supported, an instance
509 of the 'FilemapNobmap' class is returned.
510 """
511
512 try:
513 return FilemapFiemap(image, log)
514 except ErrorNotSupp:
515 try:
516 return FilemapSeek(image, log)
517 except ErrorNotSupp:
518 return FilemapNobmap(image, log)
519
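A usage sketch of the factory (the image path is hypothetical), showing the iteration pattern that sparse_copy() below relies on:

    # sketch: enumerate the mapped block ranges of an image
    fmap = filemap('image.wic')
    for first, last in fmap.get_mapped_ranges(0, fmap.blocks_cnt):
        print('blocks %d..%d are mapped' % (first, last))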
520def sparse_copy(src_fname, dst_fname, skip=0, seek=0,
521 length=0, api=None):
522 """
523 Efficiently copy a sparse file to or into another file.
524
525 src_fname: path to source file
526 dst_fname: path to destination file
527 skip: skip N bytes at the start of src
528 seek: seek N bytes from the start of dst
529 length: read N bytes from src and write them to dst
530 api: FilemapFiemap or FilemapSeek object
531 """
532 if not api:
533 api = filemap
534 fmap = api(src_fname)
535 try:
536 dst_file = open(dst_fname, 'r+b')
537 except IOError:
538 dst_file = open(dst_fname, 'wb')
539 if length:
540 dst_size = length + seek
541 else:
542 dst_size = os.path.getsize(src_fname) + seek - skip
543 dst_file.truncate(dst_size)
544
545 written = 0
546 for first, last in fmap.get_mapped_ranges(0, fmap.blocks_cnt):
547 start = first * fmap.block_size
548 end = (last + 1) * fmap.block_size
549
550 if skip >= end:
551 continue
552
553 if start < skip < end:
554 start = skip
555
556 fmap._f_image.seek(start, os.SEEK_SET)
557
558 written += start - skip - written
559 if length and written >= length:
560 dst_file.seek(seek + length, os.SEEK_SET)
561 dst_file.close()
562 return
563
564 dst_file.seek(seek + start - skip, os.SEEK_SET)
565
566 chunk_size = 1024 * 1024
567 to_read = end - start
568 read = 0
569
570 while read < to_read:
571 if read + chunk_size > to_read:
572 chunk_size = to_read - read
573 size = chunk_size
574 if length and written + size > length:
575 size = length - written
576 chunk = fmap._f_image.read(size)
577 dst_file.write(chunk)
578 read += size
579 written += size
580 if written == length:
581 dst_file.close()
582 return
583 dst_file.close()
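A closing usage sketch (paths and offset are hypothetical), mirroring how the Disk.write() code above splices partition images into the target at their start offsets:

    # sketch: copy a partition image into a disk image at a 1 MiB offset
    sparse_copy('part1.img', 'disk.img', seek=1024 * 1024)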
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py
deleted file mode 100644
index 9180d75a92..0000000000
--- a/scripts/lib/wic/help.py
+++ /dev/null
@@ -1,1180 +0,0 @@
1# Copyright (c) 2013, Intel Corporation.
2#
3# SPDX-License-Identifier: GPL-2.0-only
4#
5# DESCRIPTION
6# This module implements some basic help invocation functions along
7# with the bulk of the help topic text for the OE Core Image Tools.
8#
9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
11#
12
13import subprocess
14import logging
15
16from wic.pluginbase import PluginMgr, PLUGIN_TYPES
17
18logger = logging.getLogger('wic')
19
20def subcommand_error(args):
21 logger.info("invalid subcommand %s", args[0])
22
23
24def display_help(subcommand, subcommands):
25 """
26 Display help for subcommand.
27 """
28 if subcommand not in subcommands:
29 return False
30
31 hlp = subcommands.get(subcommand, subcommand_error)[2]
32 if callable(hlp):
33 hlp = hlp()
34 pager = subprocess.Popen('less', stdin=subprocess.PIPE)
35 pager.communicate(hlp.encode('utf-8'))
36
37 return True
38
39
40def wic_help(args, usage_str, subcommands):
41 """
42 Subcommand help dispatcher.
43 """
44 if args.help_topic is None or not display_help(args.help_topic, subcommands):
45 print(usage_str)
46
47
48def get_wic_plugins_help():
49 """
50 Combine wic_plugins_help with the help for every known
51 source plugin.
52 """
53 result = wic_plugins_help
54 for plugin_type in PLUGIN_TYPES:
55 result += '\n\n%s PLUGINS\n\n' % plugin_type.upper()
56 for name, plugin in PluginMgr.get_plugins(plugin_type).items():
57 result += "\n %s plugin:\n" % name
58 if plugin.__doc__:
59 result += plugin.__doc__
60 else:
61 result += "\n %s is missing docstring\n" % plugin
62 return result
63
64
65def invoke_subcommand(args, parser, main_command_usage, subcommands):
66 """
67 Dispatch to subcommand handler borrowed from combo-layer.
68 Should use argparse, but has to work in 2.6.
69 """
70 if not args.command:
71 logger.error("No subcommand specified, exiting")
72 parser.print_help()
73 return 1
74 elif args.command == "help":
75 wic_help(args, main_command_usage, subcommands)
76 elif args.command not in subcommands:
77 logger.error("Unsupported subcommand %s, exiting\n", args.command)
78 parser.print_help()
79 return 1
80 else:
81 subcmd = subcommands.get(args.command, subcommand_error)
82 usage = subcmd[1]
83 subcmd[0](args, usage)
84
85
86##
87# wic help and usage strings
88##
89
90wic_usage = """
91
92 Create a customized OpenEmbedded image
93
94 usage: wic [--version] | [--help] | [COMMAND [ARGS]]
95
96 Current 'wic' commands are:
97 help Show help for command or one of the topics (see below)
98 create Create a new OpenEmbedded image
99 list List available canned images and source plugins
100
101 Help topics:
102 overview wic overview - General overview of wic
103 plugins wic plugins - Overview and API
104 kickstart wic kickstart - wic kickstart reference
105"""
106
107wic_help_usage = """
108
109 usage: wic help <subcommand>
110
111 This command displays detailed help for the specified subcommand.
112"""
113
114wic_create_usage = """
115
116 Create a new OpenEmbedded image
117
118 usage: wic create <wks file or image name> [-o <DIRNAME> | --outdir <DIRNAME>]
119 [-e | --image-name] [-s, --skip-build-check] [-D, --debug]
120 [-r, --rootfs-dir] [-b, --bootimg-dir]
121 [-k, --kernel-dir] [-n, --native-sysroot] [-f, --build-rootfs]
122 [-c, --compress-with] [-m, --bmap]
123
124 This command creates an OpenEmbedded image based on the 'OE kickstart
125 commands' found in the <wks file>.
126
127 The -o option can be used to place the image in a directory with a
128 different name and location.
129
130 See 'wic help create' for more detailed instructions.
131"""
132
133wic_create_help = """
134
135NAME
136 wic create - Create a new OpenEmbedded image
137
138SYNOPSIS
139 wic create <wks file or image name> [-o <DIRNAME> | --outdir <DIRNAME>]
140 [-e | --image-name] [-s, --skip-build-check] [-D, --debug]
141 [-r, --rootfs-dir] [-b, --bootimg-dir]
142 [-k, --kernel-dir] [-n, --native-sysroot] [-f, --build-rootfs]
143 [-c, --compress-with] [-m, --bmap] [--no-fstab-update]
144
145DESCRIPTION
146 This command creates an OpenEmbedded image based on the 'OE
147 kickstart commands' found in the <wks file>.
148
149 In order to do this, wic needs to know the locations of the
150 various build artifacts required to build the image.
151
152 Users can explicitly specify the build artifact locations using
153 the -r, -b, -k, and -n options. See below for details on where
154 the corresponding artifacts are typically found in a normal
155 OpenEmbedded build.
156
157 Alternatively, users can use the -e option to have 'wic' determine
158 those locations for a given image. If the -e option is used, the
159 user needs to have set the appropriate MACHINE variable in
160 local.conf, and have sourced the build environment.
161
162 The -e option is used to specify the name of the image to use the
163 artifacts from, e.g. core-image-sato.
164
165 The -r option is used to specify the path to the /rootfs dir to
166 use as the .wks rootfs source.
167
168 The -b option is used to specify the path to the dir containing
169 the boot artifacts (e.g. /EFI or /syslinux dirs) to use as the
170 .wks bootimg source.
171
172 The -k option is used to specify the path to the dir containing
173 the kernel to use in the .wks bootimg.
174
175 The -n option is used to specify the path to the native sysroot
176 containing the tools to use to build the image.
177
178 The -f option is used to build the rootfs by running "bitbake <image>".
179
180 The -s option is used to skip the build check. The build check is
181 a simple sanity check used to determine whether the user has
182 sourced the build environment so that the -e option can operate
183 correctly. If the user has specified the build artifact locations
184 explicitly, 'wic' assumes the user knows what he or she is doing
185 and skips the build check.
186
187 The -D option is used to display debug information detailing
188 exactly what happens behind the scenes when a create request is
189 fulfilled (or not, as the case may be). It enumerates and
190 displays the command sequence used, and should be included in any
191 bug report describing unexpected results.
192
193 When 'wic -e' is used, the locations for the build artifacts
194 values are determined by 'wic -e' from the output of the 'bitbake
195 -e' command given an image name e.g. 'core-image-minimal' and a
196 given machine set in local.conf. In that case, the image is
197 created as if the following 'bitbake -e' variables were used:
198
199 -r: IMAGE_ROOTFS
200 -k: STAGING_KERNEL_DIR
201 -n: STAGING_DIR_NATIVE
202 -b: empty (plugin-specific handlers must determine this)
203
204 If 'wic -e' is not used, the user needs to select the appropriate
205 value for -b (as well as -r, -k, and -n).
206
207 The -o option can be used to place the image in a directory with a
208 different name and location.
209
210 The -c option is used to specify the compressor utility used to
211 compress an image. The gzip, bzip2 and xz compressors are supported.
212
213 The -m option is used to produce a .bmap file for the image. This file
214 can be used to flash the image using the bmaptool utility.
215
216 The --no-fstab-update option is used to keep the fstab file unchanged.
217 When this option is used, the final fstab file will be the same as the
218 one in the rootfs; wic will not update it, e.g. by adding a new mount
219 point. Users can control the fstab file content via the base-files recipe.
220"""
221
222wic_list_usage = """
223
224 List available OpenEmbedded images and source plugins
225
226 usage: wic list images
227 wic list <image> help
228 wic list source-plugins
229
230 This command enumerates the set of available canned images as well as
231 help for those images. It can also be used to list the available
232 source plugins.
233
234 The first form enumerates all the available 'canned' images.
235
236 The second form lists the detailed help information for a specific
237 'canned' image.
238
239 The third form enumerates all the available --sources (source
240 plugins).
241
242 See 'wic help list' for more details.
243"""
244
245wic_list_help = """
246
247NAME
248 wic list - List available OpenEmbedded images and source plugins
249
250SYNOPSIS
251 wic list images
252 wic list <image> help
253 wic list source-plugins
254
255DESCRIPTION
256 This command enumerates the set of available canned images as well
257 as help for those images. It can also be used to list the available
258 source plugins.
259
260 The first form enumerates all the available 'canned' images.
261 These are actually just the set of .wks files that have been moved
262 into the /scripts/lib/wic/canned-wks directory.
263
264 The second form lists the detailed help information for a specific
265 'canned' image.
266
267 The third form enumerates all the available --sources (source
268 plugins). The contents of a given partition are driven by code
269 defined in 'source plugins'. Users specify a specific plugin via
270 the --source parameter of the partition .wks command. Normally
271 this is the 'rootfs' plugin but can be any of the more specialized
272 sources listed by the 'list source-plugins' command. Users can
273 also add their own source plugins - see 'wic help plugins' for
274 details.
275"""
276
277wic_ls_usage = """
278
279 List content of a partitioned image
280
281 usage: wic ls <image>[:<partition>[<path>]] [--native-sysroot <path>]
282
283 This command outputs either a list of image partitions or the
284 directory contents of vfat and ext* partitions.
285
286 See 'wic help ls' for more detailed instructions.
287
288"""
289
290wic_ls_help = """
291
292NAME
293 wic ls - List contents of partitioned image or partition
294
295SYNOPSIS
296 wic ls <image>
297 wic ls <image>:<vfat or ext* partition>
298 wic ls <image>:<vfat or ext* partition><path>
299 wic ls <image>:<vfat or ext* partition><path> --native-sysroot <path>
300
301DESCRIPTION
302 This command lists either partitions of the image or directory contents
303 of vfat or ext* partitions.
304
305 In its first form, it lists the partitions of the image.
306 For example:
307 $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic
308 Num Start End Size Fstype
309 1 1048576 24438783 23390208 fat16
310 2 25165824 50315263 25149440 ext4
311
312 The second and third forms list the directory contents of the partition:
313 $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
314 Volume in drive : is boot
315 Volume Serial Number is 2DF2-5F02
316 Directory for ::/
317
318 efi <DIR> 2017-05-11 10:54
319 startup nsh 26 2017-05-11 10:54
320 vmlinuz 6922288 2017-05-11 10:54
321 3 files 6 922 314 bytes
322 15 818 752 bytes free
323
324
325 $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/EFI/boot/
326 Volume in drive : is boot
327 Volume Serial Number is 2DF2-5F02
328 Directory for ::/EFI/boot
329
330 . <DIR> 2017-05-11 10:54
331 .. <DIR> 2017-05-11 10:54
332 grub cfg 679 2017-05-11 10:54
333 bootx64 efi 571392 2017-05-11 10:54
334 4 files 572 071 bytes
335 15 818 752 bytes free
336
337 The -n option is used to specify the path to the native sysroot
338 containing the tools (parted and mtools) to use.
339
340"""
341
342wic_cp_usage = """
343
344 Copy files and directories to/from the vfat or ext* partition
345
346 usage: wic cp <src> <dest> [--native-sysroot <path>]
347
348 source/destination image in format <image>:<partition>[<path>]
349
350 This command copies files or directories either
351 - from local to vfat or ext* partitions of partitioned image
352 - from vfat or ext* partitions of partitioned image to local
353
354 See 'wic help cp' for more detailed instructions.
355
356"""
357
358wic_cp_help = """
359
360NAME
361 wic cp - copy files and directories to/from the vfat or ext* partitions
362
363SYNOPSIS
364 wic cp <src> <dest>:<partition>
365 wic cp <src>:<partition> <dest>
366 wic cp <src> <dest-image>:<partition><path>
367 wic cp <src> <dest-image>:<partition><path> --native-sysroot <path>
368
369DESCRIPTION
370 This command copies files or directories either
371 - from local to vfat or ext* partitions of partitioned image
372 - from vfat or ext* partitions of partitioned image to local
373
374 In its first form, the command copies a file or directory to the root
375 directory of the partition:
376 $ wic cp test.wks tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
377 $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
378 Volume in drive : is boot
379 Volume Serial Number is DB4C-FD4C
380 Directory for ::/
381
382 efi <DIR> 2017-05-24 18:15
383 loader <DIR> 2017-05-24 18:15
384 startup nsh 26 2017-05-24 18:15
385 vmlinuz 6926384 2017-05-24 18:15
386 test wks 628 2017-05-24 21:22
387 5 files 6 927 038 bytes
388 15 677 440 bytes free
389
390 The second form of the command copies a file or directory to the specified directory
391 on the partition:
392 $ wic cp test tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/efi/
393 $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/efi/
394 Volume in drive : is boot
395 Volume Serial Number is DB4C-FD4C
396 Directory for ::/efi
397
398 . <DIR> 2017-05-24 18:15
399 .. <DIR> 2017-05-24 18:15
400 boot <DIR> 2017-05-24 18:15
401 test <DIR> 2017-05-24 21:27
402 4 files 0 bytes
403 15 675 392 bytes free
404
405 The third form of the command copies a file or directory from the specified
406 directory on the partition to the local filesystem:
407 $ wic cp tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/vmlinuz test
408
409 The -n option is used to specify the path to the native sysroot
410 containing the tools (parted and mtools) to use.
411"""
412
413wic_rm_usage = """
414
415 Remove files or directories from the vfat or ext* partitions
416
417 usage: wic rm <image>:<partition><path> [--native-sysroot <path>]
418
419 This command removes files or directories from the vfat or ext* partitions of
420 the partitioned image.
421
422 See 'wic help rm' for more detailed instructions.
423
424"""
425
426wic_rm_help = """
427
428NAME
429 wic rm - remove files or directories from the vfat or ext* partitions
430
431SYNOPSIS
432 wic rm <src> <image>:<partition><path>
433 wic rm <src> <image>:<partition><path> --native-sysroot <path>
434 wic rm -r <image>:<partition><path>
435
436DESCRIPTION
437 This command removes files or directories from the vfat or ext* partition of the
438 partitioned image:
439
440 $ wic ls ./tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
441 Volume in drive : is boot
442 Volume Serial Number is 11D0-DE21
443 Directory for ::/
444
445 libcom32 c32 186500 2017-06-02 15:15
446 libutil c32 24148 2017-06-02 15:15
447 syslinux cfg 209 2017-06-02 15:15
448 vesamenu c32 27104 2017-06-02 15:15
449 vmlinuz 6926384 2017-06-02 15:15
450 5 files 7 164 345 bytes
451 16 582 656 bytes free
452
453 $ wic rm ./tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/libutil.c32
454
455 $ wic ls ./tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
456 Volume in drive : is boot
457 Volume Serial Number is 11D0-DE21
458 Directory for ::/
459
460 libcom32 c32 186500 2017-06-02 15:15
461 syslinux cfg 209 2017-06-02 15:15
462 vesamenu c32 27104 2017-06-02 15:15
463 vmlinuz 6926384 2017-06-02 15:15
464 4 files 7 140 197 bytes
465 16 607 232 bytes free
466
467 The -n option is used to specify the path to the native sysroot
468 containing the tools (parted and mtools) to use.
469
470 The -r option is used to remove directories and their contents
471 recursively; this only applies to ext* partitions.
472"""
473
474wic_write_usage = """
475
476 Write image to a device
477
478 usage: wic write <image> <target device> [--expand [rules]] [--native-sysroot <path>]
479
480 This command writes a partitioned image to a target device (USB stick, SD card etc).
481
482 See 'wic help write' for more detailed instructions.
483
484"""
485
486wic_write_help = """
487
488NAME
489 wic write - write an image to a device
490
491SYNOPSIS
492 wic write <image> <target>
493 wic write <image> <target> --expand auto
494 wic write <image> <target> --expand 1:100M,2:300M
495 wic write <image> <target> --native-sysroot <path>
496
497DESCRIPTION
498 This command writes an image to a target device (USB stick, SD card etc)
499
500 $ wic write ./tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic /dev/sdb
501
502 The --expand option is used to resize image partitions.
503 --expand auto expands partitions to occupy all free space available on the target device.
504 It's also possible to specify expansion rules in a format
505 <partition>:<size>[,<partition>:<size>...] for one or more partitions.
506 Specifying size 0 will keep the partition unmodified.
507 Note: Resizing the boot partition can result in a non-bootable image for non-EFI
508 images. It is recommended to use size 0 for the boot partition to keep the image bootable.
509
510 The --native-sysroot option is used to specify the path to the native sysroot
511 containing the tools (parted, resize2fs) to use.
512"""
513
514wic_plugins_help = """
515
516NAME
517 wic plugins - Overview and API
518
519DESCRIPTION
520 Plugins allow wic functionality to be extended and specialized by
521 users. This section documents the plugin interface, which is
522 currently restricted to 'source' plugins.
523
524 'Source' plugins provide a mechanism to customize various aspects
525 of the image generation process in wic, mainly the contents of
526 partitions.
527
528 Source plugins provide a mechanism for mapping values specified in
529 .wks files using the --source keyword to a particular plugin
530 implementation that populates a corresponding partition.
531
532 A source plugin is created as a subclass of SourcePlugin (see
533 scripts/lib/wic/pluginbase.py) and the plugin file containing it
534 is added to scripts/lib/wic/plugins/source/ to make the plugin
535 implementation available to the wic implementation.
536
537 Source plugins can also be implemented and added by external
538 layers - any plugins found in a scripts/lib/wic/plugins/source/
539 or lib/wic/plugins/source/ directory in an external layer will
540 also be made available.
541
542 When the wic implementation needs to invoke a partition-specific
543 implementation, it looks for the plugin that has the same name as
544 the --source param given to that partition. For example, if the
545 partition is set up like this:
546
547 part /boot --source bootimg-pcbios ...
548
549 then the methods defined as class members of the plugin having the
550 matching bootimg-pcbios .name class member would be used.
551
552 To be more concrete, here's the plugin definition that would match
553 a '--source bootimg-pcbios' usage, along with an example method
554 that would be called by the wic implementation when it needed to
555 invoke an implementation-specific partition-preparation function:
556
557 class BootimgPcbiosPlugin(SourcePlugin):
558 name = 'bootimg-pcbios'
559
560 @classmethod
561 def do_prepare_partition(cls, part, ...)
562
563 If the subclass itself doesn't implement a function, a 'default'
564 version in a superclass will be located and used, which is why all
565 plugins must be derived from SourcePlugin.
566
567 The SourcePlugin class defines the following methods, which are the
568 current set of methods that can be implemented/overridden by
569 --source plugins. Any methods not implemented by a SourcePlugin
570 subclass inherit the implementations present in the SourcePlugin
571 class (see the SourcePlugin source for details):
572
573 do_prepare_partition()
574 Called to do the actual content population for a
575 partition. In other words, it 'prepares' the final partition
576 image which will be incorporated into the disk image.
577
578 do_post_partition()
579 Called after the partition is created. It is useful to add post
580 operations e.g. signing the partition.
581
582 do_configure_partition()
583 Called before do_prepare_partition(), typically used to
584 create custom configuration files for a partition, for
585 example syslinux or grub config files.
586
587 do_install_disk()
588 Called after all partitions have been prepared and assembled
589 into a disk image. This provides a hook to allow
590 finalization of a disk image, for example to write an MBR to
591 it.
592
593 do_stage_partition()
594 Special content-staging hook called before
595 do_prepare_partition(), normally empty.
596
597 Typically, a partition will just use the passed-in
598 parameters, for example the unmodified value of bootimg_dir.
599 In some cases however, things may need to be more tailored.
600 As an example, certain files may additionally need to be
601 taken from bootimg_dir + /boot. This hook allows those files
602 to be staged in a customized fashion. Note that
603 get_bitbake_var() allows you to access non-standard
604 variables that you might want to use for these types of
605 situations.
606
607 This scheme is extensible - adding more hooks is a simple matter
608 of adding more plugin methods to SourcePlugin and derived classes.
609 Please see the implementation for details.
610"""
611
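To ground the hook descriptions above, here is a minimal source plugin sketch; the class name, plugin name and argument lists are illustrative assumptions, not an in-tree plugin:

    from wic.pluginbase import SourcePlugin

    class MySourcePlugin(SourcePlugin):
        """Populate a partition from a hypothetical custom source."""
        name = 'my-source'

        @classmethod
        def do_configure_partition(cls, part, source_params, creator,
                                   cr_workdir, oe_builddir, bootimg_dir,
                                   kernel_dir, native_sysroot):
            # runs before do_prepare_partition(); generate any custom
            # configuration files (e.g. a bootloader config) here
            pass

        @classmethod
        def do_prepare_partition(cls, part, source_params, creator,
                                 cr_workdir, oe_builddir, bootimg_dir,
                                 kernel_dir, rootfs_dir, native_sysroot):
            # produce the final partition image that will be incorporated
            # into the disk image
            pass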
612wic_overview_help = """
613
614NAME
615 wic overview - General overview of wic
616
617DESCRIPTION
618 The 'wic' command generates partitioned images from existing
619 OpenEmbedded build artifacts. Image generation is driven by
620 partitioning commands contained in an 'Openembedded kickstart'
621 (.wks) file (see 'wic help kickstart') specified either directly
622 on the command-line or as one of a selection of canned .wks files
623 (see 'wic list images'). When applied to a given set of build
624 artifacts, the result is an image or set of images that can be
625 directly written onto media and used on a particular system.
626
627 The 'wic' command and the infrastructure it's based on are by
628 definition incomplete - its purpose is to allow the generation of
629 customized images, and as such was designed to be completely
630 extensible via a plugin interface (see 'wic help plugins').
631
632 Background and Motivation
633
634 wic is meant to be a completely independent standalone utility
635 that initially provides easier-to-use and more flexible
636 replacements for a couple of bits of existing functionality in
637 oe-core: directdisk.bbclass and mkefidisk.sh. The difference
638 between wic and those examples is that with wic the functionality
639 of those scripts is implemented by a general-purpose partitioning
640 'language' based on Red Hat kickstart syntax.
641
642 The initial motivation and design considerations that led to the
643 current tool are described exhaustively in Yocto Bug #3847
644 (https://bugzilla.yoctoproject.org/show_bug.cgi?id=3847).
645
646 Implementation and Examples
647
648 wic can be used in two different modes, depending on how much
649 control the user needs in specifying the Openembedded build
650 artifacts that will be used in creating the image: 'raw' and
651 'cooked'.
652
653 If used in 'raw' mode, artifacts are explicitly specified via
654 command-line arguments (see example below).
655
656 The more easily usable 'cooked' mode uses the current MACHINE
657 setting and a specified image name to automatically locate the
658 artifacts used to create the image.
659
660 OE kickstart files (.wks) can of course be specified directly on
661 the command-line, but the user can also choose from a set of
662 'canned' .wks files available via the 'wic list images' command
663 (example below).
664
665 In any case, the prerequisite for generating any image is to have
666 the build artifacts already available. The examples below assume
667 the user has already built a 'core-image-minimal' for a specific
668 machine (future versions won't require this redundant step, but
669 for now that's typically how build artifacts get generated).
670
671 The other prerequisite is to source the build environment:
672
673 $ source oe-init-build-env
674
675 To start out with, we'll generate an image from one of the canned
676 .wks files. The following generates a list of available
677 images:
678
679 $ wic list images
680 mkefidisk Create an EFI disk image
681 directdisk Create a 'pcbios' direct disk image
682
683 You can get more information about any of the available images by
684 typing 'wic list xxx help', where 'xxx' is one of the image names:
685
686 $ wic list mkefidisk help
687
688 Creates a partitioned EFI disk image that the user can directly dd
689 to boot media.
690
691 At any time, you can get help on the 'wic' command or any
692 subcommand (currently 'list' and 'create'). For instance, to get
693 the description of 'wic create' command and its parameters:
694
695 $ wic create
696
697 Usage:
698
699 Create a new OpenEmbedded image
700
701 usage: wic create <wks file or image name> [-o <DIRNAME> | ...]
702 [-i <JSON PROPERTY FILE> | --infile <JSON PROPERTY_FILE>]
703 [-e | --image-name] [-s, --skip-build-check] [-D, --debug]
704 [-r, --rootfs-dir] [-b, --bootimg-dir] [-k, --kernel-dir]
705 [-n, --native-sysroot] [-f, --build-rootfs]
706
707 This command creates an OpenEmbedded image based on the 'OE
708 kickstart commands' found in the <wks file>.
709
710 The -o option can be used to place the image in a directory
711 with a different name and location.
712
713 See 'wic help create' for more detailed instructions.
714 ...
715
716 As mentioned in the command, you can get even more detailed
717 information by adding 'help' to the above:
718
719 $ wic help create
720
721 So, the easiest way to create an image is to use the -e option
722 with a canned .wks file. To use the -e option, you need to
723 specify the image used to generate the artifacts and you actually
724 need to have the MACHINE used to build them specified in your
725 local.conf (these requirements aren't necessary if you aren't
726 using the -e option). Below, we generate a directdisk image,
727 pointing the process at the core-image-minimal artifacts for the
728 current MACHINE:
729
730 $ wic create directdisk -e core-image-minimal
731
732 Checking basic build environment...
733 Done.
734
735 Creating image(s)...
736
737 Info: The new image(s) can be found here:
738 /var/tmp/wic/build/directdisk-201309252350-sda.direct
739
740 The following build artifacts were used to create the image(s):
741
742 ROOTFS_DIR: ...
743 BOOTIMG_DIR: ...
744 KERNEL_DIR: ...
745 NATIVE_SYSROOT: ...
746
747 The image(s) were created using OE kickstart file:
748 .../scripts/lib/wic/canned-wks/directdisk.wks
749
750 The output shows the name and location of the image created and,
751 so that you know exactly what was used to generate the image, each
752 of the artifacts and the kickstart file used.
753
754 Similarly, you can create a 'mkefidisk' image in the same way
755 (notice that this example uses a different machine - because it's
756 using the -e option, you need to change the MACHINE in your
757 local.conf):
758
759 $ wic create mkefidisk -e core-image-minimal
760 Checking basic build environment...
761 Done.
762
763 Creating image(s)...
764
765 Info: The new image(s) can be found here:
766 /var/tmp/wic/build/mkefidisk-201309260027-sda.direct
767
768 ...
769
770 Here's an example that doesn't take the easy way out and manually
771 specifies each build artifact, along with a non-canned .wks file,
772 and also uses the -o option to have wic create the output
773 somewhere other than the default /var/tmp/wic:
774
775 $ wic create ./test.wks -o ./out --rootfs-dir
776 tmp/work/qemux86_64-poky-linux/core-image-minimal/1.0-r0/rootfs
777 --bootimg-dir tmp/sysroots/qemux86-64/usr/share
778 --kernel-dir tmp/deploy/images/qemux86-64
779 --native-sysroot tmp/sysroots/x86_64-linux
780
781 Creating image(s)...
782
783 Info: The new image(s) can be found here:
784 out/build/test-201507211313-sda.direct
785
786 The following build artifacts were used to create the image(s):
787 ROOTFS_DIR: tmp/work/qemux86_64-poky-linux/core-image-minimal/1.0-r0/rootfs
788 BOOTIMG_DIR: tmp/sysroots/qemux86-64/usr/share
789 KERNEL_DIR: tmp/deploy/images/qemux86-64
790 NATIVE_SYSROOT: tmp/sysroots/x86_64-linux
791
792 The image(s) were created using OE kickstart file:
793 ./test.wks
794
795 Here are the contents of test.wks:
796
797 part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
798 part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024
799
800 bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0"
801
802
803 Finally, here's an example of the actual partition language
804 commands used to generate the mkefidisk image i.e. these are the
805 contents of the mkefidisk.wks OE kickstart file:
806
807 # short-description: Create an EFI disk image
808 # long-description: Creates a partitioned EFI disk image that the user
809 # can directly dd to boot media.
810
811 part /boot --source bootimg-efi --ondisk sda --fstype=efi --active
812
813 part / --source rootfs --ondisk sda --fstype=ext3 --label platform
814
815 part swap --ondisk sda --size 44 --label swap1 --fstype=swap
816
817 bootloader --timeout=10 --append="rootwait console=ttyPCH0,115200"
818
819 You can get a complete listing and description of all the
820 kickstart commands available for use in .wks files from 'wic help
821 kickstart'.
822"""
823
824wic_kickstart_help = """
825
826NAME
827 wic kickstart - wic kickstart reference
828
829DESCRIPTION
830 This section provides the definitive reference to the wic
831 kickstart language. It also provides documentation on the list of
832 --source plugins available for use from the 'part' command (see
833 the 'Platform-specific Plugins' section below).
834
835 The current wic implementation supports only the basic kickstart
836 partitioning commands: partition (or part for short) and
837 bootloader, plus a wic-specific 'include' command (see below).
838
839 The following is a listing of the commands, their syntax, and
840 meanings. The commands are based on the Fedora kickstart
841 documentation but with modifications to reflect wic capabilities.
842
843 https://pykickstart.readthedocs.io/en/latest/kickstart-docs.html#part-or-partition
844 https://pykickstart.readthedocs.io/en/latest/kickstart-docs.html#bootloader
845
846 Commands
847
848 * 'part' or 'partition'
849
850 This command creates a partition on the system and uses the
851 following syntax:
852
853 part [<mountpoint>]
854
855 The <mountpoint> is where the partition will be mounted and
856 must take one of the following forms:
857
858 /<path>: For example: /, /usr, or /home
859
860 swap: The partition will be used as swap space.
861
862 If a <mountpoint> is not specified, the partition will be created
863 but will not be mounted.
864
865 Partitions with a <mountpoint> specified will be automatically mounted.
866 This is achieved by wic adding entries to the fstab during image
867 generation. In order for a valid fstab to be generated, one of the
868 --ondrive, --ondisk, --use-uuid or --use-label partition options must
869 be used for each partition that specifies a mountpoint. Note that with
870 --use-{uuid,label} and non-root <mountpoint>, including swap, the mount
871 program must understand the PARTUUID or LABEL syntax. This currently
872 excludes the busybox versions of these applications.
873
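For example, here is an illustrative pair of 'part' commands whose
mountpoints will get valid fstab entries (the disk, filesystem and
label names are examples only):

    part /boot --ondisk sda --fstype=ext4 --label boot --active
    part / --ondisk sda --fstype=ext4 --use-uuid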
874
875 The following are supported 'part' options:
876
877 --size: The minimum partition size. Specify an integer value
878 such as 500. The multipliers k, M and G can be used. If
879 no multiplier is specified, the size is taken to be in MB.
880 You do not need this option if you use --source.
881
882 --fixed-size: Exact partition size. The value format is the same
883 as for the --size option. This option cannot be
884 specified along with --size. If the partition data
885 is larger than --fixed-size, an error will be
886 raised when assembling the disk image.
887
888 --source: This option is a wic-specific option that names the
889 source of the data that will populate the
890 partition. The most common value for this option
891 is 'rootfs', but can be any value which maps to a
892 valid 'source plugin' (see 'wic help plugins').
893
894 If '--source rootfs' is used, it tells the wic
895 command to create a partition as large as needed
896 and to fill it with the contents of the root
897 filesystem pointed to by the '-r' wic command-line
898 option (or the equivalent rootfs derived from the
899 '-e' command-line option). The filesystem type
900 that will be used to create the partition is driven
901 by the value of the --fstype option specified for
902 the partition (see --fstype below).
903
904 If '--source <plugin-name>' is used, it tells the
905 wic command to create a partition as large as
906 needed and to fill it with the contents of the
907 partition that will be generated by the specified
908 plugin name using the data pointed to by the '-r'
909 wic command-line option (or the equivalent rootfs
910 derived from the '-e' command-line option).
911 Exactly what those contents and filesystem type end
912 up being are dependent on the given plugin
913 implementation.
914
915 If the --source option is not used, the wic command
916 will create an empty partition. The --size parameter
917 has to be used to specify the size of the empty partition.
918
919 --sourceparams: This option is specific to wic. Supply additional
920 parameters to the source plugin in
921 key1=value1,key2=value2 format; valueless keys are also accepted.
922
923 --ondisk or --ondrive: Forces the partition to be created on
924 a particular disk.
925
926 --fstype: Sets the file system type for the partition. This
927 applies to partitions created using '--source rootfs' (see
928 --source above). Valid values are:
929
930 vfat
931 msdos
932 ext2
933 ext3
934 ext4
935 btrfs
936 squashfs
937 erofs
938 swap
939 none
940
941 --fsoptions: Specifies a free-form string of options to be
942 used when mounting the filesystem. This string
943 will be copied into the /etc/fstab file of the
944 installed system and should be enclosed in
945 quotes. If not specified, the default string is
946 "defaults".
947
948 --fspassno: Specifies the order in which filesystem checks are done
949 at boot time by fsck. See fs_passno parameter of
950 fstab(5). This parameter will be copied into the
951 /etc/fstab file of the installed system. If not
952 specified, the default value of "0" will be used.
953
954 --label label: Specifies the label to give to the filesystem
955 to be made on the partition. If the given
956 label is already in use by another filesystem,
957 a new label is created for the partition.
958
959 --use-label: This option is specific to wic. It makes wic use the
960 label in /etc/fstab to specify a partition. If
961 --use-label and --use-uuid are used at the same time,
962 the uuid is preferred because it is less likely to
963 cause name conflicts. This parameter is not supported
964 on the root partition, since that requires an initramfs
965 to parse the value and wic does not currently support that.
966
967 --active: Marks the partition as active.
968
969 --align (in KBytes): This option is specific to wic. It starts
970 the partition on an x KBytes
971 boundary.
972
973 --offset: This option is specific to wic. It places the partition
974 at exactly the specified offset. If the partition cannot be
975 placed at the specified offset, the image build will fail.
976 Specify an integer value optionally followed by one of the
977 units: s/S for a 512-byte sector, k/K for kibibytes, M for
978 mebibytes and G for gibibytes. The default unit if none is
979 given is k.
980
981 --no-table: This option is specific to wic. Space will be
982 reserved for the partition and it will be
983 populated but it will not be added to the
984 partition table. It may be useful for
985 bootloaders.
986
987 --exclude-path: This option is specific to wic. It excludes the given
988 relative path from the resulting image. If the path
989 ends with a slash, only the content of the directory
990 is omitted, not the directory itself. This option only
991 has an effect with the rootfs source plugin.
992
993 --include-path: This option is specific to wic. It adds the contents
994 of the given path or a rootfs to the resulting image.
995 The option contains two fields, the origin and the
996 destination. When the origin is a rootfs, it follows
997 the same logic as the rootfs-dir argument and the
998 permissions and owners are kept. When the origin is a
999 path, it is relative to the directory in which wic is
1000 running, not the rootfs itself, so use of an absolute
1001 path is recommended, and the owner and group are set to
1002 root:root. If no destination is given, it is
1003 automatically set to the root of the rootfs. This
1004 option only has an effect with the rootfs source
1005 plugin.
1006
1007 --change-directory: This option is specific to wic. It changes to the
1008 given directory before copying the files. This
1009 option is useful when you want to split a rootfs
1010 into multiple partitions while keeping the right
1011 permissions and usernames in all the partitions.
1012
1013 --no-fstab-update: This option is specific to wic. It prevents wic
1014 from updating the stock '/etc/fstab' file for the given partition.
1015
1016 --extra-space: This option is specific to wic. It adds extra
1017 space after the space filled by the content
1018 of the partition. The final size can go
1019 beyond the size specified by --size.
1020 The default is 10MB. This option cannot be used
1021 with the --fixed-size option.
1022
1023 --overhead-factor: This option is specific to wic. The
1024 size of the partition is multiplied by
1025 this factor. It has to be greater than or
1026 equal to 1. The default value is 1.3.
1027 This option cannot be used with the
1028 --fixed-size option.
1029
1030 --part-name: This option is specific to wic. It specifies the name for GPT partitions.
1031
1032 --part-type: This option is specific to wic. It specifies the
1033 partition type GUID for GPT partitions.
1034 A list of partition type GUIDs can be found here:
1035 http://en.wikipedia.org/wiki/GUID_Partition_Table#Partition_type_GUIDs
1036
1037 --use-uuid: This option is specific to wic. It makes wic generate a
1038 random globally unique identifier (GUID) for the partition
1039 and use it in the bootloader configuration to specify the root partition.
1040
1041 --uuid: This option is specific to wic. It specifies the partition UUID.
1042 It's useful if a preconfigured partition UUID is added to the kernel command
1043 line in the bootloader configuration before running wic. In this case the .wks
1044 file can be generated or modified to set the preconfigured partition UUID using this option.
1045
1046 --fsuuid: This option is specific to wic. It specifies the filesystem UUID.
1047 It's useful if a preconfigured filesystem UUID is added to the kernel command
1048 line in the bootloader configuration before running wic. In this case the .wks
1049 file can be generated or modified to set the preconfigured filesystem UUID using this option.
1050
1051 --system-id: This option is specific to wic. It specifies the partition system id. It's useful
1052 for hardware that requires non-default partition system ids. The parameter
1053 is a one byte long hex number, either with the 0x prefix or without it.
1054
1055 --mkfs-extraopts: This option specifies extra options to pass to the mkfs utility.
1056 Note that wic uses default options for some filesystems, for example
1057 '-S 512' for mkfs.fat and '-F -i 8192' for mkfs.ext. Those defaults
1058 will not take effect when --mkfs-extraopts is used; take this into
1059 account when overriding them.
1060
1061 --type: This option is specific to wic. Valid values are 'primary'
1062 and 'logical'. For msdos partition tables, this option specifies
1063 the partition type.
1064
1065 --hidden: This option is specific to wic. This option sets the
1066 RequiredPartition bit (bit 0) on GPT partitions.
1067
1068 --mbr: This option is specific to wic. It is used with the
1069 gpt-hybrid partition table format, which uses both a GPT
1070 partition table and an MBR header. Partitions with this flag
1071 will be included in the MBR header.
1072
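To tie several of these options together, here is a single
illustrative 'part' line (the values are examples, not defaults):

    part / --source rootfs --ondisk sda --fstype=ext4 --use-uuid --overhead-factor 1.2 --extra-space 100M --align 4096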
1073 * bootloader
1074
1075 This command allows the user to specify various bootloader
1076 options. The following are supported 'bootloader' options:
1077
1078 --timeout: Specifies the number of seconds before the
1079 bootloader times out and boots the default option.
1080
1081 --append: Specifies kernel parameters. These will be added to the
1082 bootloader command line - for example, to the syslinux
1083 APPEND or grub kernel command line.
1084
1085 --configfile: Specifies a user defined configuration file for
1086 the bootloader. This can be either the name of a file
1087 in the canned-wks folder or the full path to the
1088 file. Using this option will override any other
1089 bootloader option.
1090
1091 --ptable: Specifies the partition table format. Valid values are
1092 'msdos', 'gpt', 'gpt-hybrid'.
1093
1094 --source: Specifies the source plugin. If not specified, the
1095 --source value will be copied from the partition that has
1096 /boot as mountpoint.
1097
1098 Note that bootloader functionality and boot partitions are
1099 implemented by the various --source plugins; the bootloader
1100 command essentially provides a means of modifying the
1101 bootloader configuration.
1102
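For example, an illustrative 'bootloader' line exercising the
options above (the values are examples only):

    bootloader --ptable gpt --timeout=5 --append="rootwait console=ttyS0,115200"
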
1103 * include
1104
1105 This command allows the user to include the content of another
1106 .wks file in the original .wks file.
1107
1108 The command uses the following syntax:
1109
1110 include <file>
1111
1112 The <file> is either a path to the file or its name. If a name is
1113 specified, wic will try to find the file in the directories containing
1114 the canned .wks files.
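
For example, to pull in one of the canned include files shipped
with wic:

    include common.wks.inc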
1115
1116"""
1117
1118wic_help_help = """
1119NAME
1120 wic help - display a help topic
1121
1122DESCRIPTION
1123 Specify a help topic to display it. Topics are shown above.
1124"""
1125
1126
1127wic_help = """
1128Creates a customized OpenEmbedded image.
1129
1130Usage: wic [--version]
1131 wic help [COMMAND or TOPIC]
1132 wic COMMAND [ARGS]
1133
1134 usage 1: Returns the current version of Wic
1135 usage 2: Returns detailed help for a COMMAND or TOPIC
1136 usage 3: Executes COMMAND
1137
1138
1139COMMAND:
1140
1141 list - List available canned images and source plugins
1142 ls - List contents of partitioned image or partition
1143 rm - Remove files or directories from the vfat or ext* partitions
1144 help - Show help for a wic COMMAND or TOPIC
1145 write - Write an image to a device
1146 cp - Copy files and directories to the vfat or ext* partitions
1147 create - Create a new OpenEmbedded image
1148
1149
1150TOPIC:
1151 overview - Presents an overall overview of Wic
1152 plugins - Presents an overview and API for Wic plugins
1153 kickstart - Presents a Wic kickstart file reference
1154
1155
1156Examples:
1157
1158 $ wic --version
1159
1160 Returns the current version of Wic
1161
1162
1163 $ wic help cp
1164
1165 Returns the SYNOPSIS and DESCRIPTION for the Wic "cp" command.
1166
1167
1168 $ wic list images
1169
1170 Returns the list of canned images (i.e. *.wks files located in
1171 the /scripts/lib/wic/canned-wks directory).
1172
1173
1174 $ wic create mkefidisk -e core-image-minimal
1175
1176 Creates an EFI disk image from artifacts used in a previous
1177 core-image-minimal build in standard BitBake locations
1178 (e.g. Cooked Mode).
1179
1180"""
diff --git a/scripts/lib/wic/ksparser.py b/scripts/lib/wic/ksparser.py
deleted file mode 100644
index 7ef3dc83dd..0000000000
--- a/scripts/lib/wic/ksparser.py
+++ /dev/null
@@ -1,298 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright (c) 2016 Intel, Inc.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7# DESCRIPTION
8# This module provides a parser for the kickstart format
9#
10# AUTHORS
11# Ed Bartosh <ed.bartosh> (at] linux.intel.com>
12
13"""Kickstart parser module."""
14
15import os
16import shlex
17import logging
18import re
19
20from argparse import ArgumentParser, ArgumentError, ArgumentTypeError
21
22from wic.engine import find_canned
23from wic.partition import Partition
24from wic.misc import get_bitbake_var
25
26logger = logging.getLogger('wic')
27
28__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
29
30def expand_line(line):
31 while True:
32 m = __expand_var_regexp__.search(line)
33 if not m:
34 return line
35 key = m.group()[2:-1]
36 val = get_bitbake_var(key)
37 if val is None:
38 logger.warning("cannot expand variable %s", key)
39 return line
40 line = line[:m.start()] + val + line[m.end():]
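# For example (variable name illustrative): a line such as
# "part / --size ${ROOT_SIZE}" has ${ROOT_SIZE} replaced with whatever
# value bitbake reports for that variable before the line is parsed.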
41
42class KickStartError(Exception):
43 """Custom exception."""
44 pass
45
46class KickStartParser(ArgumentParser):
47 """
48 This class overrides the error method to raise an exception
49 instead of printing a usage message (the default argparse behavior).
50 """
51 def error(self, message):
52 raise ArgumentError(None, message)
53
54def sizetype(default, size_in_bytes=False):
55 def f(arg):
56 """
57 Custom type for ArgumentParser
58 Converts size string in <num>[S|s|K|k|M|G] format into the integer value
59 """
60 try:
61 suffix = default
62 size = int(arg)
63 except ValueError:
64 try:
65 suffix = arg[-1:]
66 size = int(arg[:-1])
67 except ValueError:
68 raise ArgumentTypeError("Invalid size: %r" % arg)
69
70
71 if size_in_bytes:
72 if suffix == 's' or suffix == 'S':
73 return size * 512
74 mult = 1024
75 else:
76 mult = 1
77
78 if suffix == "k" or suffix == "K":
79 return size * mult
80 if suffix == "M":
81 return size * mult * 1024
82 if suffix == "G":
83 return size * mult * 1024 * 1024
84
85 raise ArgumentTypeError("Invalid size: %r" % arg)
86 return f
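# A rough sketch of the conversions above (results are in kB unless
# size_in_bytes=True):
#   sizetype("M")("10")       -> 10240  (bare "10" gets the default 'M' suffix)
#   sizetype("M")("512k")     -> 512
#   sizetype("K", True)("4s") -> 2048   (4 sectors * 512 bytes)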
87
88def overheadtype(arg):
89 """
90 Custom type for ArgumentParser
91 Converts overhead string to float and checks if it's bigger than 1.0
92 """
93 try:
94 result = float(arg)
95 except ValueError:
96 raise ArgumentTypeError("Invalid value: %r" % arg)
97
98 if result < 1.0:
99 raise ArgumentTypeError("Invalid overhead factor %r, must be >= 1.0" % arg)
100
101 return result
102
103def cannedpathtype(arg):
104 """
105 Custom type for ArgumentParser
106 Tries to find file in the list of canned wks paths
107 """
108 scripts_path = os.path.abspath(os.path.dirname(__file__) + '../../..')
109 result = find_canned(scripts_path, arg)
110 if not result:
111 raise ArgumentTypeError("file not found: %s" % arg)
112 return result
113
114def systemidtype(arg):
115 """
116 Custom type for ArgumentParser
117 Checks if the argument satisfies system id requirements,
118 i.e. if it's a one byte long integer > 0
119 """
120 error = "Invalid system id: %s. Must be a hex number "\
121 "between 0x1 and 0xFF" % arg
122 try:
123 result = int(arg, 16)
124 except ValueError:
125 raise ArgumentTypeError(error)
126
127 if result <= 0 or result > 0xff:
128 raise ArgumentTypeError(error)
129
130 return arg
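# For example, systemidtype("0x0c") and systemidtype("c") both validate
# and return the argument unchanged, while systemidtype("0x100") raises
# an ArgumentTypeError (values illustrative).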
131
132class KickStart():
133 """Kickstart parser implementation."""
134
135 DEFAULT_EXTRA_SPACE = 10*1024
136 DEFAULT_OVERHEAD_FACTOR = 1.3
137
138 def __init__(self, confpath):
139
140 self.partitions = []
141 self.bootloader = None
142 self.lineno = 0
143 self.partnum = 0
144
145 parser = KickStartParser()
146 subparsers = parser.add_subparsers()
147
148 part = subparsers.add_parser('part')
149 part.add_argument('mountpoint', nargs='?')
150 part.add_argument('--active', action='store_true')
151 part.add_argument('--align', type=int)
152 part.add_argument('--offset', type=sizetype("K", True))
153 part.add_argument('--exclude-path', nargs='+')
154 part.add_argument('--include-path', nargs='+', action='append')
155 part.add_argument('--change-directory')
156 part.add_argument("--extra-space", type=sizetype("M"))
157 part.add_argument('--fsoptions', dest='fsopts')
158 part.add_argument('--fspassno', dest='fspassno')
159 part.add_argument('--fstype', default='vfat',
160 choices=('ext2', 'ext3', 'ext4', 'btrfs',
161 'squashfs', 'vfat', 'msdos', 'erofs',
162 'swap', 'none'))
163 part.add_argument('--mkfs-extraopts', default='')
164 part.add_argument('--label')
165 part.add_argument('--use-label', action='store_true')
166 part.add_argument('--no-table', action='store_true')
167 part.add_argument('--ondisk', '--ondrive', dest='disk', default='sda')
168 part.add_argument("--overhead-factor", type=overheadtype)
169 part.add_argument('--part-name')
170 part.add_argument('--part-type')
171 part.add_argument('--rootfs-dir')
172 part.add_argument('--type', default='primary',
173 choices = ('primary', 'logical'))
174 part.add_argument('--hidden', action='store_true')
175
176 # --size and --fixed-size cannot be specified together; options
177 # --extra-space and --overhead-factor should also raise a parser
178 # error, but since nesting mutually exclusive groups does not work,
179 # --extra-space/--overhead-factor are handled later
180 sizeexcl = part.add_mutually_exclusive_group()
181 sizeexcl.add_argument('--size', type=sizetype("M"), default=0)
182 sizeexcl.add_argument('--fixed-size', type=sizetype("M"), default=0)
183
184 part.add_argument('--source')
185 part.add_argument('--sourceparams')
186 part.add_argument('--system-id', type=systemidtype)
187 part.add_argument('--use-uuid', action='store_true')
188 part.add_argument('--uuid')
189 part.add_argument('--fsuuid')
190 part.add_argument('--no-fstab-update', action='store_true')
191 part.add_argument('--mbr', action='store_true')
192
193 bootloader = subparsers.add_parser('bootloader')
194 bootloader.add_argument('--append')
195 bootloader.add_argument('--configfile')
196 bootloader.add_argument('--ptable', choices=('msdos', 'gpt', 'gpt-hybrid'),
197 default='msdos')
198 bootloader.add_argument('--timeout', type=int)
199 bootloader.add_argument('--source')
200
201 include = subparsers.add_parser('include')
202 include.add_argument('path', type=cannedpathtype)
203
204 self._parse(parser, confpath)
205 if not self.bootloader:
206 logger.warning('bootloader config not specified, using defaults\n')
207 self.bootloader = bootloader.parse_args([])
208
209 def _parse(self, parser, confpath):
210 """
211 Parse file in .wks format using provided parser.
212 """
213 with open(confpath) as conf:
214 lineno = 0
215 for line in conf:
216 line = line.strip()
217 lineno += 1
218 if line and line[0] != '#':
219 line = expand_line(line)
220 try:
221 line_args = shlex.split(line)
222 parsed = parser.parse_args(line_args)
223 except ArgumentError as err:
224 raise KickStartError('%s:%d: %s' % \
225 (confpath, lineno, err))
226 if line.startswith('part'):
227 # SquashFS does not support filesystem UUID
228 if parsed.fstype == 'squashfs':
229 if parsed.fsuuid:
230 err = "%s:%d: SquashFS does not support UUID" \
231 % (confpath, lineno)
232 raise KickStartError(err)
233 if parsed.label:
234 err = "%s:%d: SquashFS does not support LABEL" \
235 % (confpath, lineno)
236 raise KickStartError(err)
237 # erofs does not support filesystem labels
238 if parsed.fstype == 'erofs' and parsed.label:
239 err = "%s:%d: erofs does not support LABEL" % (confpath, lineno)
240 raise KickStartError(err)
241 if parsed.fstype == 'msdos' or parsed.fstype == 'vfat':
242 if parsed.fsuuid:
243 if parsed.fsuuid.upper().startswith('0X'):
244 if len(parsed.fsuuid) > 10:
245 err = "%s:%d: fsuuid %s given in wks kickstart file " \
246 "exceeds the length limit for %s filesystem. " \
247 "It should be in the form of a 32 bit hexadecimal " \
248 "number (for example, 0xABCD1234)." \
249 % (confpath, lineno, parsed.fsuuid, parsed.fstype)
250 raise KickStartError(err)
251 elif len(parsed.fsuuid) > 8:
252 err = "%s:%d: fsuuid %s given in wks kickstart file " \
253 "exceeds the length limit for %s filesystem. " \
254 "It should be in the form of a 32 bit hexadecimal " \
255 "number (for example, 0xABCD1234)." \
256 % (confpath, lineno, parsed.fsuuid, parsed.fstype)
257 raise KickStartError(err)
258 if parsed.use_label and not parsed.label:
259 err = "%s:%d: Must set the label with --label" \
260 % (confpath, lineno)
261 raise KickStartError(err)
262 # using ArgumentParser one cannot easily tell if option
263 # was passed as argument, if said option has a default
264 # value; --overhead-factor/--extra-space cannot be used
265 # with --fixed-size, so at least detect when these were
266 # passed with non-0 values ...
267 if parsed.fixed_size:
268 if parsed.overhead_factor or parsed.extra_space:
269 err = "%s:%d: arguments --overhead-factor and --extra-space not "\
270 "allowed with argument --fixed-size" \
271 % (confpath, lineno)
272 raise KickStartError(err)
273 else:
274 # ... and provide defaults if not using
275 # --fixed-size iff given option was not used
276 # (again, one cannot tell if option was passed but
277 # with value equal to 0)
278 if '--overhead-factor' not in line_args:
279 parsed.overhead_factor = self.DEFAULT_OVERHEAD_FACTOR
280 if '--extra-space' not in line_args:
281 parsed.extra_space = self.DEFAULT_EXTRA_SPACE
282
283 self.partnum += 1
284 self.partitions.append(Partition(parsed, self.partnum))
285 elif line.startswith('include'):
286 self._parse(parser, parsed.path)
287 elif line.startswith('bootloader'):
288 if not self.bootloader:
289 self.bootloader = parsed
290 # Concatenate the strings set in APPEND
291 append_var = get_bitbake_var("APPEND")
292 if append_var:
293 self.bootloader.append = ' '.join(filter(None, \
294 (self.bootloader.append, append_var)))
295 else:
296 err = "%s:%d: more than one bootloader specified" \
297 % (confpath, lineno)
298 raise KickStartError(err)
diff --git a/scripts/lib/wic/misc.py b/scripts/lib/wic/misc.py
deleted file mode 100644
index 1a7c140fa6..0000000000
--- a/scripts/lib/wic/misc.py
+++ /dev/null
@@ -1,266 +0,0 @@
1#
2# Copyright (c) 2013, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# DESCRIPTION
7# This module provides a place to collect various wic-related utils
8# for the OpenEmbedded Image Tools.
9#
10# AUTHORS
11# Tom Zanussi <tom.zanussi (at] linux.intel.com>
12#
13"""Miscellaneous functions."""
14
15import logging
16import os
17import re
18import subprocess
19import shutil
20
21from collections import defaultdict
22
23from wic import WicError
24
25logger = logging.getLogger('wic')
26
27# executable -> recipe pairs for exec_native_cmd
28NATIVE_RECIPES = {"bmaptool": "bmaptool",
29 "dumpe2fs": "e2fsprogs",
30 "grub-mkimage": "grub-efi",
31 "isohybrid": "syslinux",
32 "mcopy": "mtools",
33 "mdel" : "mtools",
34 "mdeltree" : "mtools",
35 "mdir" : "mtools",
36 "mkdosfs": "dosfstools",
37 "mkisofs": "cdrtools",
38 "mkfs.btrfs": "btrfs-tools",
39 "mkfs.erofs": "erofs-utils",
40 "mkfs.ext2": "e2fsprogs",
41 "mkfs.ext3": "e2fsprogs",
42 "mkfs.ext4": "e2fsprogs",
43 "mkfs.vfat": "dosfstools",
44 "mksquashfs": "squashfs-tools",
45 "mkswap": "util-linux",
46 "mmd": "mtools",
47 "parted": "parted",
48 "sfdisk": "util-linux",
49 "sgdisk": "gptfdisk",
50 "syslinux": "syslinux",
51 "tar": "tar"
52 }
53
54def runtool(cmdln_or_args):
55 """ wrapper for most of the subprocess calls
56 input:
57 cmdln_or_args: either an args list or a cmdln str (implies shell=True)
58 return:
59 rc, output
60 """
61 if isinstance(cmdln_or_args, list):
62 cmd = cmdln_or_args[0]
63 shell = False
64 else:
65 import shlex
66 cmd = shlex.split(cmdln_or_args)[0]
67 shell = True
68
69 sout = subprocess.PIPE
70 serr = subprocess.STDOUT
71
72 try:
73 process = subprocess.Popen(cmdln_or_args, stdout=sout,
74 stderr=serr, shell=shell)
75 sout, serr = process.communicate()
76 # combine stdout and stderr, filter None out and decode
77 out = ''.join([out.decode('utf-8') for out in [sout, serr] if out])
78 except OSError as err:
79 if err.errno == 2:
80 # [Errno 2] No such file or directory
81 raise WicError('Cannot run command: %s, lost dependency?' % cmd)
82 else:
83 raise # relay
84
85 return process.returncode, out
86
87def _exec_cmd(cmd_and_args, as_shell=False):
88 """
89 Execute command, catching stderr, stdout
90
91 Need to execute as_shell if the command uses wildcards
92 """
93 logger.debug("_exec_cmd: %s", cmd_and_args)
94 args = cmd_and_args.split()
95 logger.debug(args)
96
97 if as_shell:
98 ret, out = runtool(cmd_and_args)
99 else:
100 ret, out = runtool(args)
101 out = out.strip()
102 if ret != 0:
103 raise WicError("_exec_cmd: %s returned '%s' instead of 0\noutput: %s" % \
104 (cmd_and_args, ret, out))
105
106 logger.debug("_exec_cmd: output for %s (rc = %d): %s",
107 cmd_and_args, ret, out)
108
109 return ret, out
110
111
112def exec_cmd(cmd_and_args, as_shell=False):
113 """
114 Execute command, return output
115 """
116 return _exec_cmd(cmd_and_args, as_shell)[1]
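# For example (path illustrative): out = exec_cmd("du -ks /some/rootfs")
# returns the command's combined stdout/stderr as a stripped string.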
117
118def find_executable(cmd, paths):
119 recipe = cmd
120 if recipe in NATIVE_RECIPES:
121 recipe = NATIVE_RECIPES[recipe]
122 provided = get_bitbake_var("ASSUME_PROVIDED")
123 if provided and "%s-native" % recipe in provided:
124 return True
125
126 return shutil.which(cmd, path=paths)
127
128def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""):
129 """
130 Execute native command, catching stderr, stdout
131
132 Need to execute as_shell if the command uses wildcards
133
134 Always need to execute native commands as_shell
135 """
136 # The reason -1 is used is that there may be "export" commands prepended.
137 args = cmd_and_args.split(';')[-1].split()
138 logger.debug(args)
139
140 if pseudo:
141 cmd_and_args = pseudo + cmd_and_args
142
143 hosttools_dir = get_bitbake_var("HOSTTOOLS_DIR")
144 target_sys = get_bitbake_var("TARGET_SYS")
145
146 native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/usr/bin/%s:%s/bin:%s" % \
147 (native_sysroot, native_sysroot,
148 native_sysroot, native_sysroot, target_sys,
149 native_sysroot, hosttools_dir)
150
151 native_cmd_and_args = "export PATH=%s:$PATH;%s" % \
152 (native_paths, cmd_and_args)
153 logger.debug("exec_native_cmd: %s", native_cmd_and_args)
154
155 # If the command isn't in the native sysroot say we failed.
156 if find_executable(args[0], native_paths):
157 ret, out = _exec_cmd(native_cmd_and_args, True)
158 else:
159 ret = 127
160 out = "can't find native executable %s in %s" % (args[0], native_paths)
161
162 prog = args[0]
163 # shell command-not-found
164 if ret == 127 \
165 or (pseudo and ret == 1 and out == "Can't find '%s' in $PATH." % prog):
166 msg = "A native program %s required to build the image "\
167 "was not found (see details above).\n\n" % prog
168 recipe = NATIVE_RECIPES.get(prog)
169 if recipe:
170 msg += "Please make sure wic-tools has %s-native in its DEPENDS, "\
171 "build it with 'bitbake wic-tools' and try again.\n" % recipe
172 else:
173 msg += "Wic failed to find a recipe to build native %s. Please "\
174 "file a bug against wic.\n" % prog
175 raise WicError(msg)
176
177 return ret, out
178
179BOOTDD_EXTRA_SPACE = 16384
180
181class BitbakeVars(defaultdict):
182 """
183 Container for Bitbake variables.
184 """
185 def __init__(self):
186 defaultdict.__init__(self, dict)
187
188 # default_image and vars_dir attributes should be set from outside
189 self.default_image = None
190 self.vars_dir = None
191
192 def _parse_line(self, line, image, matcher=re.compile(r"^([a-zA-Z0-9\-_+./~]+)=(.*)")):
193 """
194 Parse one line from bitbake -e output or from .env file.
195 Put result key-value pair into the storage.
196 """
197 if "=" not in line:
198 return
199 match = matcher.match(line)
200 if not match:
201 return
202 key, val = match.groups()
203 self[image][key] = val.strip('"')
204
205 def get_var(self, var, image=None, cache=True):
206 """
207 Get bitbake variable from 'bitbake -e' output or from .env file.
208 This is a lazy method, i.e. it runs bitbake or parses the file
209 only when a variable is requested. It also caches the results.
210 """
211 if not image:
212 image = self.default_image
213
214 if image not in self:
215 if image and self.vars_dir:
216 fname = os.path.join(self.vars_dir, image + '.env')
217 if os.path.isfile(fname):
218 # parse .env file
219 with open(fname) as varsfile:
220 for line in varsfile:
221 self._parse_line(line, image)
222 else:
223 print("Couldn't get bitbake variable from %s." % fname)
224 print("File %s doesn't exist." % fname)
225 return
226 else:
227 # Get bitbake -e output
228 cmd = "bitbake -e"
229 if image:
230 cmd += " %s" % image
231
232 log_level = logger.getEffectiveLevel()
233 logger.setLevel(logging.INFO)
234 ret, lines = _exec_cmd(cmd)
235 logger.setLevel(log_level)
236
237 if ret:
238 logger.error("Couldn't get '%s' output.", cmd)
239 logger.error("Bitbake failed with error:\n%s\n", lines)
240 return
241
242 # Parse bitbake -e output
243 for line in lines.split('\n'):
244 self._parse_line(line, image)
245
246 # Make first image a default set of variables
247 if cache:
248 images = [key for key in self if key]
249 if len(images) == 1:
250 self[None] = self[image]
251
252 result = self[image].get(var)
253 if not cache:
254 self.pop(image, None)
255
256 return result
257
258# Create BB_VARS singleton
259BB_VARS = BitbakeVars()
260
261def get_bitbake_var(var, image=None, cache=True):
262 """
263 Provide old get_bitbake_var API by wrapping
264 get_var method of BB_VARS singleton.
265 """
266 return BB_VARS.get_var(var, image, cache)
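# Typical lookup (image name illustrative):
#   rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", "core-image-minimal")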
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py
deleted file mode 100644
index 5b51ab214f..0000000000
--- a/scripts/lib/wic/partition.py
+++ /dev/null
@@ -1,551 +0,0 @@
1#
2# Copyright (c) 2013-2016 Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# DESCRIPTION
7# This module provides the OpenEmbedded partition object definitions.
8#
9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
11# Ed Bartosh <ed.bartosh> (at] linux.intel.com>
12
13import logging
14import os
15import uuid
16
17from wic import WicError
18from wic.misc import exec_cmd, exec_native_cmd, get_bitbake_var
19from wic.pluginbase import PluginMgr
20
21logger = logging.getLogger('wic')
22
23class Partition():
24
25 def __init__(self, args, lineno):
26 self.args = args
27 self.active = args.active
28 self.align = args.align
29 self.disk = args.disk
30 self.device = None
31 self.extra_space = args.extra_space
32 self.exclude_path = args.exclude_path
33 self.include_path = args.include_path
34 self.change_directory = args.change_directory
35 self.fsopts = args.fsopts
36 self.fspassno = args.fspassno
37 self.fstype = args.fstype
38 self.label = args.label
39 self.use_label = args.use_label
40 self.mkfs_extraopts = args.mkfs_extraopts
41 self.mountpoint = args.mountpoint
42 self.no_table = args.no_table
43 self.num = None
44 self.offset = args.offset
45 self.overhead_factor = args.overhead_factor
46 self.part_name = args.part_name
47 self.part_type = args.part_type
48 self.rootfs_dir = args.rootfs_dir
49 self.size = args.size
50 self.fixed_size = args.fixed_size
51 self.source = args.source
52 self.sourceparams = args.sourceparams
53 self.system_id = args.system_id
54 self.use_uuid = args.use_uuid
55 self.uuid = args.uuid
56 self.fsuuid = args.fsuuid
57 self.type = args.type
58 self.no_fstab_update = args.no_fstab_update
59 self.updated_fstab_path = None
60 self.has_fstab = False
61 self.update_fstab_in_rootfs = False
62 self.hidden = args.hidden
63 self.mbr = args.mbr
64
65 self.lineno = lineno
66 self.source_file = ""
67
68 def get_extra_block_count(self, current_blocks):
69 """
70 The --size param is reflected in self.size (in kB). Given that we
71 already have current_blocks (1k) blocks, calculate and return the
72 number of (1k) blocks we need to add to get to --size, or 0 if
73 we're already there or beyond.
74 """
75 logger.debug("Requested partition size for %s: %d",
76 self.mountpoint, self.size)
77
78 if not self.size:
79 return 0
80
81 requested_blocks = self.size
82
83 logger.debug("Requested blocks %d, current_blocks %d",
84 requested_blocks, current_blocks)
85
86 if requested_blocks > current_blocks:
87 return requested_blocks - current_blocks
88 else:
89 return 0
90
91 def get_rootfs_size(self, actual_rootfs_size=0):
92 """
93 Calculate the required size of rootfs taking into consideration
94 --size/--fixed-size flags as well as overhead and extra space, as
95 specified in kickstart file. Raises an error if the
96 `actual_rootfs_size` is larger than fixed-size rootfs.
97
98 """
99 if self.fixed_size:
100 rootfs_size = self.fixed_size
101 if actual_rootfs_size > rootfs_size:
102 raise WicError("Actual rootfs size (%d kB) is larger than "
103 "allowed size %d kB" %
104 (actual_rootfs_size, rootfs_size))
105 else:
106 extra_blocks = self.get_extra_block_count(actual_rootfs_size)
107 if extra_blocks < self.extra_space:
108 extra_blocks = self.extra_space
109
110 rootfs_size = actual_rootfs_size + extra_blocks
111 rootfs_size = int(rootfs_size * self.overhead_factor)
112
113 logger.debug("Added %d extra blocks to %s to get to %d total blocks",
114 extra_blocks, self.mountpoint, rootfs_size)
115
116 return rootfs_size
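# A worked example of the non-fixed-size branch above: with the default
# extra space (10240 kB) and overhead factor (1.3), a 100000 kB rootfs
# yields int((100000 + 10240) * 1.3) = 143312 kB.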
117
118 @property
119 def disk_size(self):
120 """
121 Obtain on-disk size of partition taking into consideration
122 --size/--fixed-size options.
123
124 """
125 return self.fixed_size if self.fixed_size else self.size
126
127 def prepare(self, creator, cr_workdir, oe_builddir, rootfs_dir,
128 bootimg_dir, kernel_dir, native_sysroot, updated_fstab_path):
129 """
130 Prepare content for individual partitions, depending on
131 partition command parameters.
132 """
133 self.updated_fstab_path = updated_fstab_path
134 if self.updated_fstab_path and not (self.fstype.startswith("ext") or self.fstype == "msdos"):
135 self.update_fstab_in_rootfs = True
136
137 if not self.source:
138 if self.fstype == "none" or self.no_table:
139 return
140 if not self.size and not self.fixed_size:
141 raise WicError("The %s partition has a size of zero. Please "
142 "specify a non-zero --size/--fixed-size for that "
143 "partition." % self.mountpoint)
144
145 if self.fstype == "swap":
146 self.prepare_swap_partition(cr_workdir, oe_builddir,
147 native_sysroot)
148 self.source_file = "%s/fs.%s" % (cr_workdir, self.fstype)
149 else:
150 if self.fstype in ('squashfs', 'erofs'):
151 raise WicError("It's not possible to create empty %s "
152 "partition '%s'" % (self.fstype, self.mountpoint))
153
154 rootfs = "%s/fs_%s.%s.%s" % (cr_workdir, self.label,
155 self.lineno, self.fstype)
156 if os.path.isfile(rootfs):
157 os.remove(rootfs)
158
159 prefix = "ext" if self.fstype.startswith("ext") else self.fstype
160 method = getattr(self, "prepare_empty_partition_" + prefix)
161 method(rootfs, oe_builddir, native_sysroot)
162 self.source_file = rootfs
163 return
164
165 plugins = PluginMgr.get_plugins('source')
166
167 if self.source not in plugins:
168 raise WicError("The '%s' --source specified for %s doesn't exist.\n\t"
169 "See 'wic list source-plugins' for a list of available"
170 " --sources.\n\tSee 'wic help source-plugins' for "
171 "details on adding a new source plugin." %
172 (self.source, self.mountpoint))
173
174 srcparams_dict = {}
175 if self.sourceparams:
176 # Split sourceparams string of the form key1=val1[,key2=val2,...]
177 # into a dict. Also accepts valueless keys i.e. without =
178 splitted = self.sourceparams.split(',')
179 srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par)
180
181 plugin = PluginMgr.get_plugins('source')[self.source]
182 plugin.do_configure_partition(self, srcparams_dict, creator,
183 cr_workdir, oe_builddir, bootimg_dir,
184 kernel_dir, native_sysroot)
185 plugin.do_stage_partition(self, srcparams_dict, creator,
186 cr_workdir, oe_builddir, bootimg_dir,
187 kernel_dir, native_sysroot)
188 plugin.do_prepare_partition(self, srcparams_dict, creator,
189 cr_workdir, oe_builddir, bootimg_dir,
190 kernel_dir, rootfs_dir, native_sysroot)
191 plugin.do_post_partition(self, srcparams_dict, creator,
192 cr_workdir, oe_builddir, bootimg_dir,
193 kernel_dir, rootfs_dir, native_sysroot)
194
195 # further processing requires Partition.size to be an integer, make
196 # sure that it is one
197 if not isinstance(self.size, int):
198 raise WicError("Partition %s internal size is not an integer. "
199 "This is a bug in source plugin %s and needs to be fixed." %
200 (self.mountpoint, self.source))
201
202 if self.fixed_size and self.size > self.fixed_size:
203 raise WicError("File system image of partition %s is "
204 "larger (%d kB) than its allowed size %d kB" %
205 (self.mountpoint, self.size, self.fixed_size))
206
207 def prepare_rootfs(self, cr_workdir, oe_builddir, rootfs_dir,
208 native_sysroot, real_rootfs = True, pseudo_dir = None):
209 """
210 Prepare content for a rootfs partition i.e. create a partition
211 and fill it from a /rootfs dir.
212
213 Currently handles ext2/3/4, btrfs, vfat/msdos, squashfs and erofs.
214 """
215
216 rootfs = "%s/rootfs_%s.%s.%s" % (cr_workdir, self.label,
217 self.lineno, self.fstype)
218 if os.path.isfile(rootfs):
219 os.remove(rootfs)
220
221 p_prefix = os.environ.get("PSEUDO_PREFIX", "%s/usr" % native_sysroot)
222 if (pseudo_dir):
223 # Canonicalize the ignore paths. This corresponds to
224 # calling oe.path.canonicalize(), which is used in bitbake.conf.
225 include_paths = [rootfs_dir] + (get_bitbake_var("PSEUDO_INCLUDE_PATHS") or "").split(",")
226 canonical_paths = []
227 for path in include_paths:
228 if "$" not in path:
229 trailing_slash = path.endswith("/") and "/" or ""
230 canonical_paths.append(os.path.realpath(path) + trailing_slash)
231 include_paths = ",".join(canonical_paths)
232
233 pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix
234 pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir
235 pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir
236 pseudo += "export PSEUDO_NOSYMLINKEXP=1;"
237 pseudo += "export PSEUDO_INCLUDE_PATHS=%s;" % include_paths
238 pseudo += "%s " % get_bitbake_var("FAKEROOTCMD")
239 else:
240 pseudo = None
241
242 if not self.size and real_rootfs:
243 # The rootfs size is not set in .ks file so try to get it
244 # from bitbake variable
245 rsize_bb = get_bitbake_var('ROOTFS_SIZE')
246 rdir = get_bitbake_var('IMAGE_ROOTFS')
247 if rsize_bb and (rdir == rootfs_dir or (rootfs_dir.split('/')[-2] == "tmp-wic" and rootfs_dir.split('/')[-1][:6] == "rootfs")):
248 # Bitbake variable ROOTFS_SIZE is calculated in
249 # Image._get_rootfs_size method from meta/lib/oe/image.py
250 # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT,
251 # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE
252 self.size = int(round(float(rsize_bb)))
253 else:
254 # Bitbake variable ROOTFS_SIZE is not defined so compute it
255 # from the rootfs_dir size using the same logic found in
256 # get_rootfs_size() from meta/classes/image.bbclass
257 du_cmd = "du -ks %s" % rootfs_dir
258 out = exec_cmd(du_cmd)
259 self.size = int(out.split()[0])
260
261 prefix = "ext" if self.fstype.startswith("ext") else self.fstype
262 method = getattr(self, "prepare_rootfs_" + prefix)
263 method(rootfs, cr_workdir, oe_builddir, rootfs_dir, native_sysroot, pseudo)
264 self.source_file = rootfs
265
266 # get the rootfs size in the right units for kickstart (kB)
267 du_cmd = "du -Lbks %s" % rootfs
268 out = exec_cmd(du_cmd)
269 self.size = int(out.split()[0])
270
271 def prepare_rootfs_ext(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
272 native_sysroot, pseudo):
273 """
274 Prepare content for an ext2/3/4 rootfs partition.
275 """
276 du_cmd = "du -ks %s" % rootfs_dir
277 out = exec_cmd(du_cmd)
278 actual_rootfs_size = int(out.split()[0])
279
280 rootfs_size = self.get_rootfs_size(actual_rootfs_size)
281
282 with open(rootfs, 'w') as sparse:
283 os.ftruncate(sparse.fileno(), rootfs_size * 1024)
284
285 extraopts = self.mkfs_extraopts or "-F -i 8192"
286
287 # use hash_seed to generate reproducible ext4 images
288 (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, pseudo)
289
290 label_str = ""
291 if self.label:
292 label_str = "-L %s" % self.label
293
294 mkfs_cmd = "mkfs.%s %s %s %s -U %s -d %s" % \
295 (self.fstype, extraopts, rootfs, label_str, self.fsuuid, rootfs_dir)
296 exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
297
298 if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
299 debugfs_script_path = os.path.join(cr_workdir, "debugfs_script")
300 with open(debugfs_script_path, "w") as f:
301 f.write("cd etc\n")
302 f.write("rm fstab\n")
303 f.write("write %s fstab\n" % (self.updated_fstab_path))
304 debugfs_cmd = "debugfs -w -f %s %s" % (debugfs_script_path, rootfs)
305 exec_native_cmd(debugfs_cmd, native_sysroot)
306
307 mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs)
308 exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
309
310 if os.getenv('SOURCE_DATE_EPOCH'):
311 sde_time = hex(int(os.getenv('SOURCE_DATE_EPOCH')))
312 debugfs_script_path = os.path.join(cr_workdir, "debugfs_script")
313 files = []
314 for root, dirs, others in os.walk(rootfs_dir):
315 base = root.replace(rootfs_dir, "").rstrip(os.sep)
316 files += [ "/" if base == "" else base ]
317 files += [ base + "/" + n for n in dirs + others ]
318 with open(debugfs_script_path, "w") as f:
319 f.write("set_current_time %s\n" % (sde_time))
320 if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
321 f.write("set_inode_field /etc/fstab mtime %s\n" % (sde_time))
322 f.write("set_inode_field /etc/fstab mtime_extra 0\n")
323 for file in set(files):
324 for time in ["atime", "ctime", "crtime"]:
325 f.write("set_inode_field \"%s\" %s %s\n" % (file, time, sde_time))
326 f.write("set_inode_field \"%s\" %s_extra 0\n" % (file, time))
327 for time in ["wtime", "mkfs_time", "lastcheck"]:
328 f.write("set_super_value %s %s\n" % (time, sde_time))
329 for time in ["mtime", "first_error_time", "last_error_time"]:
330 f.write("set_super_value %s 0\n" % (time))
331 debugfs_cmd = "debugfs -w -f %s %s" % (debugfs_script_path, rootfs)
332 exec_native_cmd(debugfs_cmd, native_sysroot)
333
334 self.check_for_Y2038_problem(rootfs, native_sysroot)
335
336 def get_hash_seed_ext4(self, extraopts, pseudo):
337 if os.getenv('SOURCE_DATE_EPOCH'):
338 sde_time = int(os.getenv('SOURCE_DATE_EPOCH'))
339 if pseudo:
340 pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo)
341 else:
342 pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time
343
344 # Set hash_seed to generate deterministic directory indexes
345 namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
346 if self.fsuuid:
347 namespace = uuid.UUID(self.fsuuid)
348 hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
349 extraopts += " -E hash_seed=%s" % hash_seed
350
351 return (extraopts, pseudo)
352
353 def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
354 native_sysroot, pseudo):
355 """
356 Prepare content for a btrfs rootfs partition.
357 """
358 du_cmd = "du -ks %s" % rootfs_dir
359 out = exec_cmd(du_cmd)
360 actual_rootfs_size = int(out.split()[0])
361
362 rootfs_size = self.get_rootfs_size(actual_rootfs_size)
363
364 with open(rootfs, 'w') as sparse:
365 os.ftruncate(sparse.fileno(), rootfs_size * 1024)
366
367 label_str = ""
368 if self.label:
369 label_str = "-L %s" % self.label
370
371 mkfs_cmd = "mkfs.%s -b %d -r %s %s %s -U %s %s" % \
372 (self.fstype, rootfs_size * 1024, rootfs_dir, label_str,
373 self.mkfs_extraopts, self.fsuuid, rootfs)
374 exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
375
376 def prepare_rootfs_msdos(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
377 native_sysroot, pseudo):
378 """
379 Prepare content for a msdos/vfat rootfs partition.
380 """
381 du_cmd = "du -bks %s" % rootfs_dir
382 out = exec_cmd(du_cmd)
383 blocks = int(out.split()[0])
384
385 rootfs_size = self.get_rootfs_size(blocks)
386
387 label_str = "-n boot"
388 if self.label:
389 label_str = "-n %s" % self.label
390
391 size_str = ""
392
393 extraopts = self.mkfs_extraopts or '-S 512'
394
395 dosfs_cmd = "mkdosfs %s -i %s %s %s -C %s %d" % \
396 (label_str, self.fsuuid, size_str, extraopts, rootfs,
397 rootfs_size)
398 exec_native_cmd(dosfs_cmd, native_sysroot)
399
400 mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (rootfs, rootfs_dir)
401 exec_native_cmd(mcopy_cmd, native_sysroot)
402
403 if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
404 mcopy_cmd = "mcopy -m -i %s %s ::/etc/fstab" % (rootfs, self.updated_fstab_path)
405 exec_native_cmd(mcopy_cmd, native_sysroot)
406
407 chmod_cmd = "chmod 644 %s" % rootfs
408 exec_cmd(chmod_cmd)
409
410 prepare_rootfs_vfat = prepare_rootfs_msdos
411
412 def prepare_rootfs_squashfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
413 native_sysroot, pseudo):
414 """
415 Prepare content for a squashfs rootfs partition.
416 """
417 extraopts = self.mkfs_extraopts or '-noappend'
418 squashfs_cmd = "mksquashfs %s %s %s" % \
419 (rootfs_dir, rootfs, extraopts)
420 exec_native_cmd(squashfs_cmd, native_sysroot, pseudo=pseudo)
421
422 def prepare_rootfs_erofs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
423 native_sysroot, pseudo):
424 """
425 Prepare content for an erofs rootfs partition.
426 """
427 extraopts = self.mkfs_extraopts or ''
428 erofs_cmd = "mkfs.erofs %s -U %s %s %s" % \
429 (extraopts, self.fsuuid, rootfs, rootfs_dir)
430 exec_native_cmd(erofs_cmd, native_sysroot, pseudo=pseudo)
431
432 def prepare_empty_partition_none(self, rootfs, oe_builddir, native_sysroot):
433 pass
434
435 def prepare_empty_partition_ext(self, rootfs, oe_builddir,
436 native_sysroot):
437 """
438 Prepare an empty ext2/3/4 partition.
439 """
440 size = self.disk_size
441 with open(rootfs, 'w') as sparse:
442 os.ftruncate(sparse.fileno(), size * 1024)
443
444 extraopts = self.mkfs_extraopts or "-i 8192"
445
446 # use hash_seed to generate reproducible ext4 images
447 (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, None)
448
449 label_str = ""
450 if self.label:
451 label_str = "-L %s" % self.label
452
453 mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \
454 (self.fstype, extraopts, label_str, self.fsuuid, rootfs)
455 exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
456
457 self.check_for_Y2038_problem(rootfs, native_sysroot)
458
459 def prepare_empty_partition_btrfs(self, rootfs, oe_builddir,
460 native_sysroot):
461 """
462 Prepare an empty btrfs partition.
463 """
464 size = self.disk_size
465 with open(rootfs, 'w') as sparse:
466 os.ftruncate(sparse.fileno(), size * 1024)
467
468 label_str = ""
469 if self.label:
470 label_str = "-L %s" % self.label
471
472 mkfs_cmd = "mkfs.%s -b %d %s -U %s %s %s" % \
473 (self.fstype, self.size * 1024, label_str, self.fsuuid,
474 self.mkfs_extraopts, rootfs)
475 exec_native_cmd(mkfs_cmd, native_sysroot)
476
477 def prepare_empty_partition_msdos(self, rootfs, oe_builddir,
478 native_sysroot):
479 """
480 Prepare an empty vfat partition.
481 """
482 blocks = self.disk_size
483
484 label_str = "-n boot"
485 if self.label:
486 label_str = "-n %s" % self.label
487
488 size_str = ""
489
490 extraopts = self.mkfs_extraopts or '-S 512'
491
492 dosfs_cmd = "mkdosfs %s -i %s %s %s -C %s %d" % \
493 (label_str, self.fsuuid, extraopts, size_str, rootfs,
494 blocks)
495
496 exec_native_cmd(dosfs_cmd, native_sysroot)
497
498 chmod_cmd = "chmod 644 %s" % rootfs
499 exec_cmd(chmod_cmd)
500
501 prepare_empty_partition_vfat = prepare_empty_partition_msdos
502
503 def prepare_swap_partition(self, cr_workdir, oe_builddir, native_sysroot):
504 """
505 Prepare a swap partition.
506 """
507 path = "%s/fs.%s" % (cr_workdir, self.fstype)
508
509 with open(path, 'w') as sparse:
510 os.ftruncate(sparse.fileno(), self.size * 1024)
511
512 label_str = ""
513 if self.label:
514 label_str = "-L %s" % self.label
515
516 mkswap_cmd = "mkswap %s -U %s %s" % (label_str, self.fsuuid, path)
517 exec_native_cmd(mkswap_cmd, native_sysroot)
518
519 def check_for_Y2038_problem(self, rootfs, native_sysroot):
520 """
521 Check if the filesystem is affected by the Y2038 problem
522 (Y2038 problem = 32 bit time_t overflow in January 2038)
523 """
524 def get_err_str(part):
525 err = "The {} filesystem {} has no Y2038 support."
526 if part.mountpoint:
527 args = [part.fstype, "mounted at %s" % part.mountpoint]
528 elif part.label:
529 args = [part.fstype, "labeled '%s'" % part.label]
530 elif part.part_name:
531 args = [part.fstype, "in partition '%s'" % part.part_name]
532 else:
533 args = [part.fstype, "in partition %s" % part.num]
534 return err.format(*args)
535
536 # ext2 and ext3 are always affected by the Y2038 problem
537 if self.fstype in ["ext2", "ext3"]:
538 logger.warning(get_err_str(self))
539 return
540
541 ret, out = exec_native_cmd("dumpe2fs %s" % rootfs, native_sysroot)
542
543 # whether ext4 is affected by the Y2038 problem depends on the inode size
544 for line in out.splitlines():
545 if line.startswith("Inode size:"):
546 size = int(line.split(":")[1].strip())
547 if size < 256:
548 logger.warning("%s Inodes (of size %d) are too small." %
549 (get_err_str(self), size))
550 break
551
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py
deleted file mode 100644
index b64568339b..0000000000
--- a/scripts/lib/wic/pluginbase.py
+++ /dev/null
@@ -1,144 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright (c) 2011 Intel, Inc.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8__all__ = ['ImagerPlugin', 'SourcePlugin']
9
10import os
11import logging
12import types
13
14from collections import defaultdict
15import importlib
16import importlib.util
17
18from wic import WicError
19from wic.misc import get_bitbake_var
20
21PLUGIN_TYPES = ["imager", "source"]
22
23SCRIPTS_PLUGIN_DIR = ["scripts/lib/wic/plugins", "lib/wic/plugins"]
24
25logger = logging.getLogger('wic')
26
27PLUGINS = defaultdict(dict)
28
29class PluginMgr:
30 _plugin_dirs = []
31
32 @classmethod
33 def get_plugins(cls, ptype):
34 """Get dictionary of <plugin_name>:<class> pairs."""
35 if ptype not in PLUGIN_TYPES:
36 raise WicError('%s is not a valid plugin type' % ptype)
37
38 # collect plugin directories
39 if not cls._plugin_dirs:
40 cls._plugin_dirs = [os.path.join(os.path.dirname(__file__), 'plugins')]
41 layers = get_bitbake_var("BBLAYERS") or ''
42 for layer_path in layers.split():
43 for script_plugin_dir in SCRIPTS_PLUGIN_DIR:
44 path = os.path.join(layer_path, script_plugin_dir)
45 path = os.path.abspath(os.path.expanduser(path))
46 if path not in cls._plugin_dirs and os.path.isdir(path):
47 cls._plugin_dirs.insert(0, path)
48
49 if ptype not in PLUGINS:
50 # load all ptype plugins
51 for pdir in cls._plugin_dirs:
52 ppath = os.path.join(pdir, ptype)
53 if os.path.isdir(ppath):
54 for fname in os.listdir(ppath):
55 if fname.endswith('.py'):
56 mname = fname[:-3]
57 mpath = os.path.join(ppath, fname)
58 logger.debug("loading plugin module %s", mpath)
59 spec = importlib.util.spec_from_file_location(mname, mpath)
60 module = importlib.util.module_from_spec(spec)
61 spec.loader.exec_module(module)
62
63 return PLUGINS.get(ptype)
64
65class PluginMeta(type):
66 def __new__(cls, name, bases, attrs):
67 class_type = type.__new__(cls, name, bases, attrs)
68 if 'name' in attrs:
69 PLUGINS[class_type.wic_plugin_type][attrs['name']] = class_type
70
71 return class_type
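# Because of this metaclass, any subclass that sets a 'name' attribute is
# auto-registered in PLUGINS; a hypothetical plugin needs nothing more than:
#
#   class MySourcePlugin(SourcePlugin):
#       name = 'my-source'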
72
73class ImagerPlugin(metaclass=PluginMeta):
74 wic_plugin_type = "imager"
75
76 def do_create(self):
77 raise WicError("Method %s.do_create is not implemented" %
78 self.__class__.__name__)
79
80class SourcePlugin(metaclass=PluginMeta):
81 wic_plugin_type = "source"
82 """
83 The methods that can be implemented by --source plugins.
84
85 Any methods not implemented in a subclass inherit these.
86 """
87
88 @classmethod
89 def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir,
90 bootimg_dir, kernel_dir, native_sysroot):
91 """
92 Called after all partitions have been prepared and assembled into a
93 disk image. This provides a hook to allow finalization of a
94 disk image e.g. to write an MBR to it.
95 """
96 logger.debug("SourcePlugin: do_install_disk: disk: %s", disk_name)
97
98 @classmethod
99 def do_stage_partition(cls, part, source_params, creator, cr_workdir,
100 oe_builddir, bootimg_dir, kernel_dir,
101 native_sysroot):
102 """
103 Special content staging hook called before do_prepare_partition(),
104 normally empty.
105
106 Typically, a partition will just use the passed-in parameters,
107 e.g. the bootimg_dir as-is, but in some cases things need to
108 be more tailored, e.g. to use a deploy dir + /boot, etc. This
109 hook allows those files to be staged in a customized fashion.
110 Note that get_bitbake_var() allows you to access non-standard
111 variables that you might want to use for this.
112 """
113 logger.debug("SourcePlugin: do_stage_partition: part: %s", part)
114
115 @classmethod
116 def do_configure_partition(cls, part, source_params, creator, cr_workdir,
117 oe_builddir, bootimg_dir, kernel_dir,
118 native_sysroot):
119 """
120 Called before do_prepare_partition(), typically used to create
121 custom configuration files for a partition, for example
122 syslinux or grub config files.
123 """
124 logger.debug("SourcePlugin: do_configure_partition: part: %s", part)
125
126 @classmethod
127 def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
128 oe_builddir, bootimg_dir, kernel_dir, rootfs_dir,
129 native_sysroot):
130 """
131 Called to do the actual content population for a partition i.e. it
132 'prepares' the partition to be incorporated into the image.
133 """
134 logger.debug("SourcePlugin: do_prepare_partition: part: %s", part)
135
136 @classmethod
137 def do_post_partition(cls, part, source_params, creator, cr_workdir,
138 oe_builddir, bootimg_dir, kernel_dir, rootfs_dir,
139 native_sysroot):
140 """
141 Called after the partition is created. It is useful for post
142 operations, e.g. cryptographically signing the partition.
143 """
144 logger.debug("SourcePlugin: do_post_partition: part: %s", part)
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py
deleted file mode 100644
index 2124ceac7f..0000000000
--- a/scripts/lib/wic/plugins/imager/direct.py
+++ /dev/null
@@ -1,704 +0,0 @@
1#
2# Copyright (c) 2013, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# DESCRIPTION
7# This implements the 'direct' imager plugin class for 'wic'
8#
9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
11#
12
13import logging
14import os
15import random
16import shutil
17import tempfile
18import uuid
19
20from time import strftime
21
22from oe.path import copyhardlinktree
23
24from wic import WicError
25from wic.filemap import sparse_copy
26from wic.ksparser import KickStart, KickStartError
27from wic.pluginbase import PluginMgr, ImagerPlugin
28from wic.misc import get_bitbake_var, exec_cmd, exec_native_cmd
29
30logger = logging.getLogger('wic')
31
32class DirectPlugin(ImagerPlugin):
33 """
34 Install a system into a file containing a partitioned disk image.
35
36 An image file is formatted with a partition table, each partition
37 created from a rootfs or other OpenEmbedded build artifact and dd'ed
38 into the virtual disk. The disk image can subsequently be dd'ed onto
39 media and used on actual hardware.
40 """
41 name = 'direct'
42
43 def __init__(self, wks_file, rootfs_dir, bootimg_dir, kernel_dir,
44 native_sysroot, oe_builddir, options):
45 try:
46 self.ks = KickStart(wks_file)
47 except KickStartError as err:
48 raise WicError(str(err))
49
50 # parse possible 'rootfs=name' items
51 self.rootfs_dir = dict(rdir.split('=') for rdir in rootfs_dir.split(' '))
52 self.bootimg_dir = bootimg_dir
53 self.kernel_dir = kernel_dir
54 self.native_sysroot = native_sysroot
55 self.oe_builddir = oe_builddir
56
57 self.debug = options.debug
58 self.outdir = options.outdir
59 self.compressor = options.compressor
60 self.bmap = options.bmap
61 self.no_fstab_update = options.no_fstab_update
62 self.updated_fstab_path = None
63
64 self.name = "%s-%s" % (os.path.splitext(os.path.basename(wks_file))[0],
65 strftime("%Y%m%d%H%M"))
66 self.workdir = self.setup_workdir(options.workdir)
67 self._image = None
68 self.ptable_format = self.ks.bootloader.ptable
69 self.parts = self.ks.partitions
70
71 # as a convenience, set source to the boot partition source
72 # instead of forcing it to be set via bootloader --source
73 for part in self.parts:
74 if not self.ks.bootloader.source and part.mountpoint == "/boot":
75 self.ks.bootloader.source = part.source
76 break
77
78 image_path = self._full_path(self.workdir, self.parts[0].disk, "direct")
79 self._image = PartitionedImage(image_path, self.ptable_format,
80 self.parts, self.native_sysroot,
81 options.extra_space)
82
83 def setup_workdir(self, workdir):
84 if workdir:
85 if os.path.exists(workdir):
86 raise WicError("Internal workdir '%s' specified in wic arguments already exists!" % (workdir))
87
88 os.makedirs(workdir)
89 return workdir
90 else:
91 return tempfile.mkdtemp(dir=self.outdir, prefix='tmp.wic.')
92
93 def do_create(self):
94 """
95 Plugin entry point.
96 """
97 try:
98 self.create()
99 self.assemble()
100 self.finalize()
101 self.print_info()
102 finally:
103 self.cleanup()
104
105 def update_fstab(self, image_rootfs):
106 """Assume partition order same as in wks"""
107 if not image_rootfs:
108 return
109
110 fstab_path = image_rootfs + "/etc/fstab"
111 if not os.path.isfile(fstab_path):
112 return
113
114 with open(fstab_path) as fstab:
115 fstab_lines = fstab.readlines()
116
117 updated = False
118 for part in self.parts:
119 if not part.realnum or not part.mountpoint \
120 or part.mountpoint == "/" or not (part.mountpoint.startswith('/') or part.mountpoint == "swap"):
121 continue
122
123 if part.use_uuid:
124 if part.fsuuid:
125 # FAT UUID is different from others
126 if len(part.fsuuid) == 10:
127 device_name = "UUID=%s-%s" % \
128 (part.fsuuid[2:6], part.fsuuid[6:])
129 else:
130 device_name = "UUID=%s" % part.fsuuid
131 else:
132 device_name = "PARTUUID=%s" % part.uuid
133 elif part.use_label:
134 device_name = "LABEL=%s" % part.label
135 else:
136 # mmc device partitions are named mmcblk0p1, mmcblk0p2..
137 prefix = 'p' if part.disk.startswith('mmcblk') else ''
138 device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum)
139
140 opts = part.fsopts if part.fsopts else "defaults"
141 passno = part.fspassno if part.fspassno else "0"
142 line = "\t".join([device_name, part.mountpoint, part.fstype,
143 opts, "0", passno]) + "\n"
144
145 fstab_lines.append(line)
146 updated = True
147
148 if updated:
149 self.updated_fstab_path = os.path.join(self.workdir, "fstab")
150 with open(self.updated_fstab_path, "w") as f:
151 f.writelines(fstab_lines)
152 if os.getenv('SOURCE_DATE_EPOCH'):
153 fstab_time = int(os.getenv('SOURCE_DATE_EPOCH'))
154 os.utime(self.updated_fstab_path, (fstab_time, fstab_time))
155
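# For illustration (hypothetical values): a vfat /boot partition with
# fsuuid '0xABCD1234' and a swap partition /dev/sda3 would get
# tab-separated lines appended like:
#
#   UUID=ABCD-1234  /boot  vfat  defaults  0  0
#   /dev/sda3       swap   swap  defaults  0  0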
156 def _full_path(self, path, name, extension):
157 """ Construct full file path to a file we generate. """
158 return os.path.join(path, "%s-%s.%s" % (self.name, name, extension))
159
160 #
161 # Actual implementation
162 #
163 def create(self):
164 """
165 For 'wic', we already have our build artifacts - we just create
166 filesystems from the artifacts directly and combine them into
167 a partitioned image.
168 """
169 if not self.no_fstab_update:
170 self.update_fstab(self.rootfs_dir.get("ROOTFS_DIR"))
171
172 for part in self.parts:
173 # get rootfs size from bitbake variable if it's not set in .ks file
174 if not part.size:
175 # and if rootfs name is specified for the partition
176 image_name = self.rootfs_dir.get(part.rootfs_dir)
177 if image_name and os.path.sep not in image_name:
178 # Bitbake variable ROOTFS_SIZE is calculated in
179 # Image._get_rootfs_size method from meta/lib/oe/image.py
180 # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT,
181 # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE
182 rsize_bb = get_bitbake_var('ROOTFS_SIZE', image_name)
183 if rsize_bb:
184 part.size = int(round(float(rsize_bb)))
185
186 self._image.prepare(self)
187 self._image.layout_partitions()
188 self._image.create()
189
190 def assemble(self):
191 """
192 Assemble partitions into disk image
193 """
194 self._image.assemble()
195
196 def finalize(self):
197 """
198 Finalize the disk image.
199
200 For example, prepare the image to be bootable by e.g.
201 creating and installing a bootloader configuration.
202 """
203 source_plugin = self.ks.bootloader.source
204 disk_name = self.parts[0].disk
205 if source_plugin:
206 plugin = PluginMgr.get_plugins('source')[source_plugin]
207 plugin.do_install_disk(self._image, disk_name, self, self.workdir,
208 self.oe_builddir, self.bootimg_dir,
209 self.kernel_dir, self.native_sysroot)
210
211 full_path = self._image.path
212 # Generate .bmap
213 if self.bmap:
214 logger.debug("Generating bmap file for %s", disk_name)
215 python = os.path.join(self.native_sysroot, 'usr/bin/python3-native/python3')
216 bmaptool = os.path.join(self.native_sysroot, 'usr/bin/bmaptool')
217 exec_native_cmd("%s %s create %s -o %s.bmap" % \
218 (python, bmaptool, full_path, full_path), self.native_sysroot)
219 # Compress the image
220 if self.compressor:
221 logger.debug("Compressing disk %s with %s", disk_name, self.compressor)
222 exec_cmd("%s %s" % (self.compressor, full_path))
223
224 def print_info(self):
225 """
226 Print the image(s) and artifacts used, for the user.
227 """
228 msg = "The new image(s) can be found here:\n"
229
230 extension = "direct" + {"gzip": ".gz",
231 "bzip2": ".bz2",
232 "xz": ".xz",
233 None: ""}.get(self.compressor)
234 full_path = self._full_path(self.outdir, self.parts[0].disk, extension)
235 msg += ' %s\n\n' % full_path
236
237 msg += 'The following build artifacts were used to create the image(s):\n'
238 for part in self.parts:
239 if part.rootfs_dir is None:
240 continue
241 if part.mountpoint == '/':
242 suffix = ':'
243 else:
244 suffix = '["%s"]:' % (part.mountpoint or part.label)
245 rootdir = part.rootfs_dir
246 msg += ' ROOTFS_DIR%s%s\n' % (suffix.ljust(20), rootdir)
247
248 msg += ' BOOTIMG_DIR: %s\n' % self.bootimg_dir
249 msg += ' KERNEL_DIR: %s\n' % self.kernel_dir
250 msg += ' NATIVE_SYSROOT: %s\n' % self.native_sysroot
251
252 logger.info(msg)
253
254 @property
255 def rootdev(self):
256 """
257 Get root device name to use as a 'root' parameter
258 in kernel command line.
259
260 Assume partition order same as in wks
261 """
262 for part in self.parts:
263 if part.mountpoint == "/":
264 if part.uuid:
265 return "PARTUUID=%s" % part.uuid
266 elif part.label and self.ptable_format != 'msdos':
267 return "PARTLABEL=%s" % part.label
268 else:
269 suffix = 'p' if part.disk.startswith('mmcblk') else ''
270 return "/dev/%s%s%-d" % (part.disk, suffix, part.realnum)
271
272 def cleanup(self):
273 if self._image:
274 self._image.cleanup()
275
276 # Move results to the output dir
277 if not os.path.exists(self.outdir):
278 os.makedirs(self.outdir)
279
280 for fname in os.listdir(self.workdir):
281 path = os.path.join(self.workdir, fname)
282 if os.path.isfile(path):
283 shutil.move(path, os.path.join(self.outdir, fname))
284
285 # remove work directory when it is not in debugging mode
286 if not self.debug:
287 shutil.rmtree(self.workdir, ignore_errors=True)
288
289# Overhead of the MBR partitioning scheme (just one sector)
290MBR_OVERHEAD = 1
291
292# Overhead of the GPT partitioning scheme
293GPT_OVERHEAD = 34
294
295# Size of a sector in bytes
296SECTOR_SIZE = 512
297
298class PartitionedImage():
299 """
300 Partitioned image in a file.
301 """
302
303 def __init__(self, path, ptable_format, partitions, native_sysroot=None, extra_space=0):
304 self.path = path # Path to the image file
305 self.numpart = 0 # Number of allocated partitions
306 self.realpart = 0 # Number of partitions in the partition table
307 self.primary_part_num = 0 # Number of primary partitions (msdos)
308 self.extendedpart = 0 # Create extended partition before this logical partition (msdos)
309 self.extended_size_sec = 0 # Size of extended partition (msdos)
310 self.logical_part_cnt = 0 # Number of total logical partitions (msdos)
311 self.offset = 0 # Offset of next partition (in sectors)
312 self.min_size = 0 # Minimum required disk size to fit
313 # all partitions (in bytes)
314 self.ptable_format = ptable_format # Partition table format
315 # Disk system identifier
316 if os.getenv('SOURCE_DATE_EPOCH'):
317 self.identifier = random.Random(int(os.getenv('SOURCE_DATE_EPOCH'))).randint(1, 0xffffffff)
318 else:
319 self.identifier = random.SystemRandom().randint(1, 0xffffffff)
320
321 self.partitions = partitions
322 self.partimages = []
323 # Size of a sector used in calculations
324 sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE')
325 if sector_size_str is not None:
326 try:
327 self.sector_size = int(sector_size_str)
328 except ValueError:
329 self.sector_size = SECTOR_SIZE
330 else:
331 self.sector_size = SECTOR_SIZE
332
333 self.native_sysroot = native_sysroot
334 num_real_partitions = len([p for p in self.partitions if not p.no_table])
335 self.extra_space = extra_space
336
337 # calculate the real partition number, accounting for partitions not
338 # in the partition table and logical partitions
339 realnum = 0
340 for part in self.partitions:
341 if part.no_table:
342 part.realnum = 0
343 else:
344 realnum += 1
345 if self.ptable_format == 'msdos' and realnum > 3 and num_real_partitions > 4:
346 part.realnum = realnum + 1
347 continue
348 part.realnum = realnum
349
350 # generate partition and filesystem UUIDs
351 for part in self.partitions:
352 if not part.uuid and part.use_uuid:
353 if self.ptable_format in ('gpt', 'gpt-hybrid'):
354 part.uuid = str(uuid.uuid4())
355 else: # msdos partition table
356 part.uuid = '%08x-%02d' % (self.identifier, part.realnum)
357 if not part.fsuuid:
358 if part.fstype == 'vfat' or part.fstype == 'msdos':
359 part.fsuuid = '0x' + str(uuid.uuid4())[:8].upper()
360 else:
361 part.fsuuid = str(uuid.uuid4())
362 else:
363 #make sure the fsuuid for vfat/msdos align with format 0xYYYYYYYY
364 if part.fstype == 'vfat' or part.fstype == 'msdos':
365 if part.fsuuid.upper().startswith("0X"):
366 part.fsuuid = '0x' + part.fsuuid.upper()[2:].rjust(8,"0")
367 else:
368 part.fsuuid = '0x' + part.fsuuid.upper().rjust(8,"0")
369
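# For illustration, the identifier formats produced above (hypothetical
# input values):
#   '%08x-%02d' % (0x1234abcd, 2)                  -> '1234abcd-02' (msdos)
#   '0x' + 'abc'.upper().rjust(8, '0')             -> '0x00000ABC'  (vfat,
#                                                     no '0x' prefix given)
#   '0x' + '0xdeadbeef'.upper()[2:].rjust(8, '0')  -> '0xDEADBEEF'  ('0x'
#                                                     prefix normalized)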
370 def prepare(self, imager):
371 """Prepare an image. Call prepare method of all image partitions."""
372 for part in self.partitions:
373 # need to create the filesystems in order to get their
374 # sizes before we can add them and do the layout.
375 part.prepare(imager, imager.workdir, imager.oe_builddir,
376 imager.rootfs_dir, imager.bootimg_dir,
377 imager.kernel_dir, imager.native_sysroot,
378 imager.updated_fstab_path)
379
380 # Converting kB to sectors for parted
381 part.size_sec = part.disk_size * 1024 // self.sector_size
382
383 def layout_partitions(self):
384 """ Lay out the partitions, i.e. calculate the position of every
385 partition on the disk. The 'ptable_format' attribute defines the
386 partition table format and may be "msdos", "gpt" or "gpt-hybrid". """
387
388 logger.debug("Assigning %s partitions to disks", self.ptable_format)
389
390 # The number of primary and logical partitions. Extended partition and
391 # partitions not listed in the table are not included.
392 num_real_partitions = len([p for p in self.partitions if not p.no_table])
393
394 # Go through partitions in the order they are added in .ks file
395 for num in range(len(self.partitions)):
396 part = self.partitions[num]
397
398 if self.ptable_format == 'msdos' and part.part_name:
399 raise WicError("setting custom partition name is not " \
400 "implemented for msdos partitions")
401
402 if self.ptable_format == 'msdos' and part.part_type:
403 # The --part-type can also be implemented for MBR partitions,
404 # in which case it would map to the 1-byte "partition type"
405 # field at offset 4 of the partition entry.
406 raise WicError("setting custom partition type is not " \
407 "implemented for msdos partitions")
408
409 if part.mbr and self.ptable_format != 'gpt-hybrid':
410 raise WicError("Partition may only be included in MBR with " \
411 "a gpt-hybrid partition table")
412
413 # Get the disk where the partition is located
414 self.numpart += 1
415 if not part.no_table:
416 self.realpart += 1
417
418 if self.numpart == 1:
419 if self.ptable_format == "msdos":
420 overhead = MBR_OVERHEAD
421 elif self.ptable_format in ("gpt", "gpt-hybrid"):
422 overhead = GPT_OVERHEAD
423
424 # Skip the sectors required for the partitioning scheme overhead
425 self.offset += overhead
426
427 if self.ptable_format == "msdos":
428 if self.primary_part_num > 3 or \
429 (self.extendedpart == 0 and self.primary_part_num >= 3 and num_real_partitions > 4):
430 part.type = 'logical'
431 # Reserve a sector for EBR for every logical partition
432 # before alignment is performed.
433 if part.type == 'logical':
434 self.offset += 2
435
436 align_sectors = 0
437 if part.align:
438 # If this is not the first partition and alignment is set, we
439 # need to align the partition.
440 # FIXME: This leaves empty space on the disk. To fill the
441 # gaps we could enlarge the previous partition?
442
443 # Calc how much the alignment is off.
444 align_sectors = self.offset % (part.align * 1024 // self.sector_size)
445
446 if align_sectors:
447 # If partition is not aligned as required, we need
448 # to move forward to the next alignment point
449 align_sectors = (part.align * 1024 // self.sector_size) - align_sectors
450
451 logger.debug("Realignment for %s%s with %s sectors, original"
452 " offset %s, target alignment is %sK.",
453 part.disk, self.numpart, align_sectors,
454 self.offset, part.align)
455
456 # increase the offset so we actually start the partition on right alignment
457 self.offset += align_sectors
458
459 if part.offset is not None:
460 offset = part.offset // self.sector_size
461
462 if offset * self.sector_size != part.offset:
463 raise WicError("Could not place %s%s at offset %d with sector size %d" % (part.disk, self.numpart, part.offset, self.sector_size))
464
465 delta = offset - self.offset
466 if delta < 0:
467 raise WicError("Could not place %s%s at offset %d: next free sector is %d (delta: %d)" % (part.disk, self.numpart, part.offset, self.offset, delta))
468
469 logger.debug("Skipping %d sectors to place %s%s at offset %dK",
470 delta, part.disk, self.numpart, part.offset)
471
472 self.offset = offset
473
474 part.start = self.offset
475 self.offset += part.size_sec
476
477 if not part.no_table:
478 part.num = self.realpart
479 else:
480 part.num = 0
481
482 if self.ptable_format == "msdos" and not part.no_table:
483 if part.type == 'logical':
484 self.logical_part_cnt += 1
485 part.num = self.logical_part_cnt + 4
486 if self.extendedpart == 0:
487 # Create extended partition as a primary partition
488 self.primary_part_num += 1
489 self.extendedpart = part.num
490 else:
491 self.extended_size_sec += align_sectors
492 self.extended_size_sec += part.size_sec + 2
493 else:
494 self.primary_part_num += 1
495 part.num = self.primary_part_num
496
497 logger.debug("Assigned %s to %s%d, sectors range %d-%d size %d "
498 "sectors (%d bytes).", part.mountpoint, part.disk,
499 part.num, part.start, self.offset - 1, part.size_sec,
500 part.size_sec * self.sector_size)
501
502 # Once all the partitions have been laid out, we can calculate the
503 # minimum disk size
504 self.min_size = self.offset
505 if self.ptable_format in ("gpt", "gpt-hybrid"):
506 self.min_size += GPT_OVERHEAD
507
508 self.min_size *= self.sector_size
509 self.min_size += self.extra_space
510
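# Worked example of the layout above (hypothetical: GPT, 512-byte sectors,
# extra_space 0, a single partition with --align 1024 and a 16 MiB payload,
# i.e. 32768 sectors):
#   offset   = 34                    # GPT_OVERHEAD for the first partition
#   step     = 1024 * 1024 // 512    # = 2048 sectors per 1024K alignment
#   skip     = 2048 - (34 % 2048)    # = 2014, so part.start = 2048
#   offset   = 2048 + 32768          # = 34816 after the partition
#   min_size = (34816 + 34) * 512    # = 17843200 bytes; the second
#                                    # GPT_OVERHEAD covers the backup GPT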
511 def _create_partition(self, device, parttype, fstype, start, size):
512 """ Create a partition on an image described by the 'device' object. """
513
514 # Start is included in the size, so we need to subtract one from the end.
515 end = start + size - 1
516 logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors",
517 parttype, start, end, size)
518
519 cmd = "export PARTED_SECTOR_SIZE=%d; parted -s %s unit s mkpart %s" % \
520 (self.sector_size, device, parttype)
521 if fstype:
522 cmd += " %s" % fstype
523 cmd += " %d %d" % (start, end)
524
525 return exec_native_cmd(cmd, self.native_sysroot)
526
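# For illustration, with sector_size 512 and the example values from the
# layout sketch above (start 2048, size 32768, hypothetical image path),
# the resulting native command would be:
#   export PARTED_SECTOR_SIZE=512; parted -s /tmp/wic/image.direct \
#       unit s mkpart primary ext2 2048 34815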
527 def _write_identifier(self, device, identifier):
528 logger.debug("Set disk identifier %x", identifier)
529 with open(device, 'r+b') as img:
530 img.seek(0x1B8)
531 img.write(identifier.to_bytes(4, 'little'))
532
533 def _make_disk(self, device, ptable_format, min_size):
534 logger.debug("Creating sparse file %s", device)
535 with open(device, 'w') as sparse:
536 os.ftruncate(sparse.fileno(), min_size)
537
538 logger.debug("Initializing partition table for %s", device)
539 exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s mklabel %s" %
540 (self.sector_size, device, ptable_format), self.native_sysroot)
541
542 def _write_disk_guid(self):
543 if self.ptable_format in ('gpt', 'gpt-hybrid'):
544 if os.getenv('SOURCE_DATE_EPOCH'):
545 self.disk_guid = uuid.UUID(int=int(os.getenv('SOURCE_DATE_EPOCH')))
546 else:
547 self.disk_guid = uuid.uuid4()
548
549 logger.debug("Set disk guid %s", self.disk_guid)
550 sfdisk_cmd = "sfdisk --sector-size %s --disk-id %s %s" % \
551 (self.sector_size, self.path, self.disk_guid)
552 exec_native_cmd(sfdisk_cmd, self.native_sysroot)
553
554 def create(self):
555 self._make_disk(self.path,
556 "gpt" if self.ptable_format == "gpt-hybrid" else self.ptable_format,
557 self.min_size)
558
559 self._write_identifier(self.path, self.identifier)
560 self._write_disk_guid()
561
562 if self.ptable_format == "gpt-hybrid":
563 mbr_path = self.path + ".mbr"
564 self._make_disk(mbr_path, "msdos", self.min_size)
565 self._write_identifier(mbr_path, self.identifier)
566
567 logger.debug("Creating partitions")
568
569 hybrid_mbr_part_num = 0
570
571 for part in self.partitions:
572 if part.num == 0:
573 continue
574
575 if self.ptable_format == "msdos" and part.num == self.extendedpart:
576 # Create an extended partition (note: extended
577 # partition is described in MBR and contains all
578 # logical partitions). The logical partitions save a
579 # sector for an EBR just before the start of a
580 # partition. The extended partition must start one
581 # sector before the start of the first logical
582 # partition. This way the first EBR is inside of the
583 # extended partition. Since the extended partitions
584 # starts a sector before the first logical partition,
585 # add a sector at the back, so that there is enough
586 # room for all logical partitions.
587 self._create_partition(self.path, "extended",
588 None, part.start - 2,
589 self.extended_size_sec)
590
591 if part.fstype == "swap":
592 parted_fs_type = "linux-swap"
593 elif part.fstype == "vfat":
594 parted_fs_type = "fat32"
595 elif part.fstype == "msdos":
596 parted_fs_type = "fat16"
597 if not part.system_id:
598 part.system_id = '0x6' # FAT16
599 else:
600 # Type for ext2/ext3/ext4/btrfs
601 parted_fs_type = "ext2"
602
603 # Boot ROM of OMAP boards require vfat boot partition to have an
604 # even number of sectors.
605 if part.mountpoint == "/boot" and part.fstype in ["vfat", "msdos"] \
606 and part.size_sec % 2:
607 logger.debug("Subtracting one sector from '%s' partition to "
608 "get even number of sectors for the partition",
609 part.mountpoint)
610 part.size_sec -= 1
611
612 self._create_partition(self.path, part.type,
613 parted_fs_type, part.start, part.size_sec)
614
615 if self.ptable_format == "gpt-hybrid" and part.mbr:
616 hybrid_mbr_part_num += 1
617 if hybrid_mbr_part_num > 4:
618 raise WicError("Extended MBR partitions are not supported in hybrid MBR")
619 self._create_partition(mbr_path, "primary",
620 parted_fs_type, part.start, part.size_sec)
621
622 if self.ptable_format in ("gpt", "gpt-hybrid") and (part.part_name or part.label):
623 partition_label = part.part_name if part.part_name else part.label
624 logger.debug("partition %d: set name to %s",
625 part.num, partition_label)
626 exec_native_cmd("sfdisk --sector-size %s --part-label %s %d %s" % \
627 (self.sector_size, self.path, part.num,
628 partition_label), self.native_sysroot)
629 if part.part_type:
630 logger.debug("partition %d: set type UID to %s",
631 part.num, part.part_type)
632 exec_native_cmd("sfdisk --sector-size %s --part-type %s %d %s" % \
633 (self.sector_size, self.path, part.num,
634 part.part_type), self.native_sysroot)
635
636 if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"):
637 logger.debug("partition %d: set UUID to %s",
638 part.num, part.uuid)
639 exec_native_cmd("sfdisk --sector-size %s --part-uuid %s %d %s" % \
640 (self.sector_size, self.path, part.num, part.uuid),
641 self.native_sysroot)
642
643 if part.active:
644 flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot"
645 logger.debug("Set '%s' flag for partition '%s' on disk '%s'",
646 flag_name, part.num, self.path)
647 exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \
648 (self.sector_size, self.path, part.num, flag_name),
649 self.native_sysroot)
650 if self.ptable_format == 'gpt-hybrid' and part.mbr:
651 exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \
652 (self.sector_size, mbr_path, hybrid_mbr_part_num, "boot"),
653 self.native_sysroot)
654 if part.system_id:
655 exec_native_cmd("sfdisk --sector-size %s --part-type %s %s %s" % \
656 (self.sector_size, self.path, part.num, part.system_id),
657 self.native_sysroot)
658
659 if part.hidden and self.ptable_format == "gpt":
660 logger.debug("Set hidden attribute for partition '%s' on disk '%s'",
661 part.num, self.path)
662 exec_native_cmd("sfdisk --sector-size %s --part-attrs %s %s RequiredPartition" % \
663 (self.sector_size, self.path, part.num),
664 self.native_sysroot)
665
666 if self.ptable_format == "gpt-hybrid":
667 # Write a protective GPT partition
668 hybrid_mbr_part_num += 1
669 if hybrid_mbr_part_num > 4:
670 raise WicError("Extended MBR partitions are not supported in hybrid MBR")
671
672 # parted cannot directly create a protective GPT partition, so
673 # create with an arbitrary type, then change it to the correct type
674 # with sfdisk
675 self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD)
676 exec_native_cmd("sfdisk --sector-size %s --part-type %s %d 0xee" % \
677 (self.sector_size, mbr_path, hybrid_mbr_part_num),
678 self.native_sysroot)
679
680 # Copy hybrid MBR
681 with open(mbr_path, "rb") as mbr_file:
682 with open(self.path, "r+b") as image_file:
683 mbr = mbr_file.read(512)
684 image_file.write(mbr)
685
686 def cleanup(self):
687 pass
688
689 def assemble(self):
690 logger.debug("Installing partitions")
691
692 for part in self.partitions:
693 source = part.source_file
694 if source:
695 # install source_file contents into a partition
696 sparse_copy(source, self.path, seek=part.start * self.sector_size)
697
698 logger.debug("Installed %s in partition %d, sectors %d-%d, "
699 "size %d sectors", source, part.num, part.start,
700 part.start + part.size_sec - 1, part.size_sec)
701
702 partimage = self.path + '.p%d' % part.num
703 os.rename(source, partimage)
704 self.partimages.append(partimage)
diff --git a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py
deleted file mode 100644
index 5bd7390680..0000000000
--- a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py
+++ /dev/null
@@ -1,213 +0,0 @@
1#
2# This program is free software; you can redistribute it and/or modify
3# it under the terms of the GNU General Public License version 2 as
4# published by the Free Software Foundation.
5#
6# This program is distributed in the hope that it will be useful,
7# but WITHOUT ANY WARRANTY; without even the implied warranty of
8# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9# GNU General Public License for more details.
10#
11# You should have received a copy of the GNU General Public License along
12# with this program; if not, write to the Free Software Foundation, Inc.,
13# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
14#
15# DESCRIPTION
16# This implements the 'bootimg-biosplusefi' source plugin class for 'wic'
17#
18# AUTHORS
19# William Bourque <wbourque [at) gmail.com>
20
21import os
import types
22
23from wic.pluginbase import SourcePlugin
24from importlib.machinery import SourceFileLoader
25
26class BootimgBiosPlusEFIPlugin(SourcePlugin):
27 """
28 Create MBR + EFI boot partition
29
30 This plugin creates a boot partition that contains both
31 legacy BIOS and EFI content. It will be able to boot from both.
32 This is useful when managing a PC fleet with some older machines
33 without EFI support.
34
35 Note it is possible to create an image that can boot from both
36 legacy BIOS and EFI by defining two partitions: one with arg
37 --source bootimg-efi and another one with --source bootimg-pcbios.
38 However, this method has the obvious downside that it requires TWO
39 partitions to be created on the storage device.
40 Both partitions will also be marked as "bootable", which does not work
41 on most BIOSes, as the BIOS often uses the "bootable" flag to determine
42 what to boot. If you have such a BIOS, you need to manually remove the
43 "bootable" flag from the EFI partition for the drive to be bootable.
44 Having two partitions also seems to confuse wic: the content of
45 the first partition will be duplicated into the second, even though it
46 will not be used at all.
47
48 Also, unlike "isoimage-isohybrid", which also does BIOS and EFI, this
49 plugin allows you to have more than a single rootfs partition and does
50 not turn the rootfs into an initramfs RAM image.
51
52 This plugin is made to put everything into a single /boot partition so it
53 does not have the limitations listed above.
54
55 The plugin tries not to reimplement what has already
56 been done in other plugins; as such it imports "bootimg-pcbios"
57 and "bootimg-efi".
58 Plugin "bootimg-pcbios" is used to generate the legacy BIOS boot.
59 Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it
60 requires a --sourceparams argument to know which loader to use; refer
61 to the "bootimg-efi" code/documentation for the list of loaders.
62
63 Imports are handled with "SourceFileLoader" from importlib, as it is
64 otherwise very difficult to import modules that have a hyphen "-" in
65 their filename.
66 The SourcePlugin() methods used in the plugins (do_install_disk,
67 do_configure_partition, do_prepare_partition) are then called on both,
68 beginning with "bootimg-efi".
69
70 Plugin options, such as "--sourceparams", can still be passed to a
71 plugin, as long as they do not cause issues in the other plugin.
72
73 Example wic configuration:
74 part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\
75 --ondisk sda --label os_boot --active --align 1024 --use-uuid
76 """
77
78 name = 'bootimg-biosplusefi'
79
80 __PCBIOS_MODULE_NAME = "bootimg-pcbios"
81 __EFI_MODULE_NAME = "bootimg-efi"
82
83 __imgEFIObj = None
84 __imgBiosObj = None
85
86 @classmethod
87 def __init__(cls):
88 """
89 Constructor (init)
90 """
91
92 # XXX
93 # For some reason, the __init__ constructor is never called.
94 # Something to do with how pluginbase works?
95 cls.__instanciateSubClasses()
96
97 @classmethod
98 def __instanciateSubClasses(cls):
99 """
100 Import and instantiate the "bootimg-pcbios" and "bootimg-efi" plugins.
101 """
102
103 # Import bootimg-pcbios (class name "BootimgPcbiosPlugin")
104 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
105 cls.__PCBIOS_MODULE_NAME + ".py")
106 loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath)
107 mod = types.ModuleType(loader.name)
108 loader.exec_module(mod)
109 cls.__imgBiosObj = mod.BootimgPcbiosPlugin()
110
111 # Import bootimg-efi (class name "BootimgEFIPlugin")
112 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
113 cls.__EFI_MODULE_NAME + ".py")
114 loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath)
115 mod = types.ModuleType(loader.name)
116 loader.exec_module(mod)
117 cls.__imgEFIObj = mod.BootimgEFIPlugin()
118
119 @classmethod
120 def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir,
121 bootimg_dir, kernel_dir, native_sysroot):
122 """
123 Called after all partitions have been prepared and assembled into a
124 disk image.
125 """
126
127 if ( (not cls.__imgEFIObj) or (not cls.__imgBiosObj) ):
128 cls.__instanciateSubClasses()
129
130 cls.__imgEFIObj.do_install_disk(
131 disk,
132 disk_name,
133 creator,
134 workdir,
135 oe_builddir,
136 bootimg_dir,
137 kernel_dir,
138 native_sysroot)
139
140 cls.__imgBiosObj.do_install_disk(
141 disk,
142 disk_name,
143 creator,
144 workdir,
145 oe_builddir,
146 bootimg_dir,
147 kernel_dir,
148 native_sysroot)
149
150 @classmethod
151 def do_configure_partition(cls, part, source_params, creator, cr_workdir,
152 oe_builddir, bootimg_dir, kernel_dir,
153 native_sysroot):
154 """
155 Called before do_prepare_partition()
156 """
157
158 if ( (not cls.__imgEFIObj) or (not cls.__imgBiosObj) ):
159 cls.__instanciateSubClasses()
160
161 cls.__imgEFIObj.do_configure_partition(
162 part,
163 source_params,
164 creator,
165 cr_workdir,
166 oe_builddir,
167 bootimg_dir,
168 kernel_dir,
169 native_sysroot)
170
171 cls.__imgBiosObj.do_configure_partition(
172 part,
173 source_params,
174 creator,
175 cr_workdir,
176 oe_builddir,
177 bootimg_dir,
178 kernel_dir,
179 native_sysroot)
180
181 @classmethod
182 def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
183 oe_builddir, bootimg_dir, kernel_dir,
184 rootfs_dir, native_sysroot):
185 """
186 Called to do the actual content population for a partition i.e. it
187 'prepares' the partition to be incorporated into the image.
188 """
189
190 if ( (not cls.__imgEFIObj) or (not cls.__imgBiosObj) ):
191 cls.__instanciateSubClasses()
192
193 cls.__imgEFIObj.do_prepare_partition(
194 part,
195 source_params,
196 creator,
197 cr_workdir,
198 oe_builddir,
199 bootimg_dir,
200 kernel_dir,
201 rootfs_dir,
202 native_sysroot)
203
204 cls.__imgBiosObj.do_prepare_partition(
205 part,
206 source_params,
207 creator,
208 cr_workdir,
209 oe_builddir,
210 bootimg_dir,
211 kernel_dir,
212 rootfs_dir,
213 native_sysroot)
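Reduced to its essentials, the hyphen-safe import described in the class docstring works like the following standalone sketch, which mirrors the deleted code above (no new API is introduced):

    import os
    import types

    from importlib.machinery import SourceFileLoader

    # "import bootimg-pcbios" is a syntax error, so execute the file into a
    # fresh module object instead
    module_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               "bootimg-pcbios.py")
    loader = SourceFileLoader("bootimg-pcbios", module_path)
    mod = types.ModuleType(loader.name)
    loader.exec_module(mod)
    plugin = mod.BootimgPcbiosPlugin()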
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg-efi.py
deleted file mode 100644
index 38da5080fb..0000000000
--- a/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ /dev/null
@@ -1,435 +0,0 @@
1#
2# Copyright (c) 2014, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# DESCRIPTION
7# This implements the 'bootimg-efi' source plugin class for 'wic'
8#
9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
11#
12
13import logging
14import os
15import tempfile
16import shutil
17import re
18
19from glob import glob
20
21from wic import WicError
22from wic.engine import get_custom_config
23from wic.pluginbase import SourcePlugin
24from wic.misc import (exec_cmd, exec_native_cmd,
25 get_bitbake_var, BOOTDD_EXTRA_SPACE)
26
27logger = logging.getLogger('wic')
28
29class BootimgEFIPlugin(SourcePlugin):
30 """
31 Create EFI boot partition.
32 This plugin supports GRUB 2 and systemd-boot bootloaders.
33 """
34
35 name = 'bootimg-efi'
36
37 @classmethod
38 def _copy_additional_files(cls, hdddir, initrd, dtb):
39 bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
40 if not bootimg_dir:
41 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
42
43 if initrd:
44 initrds = initrd.split(';')
45 for rd in initrds:
46 cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, rd, hdddir)
47 out = exec_cmd(cp_cmd, True)
48 logger.debug("initrd files:\n%s" % (out))
49 else:
50 logger.debug("Ignoring missing initrd")
51
52 if dtb:
53 if ';' in dtb:
54 raise WicError("Only one DTB supported, exiting")
55 cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, dtb, hdddir)
56 out = exec_cmd(cp_cmd, True)
57 logger.debug("dtb files:\n%s" % (out))
58
59 @classmethod
60 def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params):
61 """
62 Create loader-specific (grub-efi) config
63 """
64 configfile = creator.ks.bootloader.configfile
65 custom_cfg = None
66 if configfile:
67 custom_cfg = get_custom_config(configfile)
68 if custom_cfg:
69 # Use a custom configuration for grub
70 grubefi_conf = custom_cfg
71 logger.debug("Using custom configuration file "
72 "%s for grub.cfg", configfile)
73 else:
74 raise WicError("configfile is specified but failed to "
75 "get it from %s." % configfile)
76
77 initrd = source_params.get('initrd')
78 dtb = source_params.get('dtb')
79
80 cls._copy_additional_files(hdddir, initrd, dtb)
81
82 if not custom_cfg:
83 # Create grub configuration using parameters from wks file
84 bootloader = creator.ks.bootloader
85 title = source_params.get('title')
86
87 grubefi_conf = ""
88 grubefi_conf += "serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1\n"
89 grubefi_conf += "default=boot\n"
90 grubefi_conf += "timeout=%s\n" % bootloader.timeout
91 grubefi_conf += "menuentry '%s'{\n" % (title if title else "boot")
92
93 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
94 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
95 if get_bitbake_var("INITRAMFS_IMAGE"):
96 kernel = "%s-%s.bin" % \
97 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
98
99 label = source_params.get('label')
100 label_conf = "root=%s" % creator.rootdev
101 if label:
102 label_conf = "LABEL=%s" % label
103
104 grubefi_conf += "linux /%s %s rootwait %s\n" \
105 % (kernel, label_conf, bootloader.append)
106
107 if initrd:
108 initrds = initrd.split(';')
109 grubefi_conf += "initrd"
110 for rd in initrds:
111 grubefi_conf += " /%s" % rd
112 grubefi_conf += "\n"
113
114 if dtb:
115 grubefi_conf += "devicetree /%s\n" % dtb
116
117 grubefi_conf += "}\n"
118
119 logger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg",
120 cr_workdir)
121 cfg = open("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, "w")
122 cfg.write(grubefi_conf)
123 cfg.close()
124
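# For illustration, with hypothetical values (timeout 5, kernel 'bzImage',
# rootdev 'PARTUUID=0f04c7b9-02', append 'console=ttyS0', no title, label,
# initrd or dtb), the generated grub.cfg would read:
#
#   serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1
#   default=boot
#   timeout=5
#   menuentry 'boot'{
#   linux /bzImage root=PARTUUID=0f04c7b9-02 rootwait console=ttyS0
#   }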
125 @classmethod
126 def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params):
127 """
128 Create loader-specific systemd-boot/gummiboot config. Unified Kernel Image (uki)
129 support is handled in the image recipe by uki.bbclass; only the systemd-boot loader
130 config and the ESP partition structure are created here.
131 """
132 # detect uki.bbclass usage
133 image_classes = get_bitbake_var("IMAGE_CLASSES").split()
134 unified_image = False
135 if "uki" in image_classes:
136 unified_image = True
137
138 install_cmd = "install -d %s/loader" % hdddir
139 exec_cmd(install_cmd)
140
141 install_cmd = "install -d %s/loader/entries" % hdddir
142 exec_cmd(install_cmd)
143
144 bootloader = creator.ks.bootloader
145 loader_conf = ""
146
147 # 5 seconds is a sensible default timeout
148 loader_conf += "timeout %d\n" % (bootloader.timeout or 5)
149
150 logger.debug("Writing systemd-boot config "
151 "%s/hdd/boot/loader/loader.conf", cr_workdir)
152 cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w")
153 cfg.write(loader_conf)
154 logger.debug("loader.conf:\n%s" % (loader_conf))
155 cfg.close()
156
157 initrd = source_params.get('initrd')
158 dtb = source_params.get('dtb')
159 if not unified_image:
160 cls._copy_additional_files(hdddir, initrd, dtb)
161
162 configfile = creator.ks.bootloader.configfile
163 custom_cfg = None
164 boot_conf = ""
165 if configfile:
166 custom_cfg = get_custom_config(configfile)
167 if custom_cfg:
168 # Use a custom configuration for systemd-boot
169 boot_conf = custom_cfg
170 logger.debug("Using custom configuration file "
171 "%s for systemd-boots's boot.conf", configfile)
172 else:
173 raise WicError("configfile is specified but failed to "
174 "get it from %s." % configfile)
175 else:
176 # Create systemd-boot configuration using parameters from wks file
177 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
178 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
179 if get_bitbake_var("INITRAMFS_IMAGE"):
180 kernel = "%s-%s.bin" % \
181 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
182
183 title = source_params.get('title')
184
185 boot_conf += "title %s\n" % (title if title else "boot")
186 boot_conf += "linux /%s\n" % kernel
187
188 label = source_params.get('label')
189 label_conf = "LABEL=Boot root=%s" % creator.rootdev
190 if label:
191 label_conf = "LABEL=%s" % label
192
193 boot_conf += "options %s %s\n" % \
194 (label_conf, bootloader.append)
195
196 if initrd:
197 initrds = initrd.split(';')
198 for rd in initrds:
199 boot_conf += "initrd /%s\n" % rd
200
201 if dtb:
202 boot_conf += "devicetree /%s\n" % dtb
203
204 if not unified_image:
205 logger.debug("Writing systemd-boot config "
206 "%s/hdd/boot/loader/entries/boot.conf", cr_workdir)
207 cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w")
208 cfg.write(boot_conf)
209 logger.debug("boot.conf:\n%s" % (boot_conf))
210 cfg.close()
211
212
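# For illustration, with hypothetical values (timeout 5, kernel 'bzImage',
# rootdev '/dev/sda2', append 'console=ttyS0', one initrd 'initrd', no
# title, label or dtb, uki.bbclass not in use), the generated files read:
#
#   loader/loader.conf:
#       timeout 5
#
#   loader/entries/boot.conf:
#       title boot
#       linux /bzImage
#       options LABEL=Boot root=/dev/sda2 console=ttyS0
#       initrd /initrd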
213 @classmethod
214 def do_configure_partition(cls, part, source_params, creator, cr_workdir,
215 oe_builddir, bootimg_dir, kernel_dir,
216 native_sysroot):
217 """
218 Called before do_prepare_partition(), creates loader-specific config
219 """
220 hdddir = "%s/hdd/boot" % cr_workdir
221
222 install_cmd = "install -d %s/EFI/BOOT" % hdddir
223 exec_cmd(install_cmd)
224
225 try:
226 if source_params['loader'] == 'grub-efi':
227 cls.do_configure_grubefi(hdddir, creator, cr_workdir, source_params)
228 elif source_params['loader'] == 'systemd-boot':
229 cls.do_configure_systemdboot(hdddir, creator, cr_workdir, source_params)
230 elif source_params['loader'] == 'uefi-kernel':
231 pass
232 else:
233 raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader'])
234 except KeyError:
235 raise WicError("bootimg-efi requires a loader, none specified")
236
237 if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None:
238 logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES')
239 else:
240 boot_files = None
241 for (fmt, id) in (("_uuid-%s", part.uuid), ("_label-%s", part.label), (None, None)):
242 if fmt:
243 var = fmt % id
244 else:
245 var = ""
246
247 boot_files = get_bitbake_var("IMAGE_EFI_BOOT_FILES" + var)
248 if boot_files:
249 break
250
251 logger.debug('Boot files: %s', boot_files)
252
253 # list of tuples (src_name, dst_name)
254 deploy_files = []
255 for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files):
256 if ';' in src_entry:
257 dst_entry = tuple(src_entry.split(';'))
258 if not dst_entry[0] or not dst_entry[1]:
259 raise WicError('Malformed boot file entry: %s' % src_entry)
260 else:
261 dst_entry = (src_entry, src_entry)
262
263 logger.debug('Destination entry: %r', dst_entry)
264 deploy_files.append(dst_entry)
265
266 cls.install_task = []
267 for deploy_entry in deploy_files:
268 src, dst = deploy_entry
269 if '*' in src:
270 # by default install files under their basename
271 entry_name_fn = os.path.basename
272 if dst != src:
273 # unless a target name was given, then treat name
274 # as a directory and append a basename
275 entry_name_fn = lambda name: \
276 os.path.join(dst,
277 os.path.basename(name))
278
279 srcs = glob(os.path.join(kernel_dir, src))
280
281 logger.debug('Globbed sources: %s', ', '.join(srcs))
282 for entry in srcs:
283 src = os.path.relpath(entry, kernel_dir)
284 entry_dst_name = entry_name_fn(entry)
285 cls.install_task.append((src, entry_dst_name))
286 else:
287 cls.install_task.append((src, dst))
288
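# Parsing illustration (hypothetical variable value):
#   IMAGE_EFI_BOOT_FILES = "grub.cfg;EFI/BOOT/grub.cfg bzImage microcode/*"
# yields deploy_files = [('grub.cfg', 'EFI/BOOT/grub.cfg'),
#                        ('bzImage', 'bzImage'),
#                        ('microcode/*', 'microcode/*')]
# The first two entries go into install_task as-is; 'microcode/*' is
# globbed against kernel_dir and each match is installed under its
# basename.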
289 @classmethod
290 def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
291 oe_builddir, bootimg_dir, kernel_dir,
292 rootfs_dir, native_sysroot):
293 """
294 Called to do the actual content population for a partition i.e. it
295 'prepares' the partition to be incorporated into the image.
296 In this case, prepare content for an EFI (grub) boot partition.
297 """
298 if not kernel_dir:
299 kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
300 if not kernel_dir:
301 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
302
303 staging_kernel_dir = kernel_dir
304
305 hdddir = "%s/hdd/boot" % cr_workdir
306
307 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
308 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
309 if get_bitbake_var("INITRAMFS_IMAGE"):
310 kernel = "%s-%s.bin" % \
311 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
312
313 if source_params.get('create-unified-kernel-image') == "true":
314 raise WicError("create-unified-kernel-image is no longer supported. Please use uki.bbclass.")
315
316 if source_params.get('install-kernel-into-boot-dir') != 'false':
317 install_cmd = "install -v -p -m 0644 %s/%s %s/%s" % \
318 (staging_kernel_dir, kernel, hdddir, kernel)
319 out = exec_cmd(install_cmd)
320 logger.debug("Installed kernel files:\n%s" % out)
321
322 if get_bitbake_var("IMAGE_EFI_BOOT_FILES"):
323 for src_path, dst_path in cls.install_task:
324 install_cmd = "install -v -p -m 0644 -D %s %s" \
325 % (os.path.join(kernel_dir, src_path),
326 os.path.join(hdddir, dst_path))
327 out = exec_cmd(install_cmd)
328 logger.debug("Installed IMAGE_EFI_BOOT_FILES:\n%s" % out)
329
330 try:
331 if source_params['loader'] == 'grub-efi':
332 shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir,
333 "%s/grub.cfg" % cr_workdir)
334 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]:
335 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:])
336 exec_cmd(cp_cmd, True)
337 shutil.move("%s/grub.cfg" % cr_workdir,
338 "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir)
339 elif source_params['loader'] == 'systemd-boot':
340 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]:
341 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:])
342 out = exec_cmd(cp_cmd, True)
343 logger.debug("systemd-boot files:\n%s" % out)
344 elif source_params['loader'] == 'uefi-kernel':
345 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
346 if not kernel:
347 raise WicError("Empty KERNEL_IMAGETYPE")
348 target = get_bitbake_var("TARGET_SYS")
349 if not target:
350 raise WicError("Empty TARGET_SYS")
351
352 if re.match("x86_64", target):
353 kernel_efi_image = "bootx64.efi"
354 elif re.match('i.86', target):
355 kernel_efi_image = "bootia32.efi"
356 elif re.match('aarch64', target):
357 kernel_efi_image = "bootaa64.efi"
358 elif re.match('arm', target):
359 kernel_efi_image = "bootarm.efi"
360 else:
361 raise WicError("UEFI stub kernel is incompatible with target %s" % target)
362
363 for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]:
364 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image)
365 out = exec_cmd(cp_cmd, True)
366 logger.debug("uefi-kernel files:\n%s" % out)
367 else:
368 raise WicError("unrecognized bootimg-efi loader: %s" %
369 source_params['loader'])
370
371 # must have installed at least one EFI bootloader
372 out = glob(os.path.join(hdddir, 'EFI', 'BOOT', 'boot*.efi'))
373 logger.debug("Installed EFI loader files:\n%s" % out)
374 if not out:
375 raise WicError("No EFI loaders installed to ESP partition. Check that grub-efi, systemd-boot or similar is installed.")
376
377 except KeyError:
378 raise WicError("bootimg-efi requires a loader, none specified")
379
380 startup = os.path.join(kernel_dir, "startup.nsh")
381 if os.path.exists(startup):
382 cp_cmd = "cp -v -p %s %s/" % (startup, hdddir)
383 out = exec_cmd(cp_cmd, True)
384 logger.debug("startup files:\n%s" % out)
385
386 for paths in part.include_path or []:
387 for path in paths:
388 cp_cmd = "cp -v -p -r %s %s/" % (path, hdddir)
389 out = exec_cmd(cp_cmd, True)
390 logger.debug("include_path files:\n%s" % out)
391
392 du_cmd = "du -bks %s" % hdddir
393 out = exec_cmd(du_cmd)
394 blocks = int(out.split()[0])
395
396 extra_blocks = part.get_extra_block_count(blocks)
397
398 if extra_blocks < BOOTDD_EXTRA_SPACE:
399 extra_blocks = BOOTDD_EXTRA_SPACE
400
401 blocks += extra_blocks
402
403 logger.debug("Added %d extra blocks to %s to get to %d total blocks",
404 extra_blocks, part.mountpoint, blocks)
405
406 # required for compatibility with certain devices expecting file system
407 # block count to be equal to partition block count
408 if blocks < part.fixed_size:
409 blocks = part.fixed_size
410 logger.debug("Overriding %s to %d total blocks for compatibility",
411 part.mountpoint, blocks)
412
413 # dosfs image, created by mkdosfs
414 bootimg = "%s/boot.img" % cr_workdir
415
416 label = part.label if part.label else "ESP"
417
418 dosfs_cmd = "mkdosfs -v -n %s -i %s -C %s %d" % \
419 (label, part.fsuuid, bootimg, blocks)
420 out = exec_native_cmd(dosfs_cmd, native_sysroot)
421 logger.debug("mkdosfs:\n%s" % (str(out)))
422
423 mcopy_cmd = "mcopy -v -p -i %s -s %s/* ::/" % (bootimg, hdddir)
424 out = exec_native_cmd(mcopy_cmd, native_sysroot)
425 logger.debug("mcopy:\n%s" % (str(out)))
426
427 chmod_cmd = "chmod 644 %s" % bootimg
428 exec_cmd(chmod_cmd)
429
430 du_cmd = "du -Lbks %s" % bootimg
431 out = exec_cmd(du_cmd)
432 bootimg_size = out.split()[0]
433
434 part.size = int(bootimg_size)
435 part.source_file = bootimg
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg-partition.py
deleted file mode 100644
index 589853a439..0000000000
--- a/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ /dev/null
@@ -1,162 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# DESCRIPTION
7# This implements the 'bootimg-partition' source plugin class for
8# 'wic'. The plugin creates an image of boot partition, copying over
9# files listed in IMAGE_BOOT_FILES bitbake variable.
10#
11# AUTHORS
12# Maciej Borzecki <maciej.borzecki (at] open-rnd.pl>
13#
14
15import logging
16import os
17import re
18
19from oe.bootfiles import get_boot_files
20
21from wic import WicError
22from wic.engine import get_custom_config
23from wic.pluginbase import SourcePlugin
24from wic.misc import exec_cmd, get_bitbake_var
25
26logger = logging.getLogger('wic')
27
28class BootimgPartitionPlugin(SourcePlugin):
29 """
30 Create an image of boot partition, copying over files
31 listed in IMAGE_BOOT_FILES bitbake variable.
32 """
33
34 name = 'bootimg-partition'
35 image_boot_files_var_name = 'IMAGE_BOOT_FILES'
36
37 @classmethod
38 def do_configure_partition(cls, part, source_params, cr, cr_workdir,
39 oe_builddir, bootimg_dir, kernel_dir,
40 native_sysroot):
41 """
42 Called before do_prepare_partition(), create u-boot specific boot config
43 """
44 hdddir = "%s/boot.%d" % (cr_workdir, part.lineno)
45 install_cmd = "install -d %s" % hdddir
46 exec_cmd(install_cmd)
47
48 if not kernel_dir:
49 kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
50 if not kernel_dir:
51 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
52
53 boot_files = None
54 for (fmt, id) in (("_uuid-%s", part.uuid), ("_label-%s", part.label), (None, None)):
55 if fmt:
56 var = fmt % id
57 else:
58 var = ""
59
60 boot_files = get_bitbake_var(cls.image_boot_files_var_name + var)
61 if boot_files is not None:
62 break
63
64 if boot_files is None:
65 raise WicError('No boot files defined, %s unset for entry #%d' % (cls.image_boot_files_var_name, part.lineno))
66
67 logger.debug('Boot files: %s', boot_files)
68
69 cls.install_task = get_boot_files(kernel_dir, boot_files)
70 if source_params.get('loader') != "u-boot":
71 return
72
73 configfile = cr.ks.bootloader.configfile
74 custom_cfg = None
75 if configfile:
76 custom_cfg = get_custom_config(configfile)
77 if custom_cfg:
78 # Use a custom configuration for extlinux.conf
79 extlinux_conf = custom_cfg
80 logger.debug("Using custom configuration file "
81 "%s for extlinux.conf", configfile)
82 else:
83 raise WicError("configfile is specified but failed to "
84 "get it from %s." % configfile)
85
86 if not custom_cfg:
87 # The kernel types supported by the sysboot of u-boot
88 kernel_types = ["zImage", "Image", "fitImage", "uImage", "vmlinux"]
89 has_dtb = False
90 fdt_dir = '/'
91 kernel_name = None
92
93 # Find the kernel image name, from the highest precedence to lowest
94 for image in kernel_types:
95 for task in cls.install_task:
96 src, dst = task
97 if re.match(image, src):
98 kernel_name = os.path.join('/', dst)
99 break
100 if kernel_name:
101 break
102
103 for task in cls.install_task:
104 src, dst = task
105 # We assume that all the dtbs are in the same directory
106 if re.search(r'\.dtb', src) and fdt_dir == '/':
107 has_dtb = True
108 fdt_dir = os.path.join(fdt_dir, os.path.dirname(dst))
109 break
110
111 if not kernel_name:
112 raise WicError('No kernel file found')
113
114 # Compose the extlinux.conf
115 extlinux_conf = "default Yocto\n"
116 extlinux_conf += "label Yocto\n"
117 extlinux_conf += " kernel %s\n" % kernel_name
118 if has_dtb:
119 extlinux_conf += " fdtdir %s\n" % fdt_dir
120 bootloader = cr.ks.bootloader
121 extlinux_conf += "append root=%s rootwait %s\n" \
122 % (cr.rootdev, bootloader.append if bootloader.append else '')
123
124 install_cmd = "install -d %s/extlinux/" % hdddir
125 exec_cmd(install_cmd)
126 cfg = open("%s/extlinux/extlinux.conf" % hdddir, "w")
127 cfg.write(extlinux_conf)
128 cfg.close()
129
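# For illustration, with hypothetical values (kernel installed as
# '/zImage', a dtb in the partition root, rootdev '/dev/mmcblk0p2' and
# append 'console=ttyS0'), the composed extlinux.conf would read:
#
#   default Yocto
#   label Yocto
#       kernel /zImage
#       fdtdir /
#   append root=/dev/mmcblk0p2 rootwait console=ttyS0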
130
131 @classmethod
132 def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
133 oe_builddir, bootimg_dir, kernel_dir,
134 rootfs_dir, native_sysroot):
135 """
136 Called to do the actual content population for a partition i.e. it
137 'prepares' the partition to be incorporated into the image.
138 In this case, does the following:
139 - sets up a vfat partition
140 - copies all files listed in IMAGE_BOOT_FILES variable
141 """
142 hdddir = "%s/boot.%d" % (cr_workdir, part.lineno)
143
144 if not kernel_dir:
145 kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
146 if not kernel_dir:
147 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
148
149 logger.debug('Kernel dir: %s', kernel_dir)
150
151
152 for task in cls.install_task:
153 src_path, dst_path = task
154 logger.debug('Install %s as %s', src_path, dst_path)
155 install_cmd = "install -m 0644 -D %s %s" \
156 % (os.path.join(kernel_dir, src_path),
157 os.path.join(hdddir, dst_path))
158 exec_cmd(install_cmd)
159
160 logger.debug('Prepare boot partition using rootfs in %s', hdddir)
161 part.prepare_rootfs(cr_workdir, oe_builddir, hdddir,
162 native_sysroot, False)
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg-pcbios.py
deleted file mode 100644
index a207a83530..0000000000
--- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ /dev/null
@@ -1,209 +0,0 @@
1#
2# Copyright (c) 2014, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# DESCRIPTION
7# This implements the 'bootimg-pcbios' source plugin class for 'wic'
8#
9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
11#
12
13import logging
14import os
15import re
16
17from wic import WicError
18from wic.engine import get_custom_config
19from wic.pluginbase import SourcePlugin
20from wic.misc import (exec_cmd, exec_native_cmd,
21 get_bitbake_var, BOOTDD_EXTRA_SPACE)
22
23logger = logging.getLogger('wic')
24
25class BootimgPcbiosPlugin(SourcePlugin):
26 """
27 Create MBR boot partition and install syslinux on it.
28 """
29
30 name = 'bootimg-pcbios'
31
32 @classmethod
33 def _get_bootimg_dir(cls, bootimg_dir, dirname):
34 """
35 Check if dirname exists in default bootimg_dir or in STAGING_DIR.
36 """
37 staging_datadir = get_bitbake_var("STAGING_DATADIR")
38 for result in (bootimg_dir, staging_datadir):
39 if os.path.exists("%s/%s" % (result, dirname)):
40 return result
41
42 # STAGING_DATADIR is expanded with MLPREFIX if multilib is enabled
43 # but dependency syslinux is still populated to original STAGING_DATADIR
44 nonarch_datadir = re.sub('/[^/]*recipe-sysroot', '/recipe-sysroot', staging_datadir)
45 if os.path.exists(os.path.join(nonarch_datadir, dirname)):
46 return nonarch_datadir
47
48 raise WicError("Couldn't find correct bootimg_dir, exiting")
49
50 @classmethod
51 def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir,
52 bootimg_dir, kernel_dir, native_sysroot):
53 """
54 Called after all partitions have been prepared and assembled into a
55 disk image. In this case, we install the MBR.
56 """
57 bootimg_dir = cls._get_bootimg_dir(bootimg_dir, 'syslinux')
58 mbrfile = "%s/syslinux/" % bootimg_dir
59 if creator.ptable_format == 'msdos':
60 mbrfile += "mbr.bin"
61 elif creator.ptable_format == 'gpt':
62 mbrfile += "gptmbr.bin"
63 else:
64 raise WicError("Unsupported partition table: %s" %
65 creator.ptable_format)
66
67 if not os.path.exists(mbrfile):
68 raise WicError("Couldn't find %s. If using the -e option, do you "
69 "have the right MACHINE set in local.conf? If not, "
70 "is the bootimg_dir path correct?" % mbrfile)
71
72 full_path = creator._full_path(workdir, disk_name, "direct")
73 logger.debug("Installing MBR on disk %s as %s with size %s bytes",
74 disk_name, full_path, disk.min_size)
75
76 dd_cmd = "dd if=%s of=%s conv=notrunc" % (mbrfile, full_path)
77 exec_cmd(dd_cmd, native_sysroot)
78
79 @classmethod
80 def do_configure_partition(cls, part, source_params, creator, cr_workdir,
81 oe_builddir, bootimg_dir, kernel_dir,
82 native_sysroot):
83 """
84 Called before do_prepare_partition(), creates syslinux config
85 """
86 hdddir = "%s/hdd/boot" % cr_workdir
87
88 install_cmd = "install -d %s" % hdddir
89 exec_cmd(install_cmd)
90
91 bootloader = creator.ks.bootloader
92
93 custom_cfg = None
94 if bootloader.configfile:
95 custom_cfg = get_custom_config(bootloader.configfile)
96 if custom_cfg:
97 # Use a custom configuration for grub
98 syslinux_conf = custom_cfg
99 logger.debug("Using custom configuration file %s "
100 "for syslinux.cfg", bootloader.configfile)
101 else:
102 raise WicError("configfile is specified but failed to "
103 "get it from %s." % bootloader.configfile)
104
105 if not custom_cfg:
106 # Create syslinux configuration using parameters from wks file
107 splash = os.path.join(cr_workdir, "hdd/boot/splash.jpg")
108 if os.path.exists(splash):
109 splashline = "menu background splash.jpg"
110 else:
111 splashline = ""
112
113 syslinux_conf = ""
114 syslinux_conf += "PROMPT 0\n"
115 syslinux_conf += "TIMEOUT " + str(bootloader.timeout) + "\n"
116 syslinux_conf += "\n"
117 syslinux_conf += "ALLOWOPTIONS 1\n"
118 syslinux_conf += "SERIAL 0 115200\n"
119 syslinux_conf += "\n"
120 if splashline:
121 syslinux_conf += "%s\n" % splashline
122 syslinux_conf += "DEFAULT boot\n"
123 syslinux_conf += "LABEL boot\n"
124
125 kernel = "/" + get_bitbake_var("KERNEL_IMAGETYPE")
126 syslinux_conf += "KERNEL " + kernel + "\n"
127
128 syslinux_conf += "APPEND label=boot root=%s %s\n" % \
129 (creator.rootdev, bootloader.append)
130
131 logger.debug("Writing syslinux config %s/hdd/boot/syslinux.cfg",
132 cr_workdir)
133 cfg = open("%s/hdd/boot/syslinux.cfg" % cr_workdir, "w")
134 cfg.write(syslinux_conf)
135 cfg.close()
136
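# For illustration, with hypothetical values (timeout 50, no splash image,
# KERNEL_IMAGETYPE 'bzImage', rootdev '/dev/sda2', append 'console=ttyS0'),
# the generated syslinux.cfg would read:
#
#   PROMPT 0
#   TIMEOUT 50
#
#   ALLOWOPTIONS 1
#   SERIAL 0 115200
#
#   DEFAULT boot
#   LABEL boot
#   KERNEL /bzImage
#   APPEND label=boot root=/dev/sda2 console=ttyS0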
137 @classmethod
138 def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
139 oe_builddir, bootimg_dir, kernel_dir,
140 rootfs_dir, native_sysroot):
141 """
142 Called to do the actual content population for a partition i.e. it
143 'prepares' the partition to be incorporated into the image.
144 In this case, prepare content for legacy bios boot partition.
145 """
146 bootimg_dir = cls._get_bootimg_dir(bootimg_dir, 'syslinux')
147
148 staging_kernel_dir = kernel_dir
149
150 hdddir = "%s/hdd/boot" % cr_workdir
151
152 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
153 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
154 if get_bitbake_var("INITRAMFS_IMAGE"):
155 kernel = "%s-%s.bin" % \
156 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
157
158 cmds = ("install -m 0644 %s/%s %s/%s" %
159 (staging_kernel_dir, kernel, hdddir, get_bitbake_var("KERNEL_IMAGETYPE")),
160 "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" %
161 (bootimg_dir, hdddir),
162 "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" %
163 (bootimg_dir, hdddir),
164 "install -m 444 %s/syslinux/libcom32.c32 %s/libcom32.c32" %
165 (bootimg_dir, hdddir),
166 "install -m 444 %s/syslinux/libutil.c32 %s/libutil.c32" %
167 (bootimg_dir, hdddir))
168
169 for install_cmd in cmds:
170 exec_cmd(install_cmd)
171
172 du_cmd = "du -bks %s" % hdddir
173 out = exec_cmd(du_cmd)
174 blocks = int(out.split()[0])
175
176 extra_blocks = part.get_extra_block_count(blocks)
177
178 if extra_blocks < BOOTDD_EXTRA_SPACE:
179 extra_blocks = BOOTDD_EXTRA_SPACE
180
181 blocks += extra_blocks
182
183 logger.debug("Added %d extra blocks to %s to get to %d total blocks",
184 extra_blocks, part.mountpoint, blocks)
185
186 # dosfs image, created by mkdosfs
187 bootimg = "%s/boot%s.img" % (cr_workdir, part.lineno)
188
189 label = part.label if part.label else "boot"
190
191 dosfs_cmd = "mkdosfs -n %s -i %s -S 512 -C %s %d" % \
192 (label, part.fsuuid, bootimg, blocks)
193 exec_native_cmd(dosfs_cmd, native_sysroot)
194
195 mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir)
196 exec_native_cmd(mcopy_cmd, native_sysroot)
197
198 syslinux_cmd = "syslinux %s" % bootimg
199 exec_native_cmd(syslinux_cmd, native_sysroot)
200
201 chmod_cmd = "chmod 644 %s" % bootimg
202 exec_cmd(chmod_cmd)
203
204 du_cmd = "du -Lbks %s" % bootimg
205 out = exec_cmd(du_cmd)
206 bootimg_size = out.split()[0]
207
208 part.size = int(bootimg_size)
209 part.source_file = bootimg
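The sizing logic above follows a recurring wic pattern: measure the payload with du, pad the block count, then build the filesystem at the padded size so mkdosfs does not run out of room once filesystem metadata is added. A minimal standalone sketch of the same arithmetic; BOOTDD_EXTRA_SPACE is assumed here to be 16384 1K blocks, and get_extra_block_count() is a simplified stand-in for part.get_extra_block_count():

import subprocess

BOOTDD_EXTRA_SPACE = 16384  # assumed value, in 1K blocks

def get_extra_block_count(current_blocks, requested_size_kb=0):
    # Stand-in for part.get_extra_block_count(): extra 1K blocks needed
    # to reach a requested partition size, if any.
    return max(requested_size_kb - current_blocks, 0)

def boot_image_blocks(hdddir, requested_size_kb=0):
    # 'du -bks' reports the apparent payload size in 1K blocks.
    out = subprocess.check_output(["du", "-bks", hdddir], text=True)
    blocks = int(out.split()[0])
    extra_blocks = max(get_extra_block_count(blocks, requested_size_kb),
                       BOOTDD_EXTRA_SPACE)
    return blocks + extra_blocks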
diff --git a/scripts/lib/wic/plugins/source/empty.py b/scripts/lib/wic/plugins/source/empty.py
deleted file mode 100644
index 4178912377..0000000000
--- a/scripts/lib/wic/plugins/source/empty.py
+++ /dev/null
@@ -1,89 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7# The empty wic plugin is used to create unformatted empty partitions for wic
8# images.
9# To use it you must pass "empty" as the argument to the "--source" parameter in
10# the wks file. For example:
11# part foo --source empty --ondisk sda --size="1024" --align 1024
12#
13# The plugin supports writing zeros to the start of the
14# partition. This is useful to overwrite old content like
15# filesystem signatures which may be re-recognized otherwise.
16# This feature can be enabled with
17# '--sourceparams="[fill|size=<N>[S|s|K|k|M|G]][,][bs=<N>[S|s|K|k|M|G]]"'
18# Conflicting or missing options throw errors.
19
20import logging
21import os
22
23from wic import WicError
24from wic.ksparser import sizetype
25from wic.pluginbase import SourcePlugin
26
27logger = logging.getLogger('wic')
28
29class EmptyPartitionPlugin(SourcePlugin):
30 """
31 Populate unformatted empty partition.
32
33 The following sourceparams are supported:
34 - fill
35 Fill the entire partition with zeros. Requires '--fixed-size' option
36 to be set.
37 - size=<N>[S|s|K|k|M|G]
38 Set the first N bytes of the partition to zero. Default unit is 'K'.
39 - bs=<N>[S|s|K|k|M|G]
40 Write at most N bytes at a time during source file creation.
41 Defaults to '1M'. Default unit is 'K'.
42 """
43
44 name = 'empty'
45
46 @classmethod
47 def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
48 oe_builddir, bootimg_dir, kernel_dir,
49 rootfs_dir, native_sysroot):
50 """
51 Called to do the actual content population for a partition i.e. it
52 'prepares' the partition to be incorporated into the image.
53 """
54 get_byte_count = sizetype('K', True)
55 size = 0
56
57 if 'fill' in source_params and 'size' in source_params:
58 raise WicError("Conflicting source parameters 'fill' and 'size' specified, exiting.")
59
60 # Set the size of the zeros to be written to the partition
61 if 'fill' in source_params:
62 if part.fixed_size == 0:
63 raise WicError("Source parameter 'fill' only works with the '--fixed-size' option, exiting.")
64 size = get_byte_count(part.fixed_size)
65 elif 'size' in source_params:
66 size = get_byte_count(source_params['size'])
67
68 if size == 0:
69 # Nothing to do, create empty partition
70 return
71
72 if 'bs' in source_params:
73 bs = get_byte_count(source_params['bs'])
74 else:
75 bs = get_byte_count('1M')
76
77 # Create a binary file of the requested size filled with zeros
78 source_file = os.path.join(cr_workdir, 'empty-plugin-zeros%s.bin' % part.lineno)
79 if not os.path.exists(os.path.dirname(source_file)):
80 os.makedirs(os.path.dirname(source_file))
81
82 quotient, remainder = divmod(size, bs)
83 with open(source_file, 'wb') as file:
84 for _ in range(quotient):
85 file.write(bytearray(bs))
86 file.write(bytearray(remainder))
87
88 part.size = (size + 1024 - 1) // 1024 # size in KB rounded up
89 part.source_file = source_file
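The divmod loop above is the heart of the plugin: the requested size is split into full block-size chunks plus a remainder, so the zero-fill never holds more than one chunk in memory. A self-contained sketch of the same pattern; the function name and example path are hypothetical:

import os

def write_zeros(path, size, bs=1024 * 1024):
    # Write 'size' zero bytes to 'path' in chunks of at most 'bs' bytes,
    # keeping memory use bounded regardless of the requested size.
    if os.path.dirname(path):
        os.makedirs(os.path.dirname(path), exist_ok=True)
    quotient, remainder = divmod(size, bs)
    with open(path, "wb") as f:
        for _ in range(quotient):
            f.write(bytearray(bs))
        f.write(bytearray(remainder))

write_zeros("/tmp/zeros.bin", 3 * 1024 * 1024 + 512)  # 3 MiB + 512 bytes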
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
deleted file mode 100644
index 607356ad13..0000000000
--- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ /dev/null
@@ -1,463 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# DESCRIPTION
7# This implements the 'isoimage-isohybrid' source plugin class for 'wic'
8#
9# AUTHORS
10# Mihaly Varga <mihaly.varga (at] ni.com>
11
12import glob
13import logging
14import os
15import re
16import shutil
17
18from wic import WicError
19from wic.engine import get_custom_config
20from wic.pluginbase import SourcePlugin
21from wic.misc import exec_cmd, exec_native_cmd, get_bitbake_var
22
23logger = logging.getLogger('wic')
24
25class IsoImagePlugin(SourcePlugin):
26 """
27 Create a bootable ISO image
28
29 This plugin creates a hybrid, legacy and EFI bootable ISO image. The
30 generated image can be used on optical media as well as USB media.
31
32 Legacy boot uses syslinux and EFI boot uses grub or gummiboot (not
33 implemented yet) as the bootloader. The plugin creates the directories required
34 by the bootloaders and populates them by creating and configuring the
35 bootloader files.
36
37 Example kickstart file:
38 part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi, \\
39 image_name= IsoImage" --ondisk cd --label LIVECD
40 bootloader --timeout=10 --append=" "
41
42 In --sourceparams "loader" specifies the bootloader used for booting in EFI
43 mode, while "image_name" specifies the name of the generated image. In the
44 example above, wic creates an ISO image named IsoImage-cd.direct (the
45 default extension added by the direct imager plugin) and a file named IsoImage-cd.iso
46 """
47
48 name = 'isoimage-isohybrid'
49
50 @classmethod
51 def do_configure_syslinux(cls, creator, cr_workdir):
52 """
53 Create loader-specific (syslinux) config
54 """
55 splash = os.path.join(cr_workdir, "ISO/boot/splash.jpg")
56 if os.path.exists(splash):
57 splashline = "menu background splash.jpg"
58 else:
59 splashline = ""
60
61 bootloader = creator.ks.bootloader
62
63 syslinux_conf = ""
64 syslinux_conf += "PROMPT 0\n"
65 syslinux_conf += "TIMEOUT %s \n" % (bootloader.timeout or 10)
66 syslinux_conf += "\n"
67 syslinux_conf += "ALLOWOPTIONS 1\n"
68 syslinux_conf += "SERIAL 0 115200\n"
69 syslinux_conf += "\n"
70 if splashline:
71 syslinux_conf += "%s\n" % splashline
72 syslinux_conf += "DEFAULT boot\n"
73 syslinux_conf += "LABEL boot\n"
74
75 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
76 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
77 if get_bitbake_var("INITRAMFS_IMAGE"):
78 kernel = "%s-%s.bin" % \
79 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
80
81 syslinux_conf += "KERNEL /" + kernel + "\n"
82 syslinux_conf += "APPEND initrd=/initrd LABEL=boot %s\n" \
83 % bootloader.append
84
85 logger.debug("Writing syslinux config %s/ISO/isolinux/isolinux.cfg",
86 cr_workdir)
87
88 with open("%s/ISO/isolinux/isolinux.cfg" % cr_workdir, "w") as cfg:
89 cfg.write(syslinux_conf)
90
91 @classmethod
92 def do_configure_grubefi(cls, part, creator, target_dir):
93 """
94 Create loader-specific (grub-efi) config
95 """
96 configfile = creator.ks.bootloader.configfile
97 if configfile:
98 grubefi_conf = get_custom_config(configfile)
99 if grubefi_conf:
100 logger.debug("Using custom configuration file %s for grub.cfg",
101 configfile)
102 else:
103 raise WicError("configfile is specified "
104 "but failed to get it from %s." % configfile)
105 else:
106 splash = os.path.join(target_dir, "splash.jpg")
107 if os.path.exists(splash):
108 splashline = "menu background splash.jpg"
109 else:
110 splashline = ""
111
112 bootloader = creator.ks.bootloader
113
114 grubefi_conf = ""
115 grubefi_conf += "serial --unit=0 --speed=115200 --word=8 "
116 grubefi_conf += "--parity=no --stop=1\n"
117 grubefi_conf += "default=boot\n"
118 grubefi_conf += "timeout=%s\n" % (bootloader.timeout or 10)
119 grubefi_conf += "\n"
120 grubefi_conf += "search --set=root --label %s " % part.label
121 grubefi_conf += "\n"
122 grubefi_conf += "menuentry 'boot'{\n"
123
124 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
125 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
126 if get_bitbake_var("INITRAMFS_IMAGE"):
127 kernel = "%s-%s.bin" % \
128 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
129
130 grubefi_conf += "linux /%s rootwait %s\n" \
131 % (kernel, bootloader.append)
132 grubefi_conf += "initrd /initrd \n"
133 grubefi_conf += "}\n"
134
135 if splashline:
136 grubefi_conf += "%s\n" % splashline
137
138 cfg_path = os.path.join(target_dir, "grub.cfg")
139 logger.debug("Writing grubefi config %s", cfg_path)
140
141 with open(cfg_path, "w") as cfg:
142 cfg.write(grubefi_conf)
143
144 @staticmethod
145 def _build_initramfs_path(rootfs_dir, cr_workdir):
146 """
147 Create path for initramfs image
148 """
149
150 initrd = get_bitbake_var("INITRD_LIVE") or get_bitbake_var("INITRD")
151 if not initrd:
152 initrd_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
153 if not initrd_dir:
154 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting.")
155
156 image_name = get_bitbake_var("IMAGE_BASENAME")
157 if not image_name:
158 raise WicError("Couldn't find IMAGE_BASENAME, exiting.")
159
160 image_type = get_bitbake_var("INITRAMFS_FSTYPES")
161 if not image_type:
162 raise WicError("Couldn't find INITRAMFS_FSTYPES, exiting.")
163
164 machine = os.path.basename(initrd_dir)
165
166 pattern = '%s/%s*%s.%s' % (initrd_dir, image_name, machine, image_type)
167 files = glob.glob(pattern)
168 if files:
169 initrd = files[0]
170
171 if not initrd or not os.path.exists(initrd):
172 # Create initrd from rootfs directory
173 initrd = "%s/initrd.cpio.gz" % cr_workdir
174 initrd_dir = "%s/INITRD" % cr_workdir
175 shutil.copytree(rootfs_dir, initrd_dir,
176 symlinks=True)
177
178 if os.path.isfile("%s/init" % rootfs_dir):
179 shutil.copy2("%s/init" % rootfs_dir, "%s/init" % initrd_dir)
180 elif os.path.lexists("%s/init" % rootfs_dir):
181 os.symlink(os.readlink("%s/init" % rootfs_dir), \
182 "%s/init" % initrd_dir)
183 elif os.path.isfile("%s/sbin/init" % rootfs_dir):
184 shutil.copy2("%s/sbin/init" % rootfs_dir, \
185 "%s" % initrd_dir)
186 elif os.path.lexists("%s/sbin/init" % rootfs_dir):
187 os.symlink(os.readlink("%s/sbin/init" % rootfs_dir), \
188 "%s/init" % initrd_dir)
189 else:
190 raise WicError("Couldn't find or build initrd, exiting.")
191
192 exec_cmd("cd %s && find . | cpio -o -H newc -R root:root >%s/initrd.cpio " \
193 % (initrd_dir, cr_workdir), as_shell=True)
194 exec_cmd("gzip -f -9 %s/initrd.cpio" % cr_workdir, as_shell=True)
195 shutil.rmtree(initrd_dir)
196
197 return initrd
198
199 @classmethod
200 def do_configure_partition(cls, part, source_params, creator, cr_workdir,
201 oe_builddir, bootimg_dir, kernel_dir,
202 native_sysroot):
203 """
204 Called before do_prepare_partition(), creates loader-specific config
205 """
206 isodir = "%s/ISO/" % cr_workdir
207
208 if os.path.exists(isodir):
209 shutil.rmtree(isodir)
210
211 install_cmd = "install -d %s " % isodir
212 exec_cmd(install_cmd)
213
214 # Overwrite the name of the created image
215 logger.debug(source_params)
216 if 'image_name' in source_params and \
217 source_params['image_name'].strip():
218 creator.name = source_params['image_name'].strip()
219 logger.debug("The name of the image is: %s", creator.name)
220
221 @staticmethod
222 def _install_payload(source_params, iso_dir):
223 """
224 Copies contents of payload directory (as specified in 'payload_dir' param) into iso_dir
225 """
226
227 if source_params.get('payload_dir'):
228 payload_dir = source_params['payload_dir']
229
230 logger.debug("Payload directory: %s", payload_dir)
231 shutil.copytree(payload_dir, iso_dir, symlinks=True, dirs_exist_ok=True)
232
233 @classmethod
234 def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
235 oe_builddir, bootimg_dir, kernel_dir,
236 rootfs_dir, native_sysroot):
237 """
238 Called to do the actual content population for a partition i.e. it
239 'prepares' the partition to be incorporated into the image.
240 In this case, prepare content for a bootable ISO image.
241 """
242
243 isodir = "%s/ISO" % cr_workdir
244
245 cls._install_payload(source_params, isodir)
246
247 if part.rootfs_dir is None:
248 if 'ROOTFS_DIR' not in rootfs_dir:
249 raise WicError("Couldn't find --rootfs-dir, exiting.")
250 rootfs_dir = rootfs_dir['ROOTFS_DIR']
251 else:
252 if part.rootfs_dir in rootfs_dir:
253 rootfs_dir = rootfs_dir[part.rootfs_dir]
254 elif part.rootfs_dir:
255 rootfs_dir = part.rootfs_dir
256 else:
257 raise WicError("Couldn't find --rootfs-dir=%s connection "
258 "or it is not a valid path, exiting." %
259 part.rootfs_dir)
260
261 if not os.path.isdir(rootfs_dir):
262 rootfs_dir = get_bitbake_var("IMAGE_ROOTFS")
263 if not os.path.isdir(rootfs_dir):
264 raise WicError("Couldn't find IMAGE_ROOTFS, exiting.")
265
266 part.rootfs_dir = rootfs_dir
267 deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
268 img_iso_dir = get_bitbake_var("ISODIR")
269
270 # Remove the temporary file created by part.prepare_rootfs()
271 if os.path.isfile(part.source_file):
272 os.remove(part.source_file)
273
274 # Support using a different initrd other than default
275 if source_params.get('initrd'):
276 initrd = source_params['initrd']
277 if not deploy_dir:
278 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
279 cp_cmd = "cp %s/%s %s" % (deploy_dir, initrd, cr_workdir)
280 exec_cmd(cp_cmd)
281 else:
282 # Prepare initial ramdisk
283 initrd = "%s/initrd" % deploy_dir
284 if not os.path.isfile(initrd):
285 initrd = "%s/initrd" % img_iso_dir
286 if not os.path.isfile(initrd):
287 initrd = cls._build_initramfs_path(rootfs_dir, cr_workdir)
288
289 install_cmd = "install -m 0644 %s %s/initrd" % (initrd, isodir)
290 exec_cmd(install_cmd)
291
292 # Remove the temporary file created by _build_initramfs_path function
293 if os.path.isfile("%s/initrd.cpio.gz" % cr_workdir):
294 os.remove("%s/initrd.cpio.gz" % cr_workdir)
295
296 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
297 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
298 if get_bitbake_var("INITRAMFS_IMAGE"):
299 kernel = "%s-%s.bin" % \
300 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
301
302 install_cmd = "install -m 0644 %s/%s %s/%s" % \
303 (kernel_dir, kernel, isodir, kernel)
304 exec_cmd(install_cmd)
305
306 # Create bootloader for EFI boot
307 try:
308 target_dir = "%s/EFI/BOOT" % isodir
309 if os.path.exists(target_dir):
310 shutil.rmtree(target_dir)
311
312 os.makedirs(target_dir)
313
314 if source_params['loader'] == 'grub-efi':
315 # Builds bootx64.efi/bootia32.efi if ISODIR didn't exist or
316 # didn't contain it
317 target_arch = get_bitbake_var("TARGET_SYS")
318 if not target_arch:
319 raise WicError("Couldn't find target architecture")
320
321 if re.match("x86_64", target_arch):
322 grub_src_image = "grub-efi-bootx64.efi"
323 grub_dest_image = "bootx64.efi"
324 elif re.match('i.86', target_arch):
325 grub_src_image = "grub-efi-bootia32.efi"
326 grub_dest_image = "bootia32.efi"
327 else:
328 raise WicError("grub-efi is incompatible with target %s" %
329 target_arch)
330
331 grub_target = os.path.join(target_dir, grub_dest_image)
332 if not os.path.isfile(grub_target):
333 grub_src = os.path.join(deploy_dir, grub_src_image)
334 if not os.path.exists(grub_src):
335 raise WicError("Grub loader %s is not found in %s. "
336 "Please build grub-efi first" % (grub_src_image, deploy_dir))
337 shutil.copy(grub_src, grub_target)
338
339 if not os.path.isfile(os.path.join(target_dir, "boot.cfg")):
340 cls.do_configure_grubefi(part, creator, target_dir)
341
342 else:
343 raise WicError("unrecognized isoimage-isohybrid loader: %s" %
344 source_params['loader'])
345 except KeyError:
346 raise WicError("isoimage-isohybrid requires a loader, none specified")
347
348 # Create efi.img that contains bootloader files for EFI booting
349 # if ISODIR didn't exist or didn't contain it
350 if os.path.isfile("%s/efi.img" % img_iso_dir):
351 install_cmd = "install -m 0644 %s/efi.img %s/efi.img" % \
352 (img_iso_dir, isodir)
353 exec_cmd(install_cmd)
354 else:
355 # Default to 100 blocks of extra space for file system overhead
356 esp_extra_blocks = int(source_params.get('esp_extra_blocks', '100'))
357
358 du_cmd = "du -bks %s/EFI" % isodir
359 out = exec_cmd(du_cmd)
360 blocks = int(out.split()[0])
361 blocks += esp_extra_blocks
362 logger.debug("Added %d extra blocks to %s to get to %d "
363 "total blocks", esp_extra_blocks, part.mountpoint, blocks)
364
365 # dosfs image for EFI boot
366 bootimg = "%s/efi.img" % isodir
367
368 esp_label = source_params.get('esp_label', 'EFIimg')
369
370 dosfs_cmd = 'mkfs.vfat -n \'%s\' -S 512 -C %s %d' \
371 % (esp_label, bootimg, blocks)
372 exec_native_cmd(dosfs_cmd, native_sysroot)
373
374 mmd_cmd = "mmd -i %s ::/EFI" % bootimg
375 exec_native_cmd(mmd_cmd, native_sysroot)
376
377 mcopy_cmd = "mcopy -i %s -s %s/EFI/* ::/EFI/" \
378 % (bootimg, isodir)
379 exec_native_cmd(mcopy_cmd, native_sysroot)
380
381 chmod_cmd = "chmod 644 %s" % bootimg
382 exec_cmd(chmod_cmd)
383
384 # Prepare files for legacy boot
385 syslinux_dir = get_bitbake_var("STAGING_DATADIR")
386 if not syslinux_dir:
387 raise WicError("Couldn't find STAGING_DATADIR, exiting.")
388
389 if os.path.exists("%s/isolinux" % isodir):
390 shutil.rmtree("%s/isolinux" % isodir)
391
392 install_cmd = "install -d %s/isolinux" % isodir
393 exec_cmd(install_cmd)
394
395 cls.do_configure_syslinux(creator, cr_workdir)
396
397 install_cmd = "install -m 444 %s/syslinux/ldlinux.sys " % syslinux_dir
398 install_cmd += "%s/isolinux/ldlinux.sys" % isodir
399 exec_cmd(install_cmd)
400
401 install_cmd = "install -m 444 %s/syslinux/isohdpfx.bin " % syslinux_dir
402 install_cmd += "%s/isolinux/isohdpfx.bin" % isodir
403 exec_cmd(install_cmd)
404
405 install_cmd = "install -m 644 %s/syslinux/isolinux.bin " % syslinux_dir
406 install_cmd += "%s/isolinux/isolinux.bin" % isodir
407 exec_cmd(install_cmd)
408
409 install_cmd = "install -m 644 %s/syslinux/ldlinux.c32 " % syslinux_dir
410 install_cmd += "%s/isolinux/ldlinux.c32" % isodir
411 exec_cmd(install_cmd)
412
413 # Create ISO image
414 iso_img = "%s/tempiso_img.iso" % cr_workdir
415 iso_bootimg = "isolinux/isolinux.bin"
416 iso_bootcat = "isolinux/boot.cat"
417 efi_img = "efi.img"
418
419 mkisofs_cmd = "mkisofs -V %s " % part.label
420 mkisofs_cmd += "-o %s -U " % iso_img
421 mkisofs_cmd += "-J -joliet-long -r -iso-level 2 -b %s " % iso_bootimg
422 mkisofs_cmd += "-c %s -no-emul-boot -boot-load-size 4 " % iso_bootcat
423 mkisofs_cmd += "-boot-info-table -eltorito-alt-boot "
424 mkisofs_cmd += "-eltorito-platform 0xEF -eltorito-boot %s " % efi_img
425 mkisofs_cmd += "-no-emul-boot %s " % isodir
426
427 logger.debug("running command: %s", mkisofs_cmd)
428 exec_native_cmd(mkisofs_cmd, native_sysroot)
429
430 shutil.rmtree(isodir)
431
432 du_cmd = "du -Lbks %s" % iso_img
433 out = exec_cmd(du_cmd)
434 isoimg_size = int(out.split()[0])
435
436 part.size = isoimg_size
437 part.source_file = iso_img
438
439 @classmethod
440 def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir,
441 bootimg_dir, kernel_dir, native_sysroot):
442 """
443 Called after all partitions have been prepared and assembled into a
444 disk image. In this case, we insert/modify the MBR using isohybrid
445 utility for booting via BIOS from disk storage devices.
446 """
447
448 iso_img = "%s.p1" % disk.path
449 full_path = creator._full_path(workdir, disk_name, "direct")
450 full_path_iso = creator._full_path(workdir, disk_name, "iso")
451
452 isohybrid_cmd = "isohybrid -u %s" % iso_img
453 logger.debug("running command: %s", isohybrid_cmd)
454 exec_native_cmd(isohybrid_cmd, native_sysroot)
455
456 # Replace the image created by the direct plugin with the one created
457 # by mkisofs. This is necessary because the ISO image created by
458 # mkisofs has a very specific MBR in the system area of the ISO image,
459 # and the direct plugin adds and configures another MBR.
460 logger.debug("Replacing the image created by direct plugin\n")
461 os.remove(disk.path)
462 shutil.copy2(iso_img, full_path_iso)
463 shutil.copy2(full_path_iso, full_path)
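The mkisofs invocation assembled above is what makes the image dual-bootable: it registers two El Torito boot entries, isolinux.bin for BIOS and efi.img (platform ID 0xEF) for EFI, and isohybrid then patches the MBR so the same image boots from disk media. A sketch of that command built as an argument list instead of string concatenation; the example paths are hypothetical placeholders:

import subprocess

def build_mkisofs_cmd(label, iso_img, isodir,
                      iso_bootimg="isolinux/isolinux.bin",
                      iso_bootcat="isolinux/boot.cat",
                      efi_img="efi.img"):
    return [
        "mkisofs", "-V", label, "-o", iso_img, "-U",
        "-J", "-joliet-long", "-r", "-iso-level", "2",
        # First El Torito entry: BIOS boot via isolinux.
        "-b", iso_bootimg, "-c", iso_bootcat,
        "-no-emul-boot", "-boot-load-size", "4", "-boot-info-table",
        # Second El Torito entry: EFI boot via the ESP image.
        "-eltorito-alt-boot", "-eltorito-platform", "0xEF",
        "-eltorito-boot", efi_img, "-no-emul-boot",
        isodir,
    ]

# subprocess.run(build_mkisofs_cmd("LIVECD", "/tmp/out.iso", "/tmp/ISO"),
#                check=True)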
diff --git a/scripts/lib/wic/plugins/source/rawcopy.py b/scripts/lib/wic/plugins/source/rawcopy.py
deleted file mode 100644
index 21903c2f23..0000000000
--- a/scripts/lib/wic/plugins/source/rawcopy.py
+++ /dev/null
@@ -1,115 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import logging
8import os
9import signal
10import subprocess
11
12from wic import WicError
13from wic.pluginbase import SourcePlugin
14from wic.misc import exec_cmd, get_bitbake_var
15from wic.filemap import sparse_copy
16
17logger = logging.getLogger('wic')
18
19class RawCopyPlugin(SourcePlugin):
20 """
21 Populate partition content from raw image file.
22 """
23
24 name = 'rawcopy'
25
26 @staticmethod
27 def do_image_label(fstype, dst, label):
28 # don't create label when fstype is none
29 if fstype == 'none':
30 return
31
32 if fstype.startswith('ext'):
33 cmd = 'tune2fs -L %s %s' % (label, dst)
34 elif fstype in ('msdos', 'vfat'):
35 cmd = 'dosfslabel %s %s' % (dst, label)
36 elif fstype == 'btrfs':
37 cmd = 'btrfs filesystem label %s %s' % (dst, label)
38 elif fstype == 'swap':
39 cmd = 'mkswap -L %s %s' % (label, dst)
40 elif fstype in ('squashfs', 'erofs'):
41 raise WicError("It's not possible to update a %s "
42 "filesystem label '%s'" % (fstype, label))
43 else:
44 raise WicError("Cannot update filesystem label: "
45 "Unknown fstype: '%s'" % (fstype))
46
47 exec_cmd(cmd)
48
49 @staticmethod
50 def do_image_uncompression(src, dst, workdir):
51 def subprocess_setup():
52 # Python installs a SIGPIPE handler by default. This is usually not what
53 # non-Python subprocesses expect.
54 # SIGPIPE errors are known issues with gzip/bash
55 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
56
57 extension = os.path.splitext(src)[1]
58 decompressor = {
59 ".bz2": "bzip2",
60 ".gz": "gzip",
61 ".xz": "xz",
62 ".zst": "zstd -f",
63 }.get(extension)
64 if not decompressor:
65 raise WicError("Unsupported compressor filename extension: %s" % extension)
66 cmd = "%s -dc %s > %s" % (decompressor, src, dst)
67 subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=workdir)
68
69 @classmethod
70 def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
71 oe_builddir, bootimg_dir, kernel_dir,
72 rootfs_dir, native_sysroot):
73 """
74 Called to do the actual content population for a partition i.e. it
75 'prepares' the partition to be incorporated into the image.
76 """
77 if not kernel_dir:
78 kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
79 if not kernel_dir:
80 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
81
82 logger.debug('Kernel dir: %s', kernel_dir)
83
84 if 'file' not in source_params:
85 raise WicError("No file specified")
86
87 if 'unpack' in source_params:
88 img = os.path.join(kernel_dir, source_params['file'])
89 src = os.path.join(cr_workdir, os.path.splitext(source_params['file'])[0])
90 RawCopyPlugin.do_image_uncompression(img, src, cr_workdir)
91 else:
92 src = os.path.join(kernel_dir, source_params['file'])
93
94 dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno))
95
96 if not os.path.exists(os.path.dirname(dst)):
97 os.makedirs(os.path.dirname(dst))
98
99 if 'skip' in source_params:
100 sparse_copy(src, dst, skip=int(source_params['skip']))
101 else:
102 sparse_copy(src, dst)
103
104 # get the size in the right units for kickstart (kB)
105 du_cmd = "du -Lbks %s" % dst
106 out = exec_cmd(du_cmd)
107 filesize = int(out.split()[0])
108
109 if filesize > part.size:
110 part.size = filesize
111
112 if part.label:
113 RawCopyPlugin.do_image_label(part.fstype, dst, part.label)
114
115 part.source_file = dst
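The decompression step above is a plain extension-to-tool dispatch, with one subtlety: Python installs a SIGPIPE ignore handler, so the default handler has to be restored before spawning the shell pipeline or tools like gzip can misbehave. A standalone sketch of the same logic (function names are hypothetical):

import os
import signal
import subprocess

DECOMPRESSORS = {
    ".bz2": "bzip2",
    ".gz": "gzip",
    ".xz": "xz",
    ".zst": "zstd -f",
}

def uncompress(src, dst, workdir):
    ext = os.path.splitext(src)[1]
    decompressor = DECOMPRESSORS.get(ext)
    if not decompressor:
        raise ValueError("Unsupported compressor filename extension: %s" % ext)
    # Restore default SIGPIPE handling in the child before exec'ing the shell.
    cmd = "%s -dc %s > %s" % (decompressor, src, dst)
    subprocess.call(cmd, shell=True, cwd=workdir,
                    preexec_fn=lambda: signal.signal(signal.SIGPIPE,
                                                     signal.SIG_DFL))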
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py
deleted file mode 100644
index 06fce06bb1..0000000000
--- a/scripts/lib/wic/plugins/source/rootfs.py
+++ /dev/null
@@ -1,236 +0,0 @@
1#
2# Copyright (c) 2014, Intel Corporation.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# DESCRIPTION
7# This implements the 'rootfs' source plugin class for 'wic'
8#
9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
11# Joao Henrique Ferreira de Freitas <joaohf (at] gmail.com>
12#
13
14import logging
15import os
16import shutil
17import sys
18
19from oe.path import copyhardlinktree
20from pathlib import Path
21
22from wic import WicError
23from wic.pluginbase import SourcePlugin
24from wic.misc import get_bitbake_var, exec_native_cmd
25
26logger = logging.getLogger('wic')
27
28class RootfsPlugin(SourcePlugin):
29 """
30 Populate partition content from a rootfs directory.
31 """
32
33 name = 'rootfs'
34
35 @staticmethod
36 def __validate_path(cmd, rootfs_dir, path):
37 if os.path.isabs(path):
38 logger.error("%s: Must be relative: %s" % (cmd, path))
39 sys.exit(1)
40
41 # Disallow climbing outside of parent directory using '..',
42 # because doing so could be quite disastrous (we will delete the
43 # directory, or modify a directory outside OpenEmbedded).
44 full_path = os.path.abspath(os.path.join(rootfs_dir, path))
45 if not full_path.startswith(os.path.realpath(rootfs_dir)):
46 logger.error("%s: Must point inside the rootfs: %s" % (cmd, path))
47 sys.exit(1)
48
49 return full_path
50
51 @staticmethod
52 def __get_rootfs_dir(rootfs_dir):
53 if rootfs_dir and os.path.isdir(rootfs_dir):
54 return os.path.realpath(rootfs_dir)
55
56 image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir)
57 if not os.path.isdir(image_rootfs_dir):
58 raise WicError("No valid artifact IMAGE_ROOTFS from image "
59 "named %s has been found at %s, exiting." %
60 (rootfs_dir, image_rootfs_dir))
61
62 return os.path.realpath(image_rootfs_dir)
63
64 @staticmethod
65 def __get_pseudo(native_sysroot, rootfs, pseudo_dir):
66 pseudo = "export PSEUDO_PREFIX=%s/usr;" % native_sysroot
67 pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir
68 pseudo += "export PSEUDO_PASSWD=%s;" % rootfs
69 pseudo += "export PSEUDO_NOSYMLINKEXP=1;"
70 pseudo += "%s " % get_bitbake_var("FAKEROOTCMD")
71 return pseudo
72
73 @classmethod
74 def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
75 oe_builddir, bootimg_dir, kernel_dir,
76 krootfs_dir, native_sysroot):
77 """
78 Called to do the actual content population for a partition i.e. it
79 'prepares' the partition to be incorporated into the image.
80 In this case, prepare content for legacy bios boot partition.
81 """
82 if part.rootfs_dir is None:
83 if 'ROOTFS_DIR' not in krootfs_dir:
84 raise WicError("Couldn't find --rootfs-dir, exiting")
85
86 rootfs_dir = krootfs_dir['ROOTFS_DIR']
87 else:
88 if part.rootfs_dir in krootfs_dir:
89 rootfs_dir = krootfs_dir[part.rootfs_dir]
90 elif part.rootfs_dir:
91 rootfs_dir = part.rootfs_dir
92 else:
93 raise WicError("Couldn't find --rootfs-dir=%s connection or "
94 "it is not a valid path, exiting" % part.rootfs_dir)
95
96 part.rootfs_dir = cls.__get_rootfs_dir(rootfs_dir)
97 part.has_fstab = os.path.exists(os.path.join(part.rootfs_dir, "etc/fstab"))
98 pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo")
99 if not os.path.lexists(pseudo_dir):
100 pseudo_dir = os.path.join(cls.__get_rootfs_dir(None), '../pseudo')
101
102 if not os.path.lexists(pseudo_dir):
103 logger.warning("%s folder does not exist. "
104 "Usernames and permissions will be invalid", pseudo_dir)
105 pseudo_dir = None
106
107 new_rootfs = None
108 new_pseudo = None
109 # Handle excluded paths.
110 if part.exclude_path or part.include_path or part.change_directory or part.update_fstab_in_rootfs:
111 # We need a new rootfs directory we can safely modify without
112 # interfering with other tasks. Copy to workdir.
113 new_rootfs = os.path.realpath(os.path.join(cr_workdir, "rootfs%d" % part.lineno))
114
115 if os.path.lexists(new_rootfs):
116 shutil.rmtree(os.path.join(new_rootfs))
117
118 if part.change_directory:
119 cd = part.change_directory
120 if cd[-1] == '/':
121 cd = cd[:-1]
122 orig_dir = cls.__validate_path("--change-directory", part.rootfs_dir, cd)
123 else:
124 orig_dir = part.rootfs_dir
125 copyhardlinktree(orig_dir, new_rootfs)
126
127 # Convert the pseudo directory to its new location
128 if pseudo_dir:
129 new_pseudo = os.path.realpath(
130 os.path.join(cr_workdir, "pseudo%d" % part.lineno))
131 if os.path.lexists(new_pseudo):
132 shutil.rmtree(new_pseudo)
133 os.mkdir(new_pseudo)
134 shutil.copy(os.path.join(pseudo_dir, "files.db"),
135 os.path.join(new_pseudo, "files.db"))
136
137 pseudo_cmd = "%s -B -m %s -M %s" % (cls.__get_pseudo(native_sysroot,
138 new_rootfs,
139 new_pseudo),
140 orig_dir, new_rootfs)
141 exec_native_cmd(pseudo_cmd, native_sysroot)
142
143 for in_path in part.include_path or []:
144 # Parse arguments
145 include_path = in_path[0]
146 if len(in_path) > 2:
147 logger.error("Invalid number of arguments for include-path")
148 sys.exit(1)
149 if len(in_path) == 2:
150 path = in_path[1]
151 else:
152 path = None
153
154 # Pack files to be included into a tar file.
155 # We need to create a tar file, because that way we can keep the
156 # permissions from the files even when they belong to different
157 # pseudo environments.
158 # If we simply copy files using copyhardlinktree/copytree... the
159 # copied files will belong to the user running wic.
160 tar_file = os.path.realpath(
161 os.path.join(cr_workdir, "include-path%d.tar" % part.lineno))
162 if os.path.isfile(include_path):
163 parent = os.path.dirname(os.path.realpath(include_path))
164 tar_cmd = "tar c --owner=root --group=root -f %s -C %s %s" % (
165 tar_file, parent, os.path.relpath(include_path, parent))
166 exec_native_cmd(tar_cmd, native_sysroot)
167 else:
168 if include_path in krootfs_dir:
169 include_path = krootfs_dir[include_path]
170 include_path = cls.__get_rootfs_dir(include_path)
171 include_pseudo = os.path.join(include_path, "../pseudo")
172 if os.path.lexists(include_pseudo):
173 pseudo = cls.__get_pseudo(native_sysroot, include_path,
174 include_pseudo)
175 tar_cmd = "tar cf %s -C %s ." % (tar_file, include_path)
176 else:
177 pseudo = None
178 tar_cmd = "tar c --owner=root --group=root -f %s -C %s ." % (
179 tar_file, include_path)
180 exec_native_cmd(tar_cmd, native_sysroot, pseudo)
181
182 # Create destination
183 if path:
184 destination = cls.__validate_path("--include-path", new_rootfs, path)
185 Path(destination).mkdir(parents=True, exist_ok=True)
186 else:
187 destination = new_rootfs
188
189 # Extract into destination
190 untar_cmd = "tar xf %s -C %s" % (tar_file, destination)
191 if new_pseudo:
192 pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo)
193 else:
194 pseudo = None
195 exec_native_cmd(untar_cmd, native_sysroot, pseudo)
196 os.remove(tar_file)
197
198 for orig_path in part.exclude_path or []:
199 path = orig_path
200
201 full_path = cls.__validate_path("--exclude-path", new_rootfs, path)
202
203 if not os.path.lexists(full_path):
204 continue
205
206 if new_pseudo:
207 pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo)
208 else:
209 pseudo = None
210 if path.endswith(os.sep):
211 # Delete content only.
212 for entry in os.listdir(full_path):
213 full_entry = os.path.join(full_path, entry)
214 rm_cmd = "rm -rf %s" % (full_entry)
215 exec_native_cmd(rm_cmd, native_sysroot, pseudo)
216 else:
217 # Delete whole directory.
218 rm_cmd = "rm -rf %s" % (full_path)
219 exec_native_cmd(rm_cmd, native_sysroot, pseudo)
220
221 # Update part.has_fstab here as fstab may have been added or
222 # removed by the above modifications.
223 part.has_fstab = os.path.exists(os.path.join(new_rootfs, "etc/fstab"))
224 if part.update_fstab_in_rootfs and part.has_fstab and not part.no_fstab_update:
225 fstab_path = os.path.join(new_rootfs, "etc/fstab")
226 # Assume that fstab should always be owned by root with fixed permissions
227 install_cmd = "install -m 0644 -p %s %s" % (part.updated_fstab_path, fstab_path)
228 if new_pseudo:
229 pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo)
230 else:
231 pseudo = None
232 exec_native_cmd(install_cmd, native_sysroot, pseudo)
233
234 part.prepare_rootfs(cr_workdir, oe_builddir,
235 new_rootfs or part.rootfs_dir, native_sysroot,
236 pseudo_dir = new_pseudo or pseudo_dir)
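Nearly every modification the rootfs plugin makes runs under pseudo so that file ownership and permissions recorded in the pseudo database survive the copy, include, exclude, and fstab steps. A sketch of the environment-prefix assembly done by __get_pseudo() above; the example paths are hypothetical and fakeroot_cmd stands in for the FAKEROOTCMD value normally fetched with get_bitbake_var():

def pseudo_prefix(native_sysroot, rootfs, pseudo_dir, fakeroot_cmd="pseudo"):
    # Build the "export VAR=...;" prefix that wic prepends to commands
    # which must run inside the pseudo environment.
    env = {
        "PSEUDO_PREFIX": "%s/usr" % native_sysroot,
        "PSEUDO_LOCALSTATEDIR": pseudo_dir,
        "PSEUDO_PASSWD": rootfs,
        "PSEUDO_NOSYMLINKEXP": "1",
    }
    exports = "".join("export %s=%s;" % kv for kv in env.items())
    return "%s%s " % (exports, fakeroot_cmd)

# e.g. pseudo_prefix(...) + "tar xf include.tar -C /path/to/new_rootfs"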