path: root/scripts/lib
Diffstat (limited to 'scripts/lib')
 -rw-r--r--  scripts/lib/build_perf/html/measurement_chart.html | 214
 -rw-r--r--  scripts/lib/build_perf/html/report.html | 195
 -rw-r--r--  scripts/lib/build_perf/report.py | 5
 -rw-r--r--  scripts/lib/checklayer/__init__.py | 12
 -rw-r--r--  scripts/lib/checklayer/cases/common.py | 35
 -rw-r--r--  scripts/lib/devtool/__init__.py | 4
 -rw-r--r--  scripts/lib/devtool/build.py | 2
 -rw-r--r--  scripts/lib/devtool/build_sdk.py | 7
 -rw-r--r--  scripts/lib/devtool/ide_plugins/ide_code.py | 1
 -rwxr-xr-x  scripts/lib/devtool/ide_sdk.py | 109
 -rw-r--r--  scripts/lib/devtool/menuconfig.py | 7
 -rw-r--r--  scripts/lib/devtool/standard.py | 320
 -rw-r--r--  scripts/lib/devtool/upgrade.py | 68
 -rw-r--r--  scripts/lib/devtool/utilcmds.py | 2
 -rw-r--r--  scripts/lib/recipetool/append.py | 14
 -rw-r--r--  scripts/lib/recipetool/create.py | 253
 -rw-r--r--  scripts/lib/recipetool/create_go.py | 679
 -rw-r--r--  scripts/lib/recipetool/create_npm.py | 96
 -rw-r--r--  scripts/lib/recipetool/licenses.csv | 37
 -rw-r--r--  scripts/lib/resulttool/junit.py | 77
 -rwxr-xr-x  scripts/lib/resulttool/manualexecution.py | 2
 -rw-r--r--  scripts/lib/resulttool/regression.py | 3
 -rw-r--r--  scripts/lib/resulttool/report.py | 2
 -rw-r--r--  scripts/lib/resulttool/resultutils.py | 76
 -rw-r--r--  scripts/lib/resulttool/store.py | 27
 -rw-r--r--  scripts/lib/scriptutils.py | 20
 -rw-r--r--  scripts/lib/wic/canned-wks/common.wks.inc | 2
 -rw-r--r--  scripts/lib/wic/canned-wks/directdisk-gpt.wks | 2
 -rw-r--r--  scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks | 2
 -rw-r--r--  scripts/lib/wic/canned-wks/efi-bootdisk.wks.in | 2
 -rw-r--r--  scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in | 3
 -rw-r--r--  scripts/lib/wic/canned-wks/mkefidisk.wks | 4
 -rw-r--r--  scripts/lib/wic/canned-wks/mkhybridiso.wks | 2
 -rw-r--r--  scripts/lib/wic/canned-wks/sdimage-bootpart.wks | 2
 -rw-r--r--  scripts/lib/wic/canned-wks/systemd-bootdisk.wks | 2
 -rw-r--r--  scripts/lib/wic/engine.py | 22
 -rw-r--r--  scripts/lib/wic/help.py | 42
 -rw-r--r--  scripts/lib/wic/partition.py | 52
 -rw-r--r--  scripts/lib/wic/pluginbase.py | 2
 -rw-r--r--  scripts/lib/wic/plugins/imager/direct.py | 58
 -rw-r--r--  scripts/lib/wic/plugins/source/bootimg_biosplusefi.py (renamed from scripts/lib/wic/plugins/source/bootimg-biosplusefi.py) | 30
 -rw-r--r--  scripts/lib/wic/plugins/source/bootimg_efi.py (renamed from scripts/lib/wic/plugins/source/bootimg-efi.py) | 208
 -rw-r--r--  scripts/lib/wic/plugins/source/bootimg_partition.py (renamed from scripts/lib/wic/plugins/source/bootimg-partition.py) | 43
 -rw-r--r--  scripts/lib/wic/plugins/source/bootimg_pcbios.py (renamed from scripts/lib/wic/plugins/source/bootimg-pcbios.py) | 4
 -rw-r--r--  scripts/lib/wic/plugins/source/isoimage_isohybrid.py (renamed from scripts/lib/wic/plugins/source/isoimage-isohybrid.py) | 10
 -rw-r--r--  scripts/lib/wic/plugins/source/rootfs.py | 4
46 files changed, 1093 insertions(+), 1670 deletions(-)
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html
index 65f1a227ad..86435273cf 100644
--- a/scripts/lib/build_perf/html/measurement_chart.html
+++ b/scripts/lib/build_perf/html/measurement_chart.html
@@ -1,50 +1,168 @@
-<script type="text/javascript">
-chartsDrawing += 1;
-google.charts.setOnLoadCallback(drawChart_{{ chart_elem_id }});
-function drawChart_{{ chart_elem_id }}() {
-  var data = new google.visualization.DataTable();
-
-  // Chart options
-  var options = {
-    theme : 'material',
-    legend: 'none',
-    hAxis: { format: '', title: 'Commit number',
-             minValue: {{ chart_opts.haxis.min }},
-             maxValue: {{ chart_opts.haxis.max }} },
-    {% if measurement.type == 'time' %}
-    vAxis: { format: 'h:mm:ss' },
-    {% else %}
-    vAxis: { format: '' },
-    {% endif %}
-    pointSize: 5,
-    chartArea: { left: 80, right: 15 },
-  };
-
-  // Define data columns
-  data.addColumn('number', 'Commit');
-  data.addColumn('{{ measurement.value_type.gv_data_type }}',
-                 '{{ measurement.value_type.quantity }}');
-  // Add data rows
-  data.addRows([
-    {% for sample in measurement.samples %}
-    [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}],
-    {% endfor %}
-  ]);
-
-  // Finally, draw the chart
-  chart_div = document.getElementById('{{ chart_elem_id }}');
-  var chart = new google.visualization.LineChart(chart_div);
-  google.visualization.events.addListener(chart, 'ready', function () {
-    //chart_div = document.getElementById('{{ chart_elem_id }}');
-    //chart_div.innerHTML = '<img src="' + chart.getImageURI() + '">';
-    png_div = document.getElementById('{{ chart_elem_id }}_png');
-    png_div.outerHTML = '<a id="{{ chart_elem_id }}_png" href="' + chart.getImageURI() + '">PNG</a>';
-    console.log("CHART READY: {{ chart_elem_id }}");
-    chartsDrawing -= 1;
-    if (chartsDrawing == 0)
-      console.log("ALL CHARTS READY");
+<script type="module">
+  // Get raw data
+  const rawData = [
+    {% for sample in measurement.samples %}
+    [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'],
+    {% endfor %}
+  ];
+
+  const convertToMinute = (time) => {
+    return time[0]*60 + time[1] + time[2]/60 + time[3]/3600;
+  }
+
+  // Update value format to either minutes or leave as size value
+  const updateValue = (value) => {
+    // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds]
+    return Array.isArray(value) ? convertToMinute(value) : value
+  }
+
+  // Convert raw data to the format: [time, value]
+  const data = rawData.map(([commit, value, time]) => {
+    return [
+      // The Date object takes values in milliseconds rather than seconds. So to use a Unix timestamp we have to multiply it by 1000.
+      new Date(time * 1000).getTime(),
+      // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds]
+      updateValue(value)
+    ]
+  });
+
+  const commitCountList = rawData.map(([commit, value, time]) => {
+    return commit
+  });
+
+  const commitCountData = rawData.map(([commit, value, time]) => {
+    return updateValue(value)
+  });
+
+  // Set chart options
+  const option_start_time = {
+    tooltip: {
+      trigger: 'axis',
+      enterable: true,
+      position: function (point, params, dom, rect, size) {
+        return [point[0], '0%'];
+      },
+      formatter: function (param) {
+        const value = param[0].value[1]
+        const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
+        const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
+
+        // Add commit hash to the tooltip as a link
+        const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
+        if ('{{ measurement.value_type.quantity }}' == 'time') {
+          const hours = Math.floor(value/60)
+          const minutes = Math.floor(value % 60)
+          const seconds = Math.floor((value * 60) % 60)
+          return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
+        }
+        return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
+      ;}
+    },
+    xAxis: {
+      type: 'time',
+    },
+    yAxis: {
+      name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB',
+      type: 'value',
+      min: function(value) {
+        return Math.round(value.min - 0.5);
+      },
+      max: function(value) {
+        return Math.round(value.max + 0.5);
+      }
+    },
+    dataZoom: [
+      {
+        type: 'slider',
+        xAxisIndex: 0,
+        filterMode: 'none'
+      },
+    ],
+    series: [
+      {
+        name: '{{ measurement.value_type.quantity }}',
+        type: 'line',
+        symbol: 'none',
+        data: data
+      }
+    ]
+  };
+
+  const option_commit_count = {
+    tooltip: {
+      trigger: 'axis',
+      enterable: true,
+      position: function (point, params, dom, rect, size) {
+        return [point[0], '0%'];
+      },
+      formatter: function (param) {
+        const value = param[0].value
+        const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
+        const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
+        // Add commit hash to the tooltip as a link
+        const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
+        if ('{{ measurement.value_type.quantity }}' == 'time') {
+          const hours = Math.floor(value/60)
+          const minutes = Math.floor(value % 60)
+          const seconds = Math.floor((value * 60) % 60)
+          return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
+        }
+        return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
+      ;}
+    },
+    xAxis: {
+      name: 'Commit count',
+      type: 'category',
+      data: commitCountList
+    },
+    yAxis: {
+      name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB',
+      type: 'value',
+      min: function(value) {
+        return Math.round(value.min - 0.5);
+      },
+      max: function(value) {
+        return Math.round(value.max + 0.5);
+      }
+    },
+    dataZoom: [
+      {
+        type: 'slider',
+        xAxisIndex: 0,
+        filterMode: 'none'
+      },
+    ],
+    series: [
+      {
+        name: '{{ measurement.value_type.quantity }}',
+        type: 'line',
+        symbol: 'none',
+        data: commitCountData
+      }
+    ]
+  };
+
+  // Draw chart
+  const draw_chart = (chart_id, option) => {
+    let chart_name
+    const chart_div = document.getElementById(chart_id);
+    // Set dark mode
+    if (window.matchMedia('(prefers-color-scheme: dark)').matches) {
+      chart_name= echarts.init(chart_div, 'dark', {
+        height: 320
+      });
+    } else {
+      chart_name= echarts.init(chart_div, null, {
+        height: 320
+      });
+    }
+    // Change chart size with browser resize
+    window.addEventListener('resize', function() {
+      chart_name.resize();
   });
-  chart.draw(data, options);
+    return chart_name.setOption(option);
 }
-</script>
 
+  draw_chart('{{ chart_elem_start_time_id }}', option_start_time)
+  draw_chart('{{ chart_elem_commit_count_id }}', option_commit_count)
+</script>
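For illustration, not part of the patch: the template's convertToMinute helper flattens a [hours, minutes, seconds, milliseconds] sample into fractional minutes before plotting. A minimal Python sketch of the same arithmetic, assuming the same input format as the template:

    def convert_to_minutes(time):
        # time is [hours, minutes, seconds, milliseconds], as the template assumes
        hours, minutes, seconds, millis = time
        return hours * 60 + minutes + seconds / 60 + millis / 3600

    print(convert_to_minutes([1, 30, 30, 0]))  # 90.5 minutes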
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html
index d1ba6f2578..28cd80e738 100644
--- a/scripts/lib/build_perf/html/report.html
+++ b/scripts/lib/build_perf/html/report.html
@@ -3,17 +3,14 @@
 <head>
 {# Scripts, for visualization#}
 <!--START-OF-SCRIPTS-->
-<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
-<script type="text/javascript">
-google.charts.load('current', {'packages':['corechart']});
-var chartsDrawing = 0;
-</script>
+<script src=" https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js "></script>
 
 {# Render measurement result charts #}
 {% for test in test_data %}
   {% if test.status == 'SUCCESS' %}
     {% for measurement in test.measurements %}
-      {% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %}
+      {% set chart_elem_start_time_id = test.name + '_' + measurement.name + '_chart_start_time' %}
+      {% set chart_elem_commit_count_id = test.name + '_' + measurement.name + '_chart_commit_count' %}
       {% include 'measurement_chart.html' %}
     {% endfor %}
   {% endif %}
@@ -23,28 +20,29 @@ var chartsDrawing = 0;
 
 {# Styles #}
 <style>
+:root {
+  --text: #000;
+  --bg: #fff;
+  --h2heading: #707070;
+  --link: #0000EE;
+  --trtopborder: #9ca3af;
+  --trborder: #e5e7eb;
+  --chartborder: #f0f0f0;
+  }
 .meta-table {
   font-size: 14px;
   text-align: left;
   border-collapse: collapse;
 }
-.meta-table tr:nth-child(even){background-color: #f2f2f2}
-meta-table th, .meta-table td {
-  padding: 4px;
-}
 .summary {
-  margin: 0;
   font-size: 14px;
   text-align: left;
   border-collapse: collapse;
 }
-summary th, .meta-table td {
-  padding: 4px;
-}
 .measurement {
   padding: 8px 0px 8px 8px;
-  border: 2px solid #f0f0f0;
-  margin-bottom: 10px;
+  border: 2px solid var(--chartborder);
+  margin: 1.5rem 0;
 }
 .details {
   margin: 0;
@@ -64,18 +62,97 @@ summary th, .meta-table td {
   background-color: #f0f0f0;
   margin-left: 10px;
 }
-hr {
-  color: #f0f0f0;
+.card-container {
+  border-bottom-width: 1px;
+  padding: 1.25rem 3rem;
+  box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1);
+  border-radius: 0.25rem;
+}
+body {
+  font-family: 'Helvetica', sans-serif;
+  margin: 3rem 8rem;
+  background-color: var(--bg);
+  color: var(--text);
+}
+h1 {
+  text-align: center;
 }
 h2 {
-  font-size: 20px;
+  font-size: 1.5rem;
   margin-bottom: 0px;
-  color: #707070;
+  color: var(--h2heading);
+  padding-top: 1.5rem;
 }
 h3 {
-  font-size: 16px;
+  font-size: 1.3rem;
   margin: 0px;
-  color: #707070;
+  color: var(--h2heading);
+  padding: 1.5rem 0;
+}
+h4 {
+  font-size: 14px;
+  font-weight: lighter;
+  line-height: 1.2rem;
+  margin: auto;
+  padding-top: 1rem;
+}
+table {
+  margin-top: 1.5rem;
+  line-height: 2rem;
+}
+tr {
+  border-bottom: 1px solid var(--trborder);
+}
+tr:first-child {
+  border-bottom: 1px solid var(--trtopborder);
+}
+tr:last-child {
+  border-bottom: none;
+}
+a {
+  text-decoration: none;
+  font-weight: bold;
+  color: var(--link);
+}
+a:hover {
+  color: #8080ff;
+}
+button {
+  background-color: #F3F4F6;
+  border: none;
+  outline: none;
+  cursor: pointer;
+  padding: 10px 12px;
+  transition: 0.3s;
+  border-radius: 8px;
+  color: #3A4353;
+}
+button:hover {
+  background-color: #d6d9e0;
+}
+.tab button.active {
+  background-color: #d6d9e0;
+}
+@media (prefers-color-scheme: dark) {
+  :root {
+    --text: #e9e8fa;
+    --bg: #0F0C28;
+    --h2heading: #B8B7CB;
+    --link: #87cefa;
+    --trtopborder: #394150;
+    --trborder: #212936;
+    --chartborder: #b1b0bf;
+  }
+  button {
+    background-color: #28303E;
+    color: #fff;
+  }
+  button:hover {
+    background-color: #545a69;
+  }
+  .tab button.active {
+    background-color: #545a69;
+  }
 }
 </style>
 
@@ -83,13 +160,14 @@ h3 {
 </head>
 
 {% macro poky_link(commit) -%}
     <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a>
 {%- endmacro %}
 
-<body><div style="width: 700px">
+<body><div>
+  <h1 style="text-align: center;">Performance Test Report</h1>
   {# Test metadata #}
   <h2>General</h2>
-  <hr>
+  <h4>The table provides an overview of the comparison between two selected commits from the same branch.</h4>
   <table class="meta-table" style="width: 100%">
     <tr>
       <th></th>
@@ -112,19 +190,21 @@ h3 {
 
   {# Test result summary #}
   <h2>Test result summary</h2>
-  <hr>
+  <h4>The test summary presents a thorough breakdown of each test conducted on the branch, including details such as build time and disk space consumption. Additionally, it gives insights into the average time taken for test execution, along with absolute and relative values for a better understanding.</h4>
   <table class="summary" style="width: 100%">
+    <tr>
+      <th>Test name</th>
+      <th>Measurement description</th>
+      <th>Mean value</th>
+      <th>Absolute difference</th>
+      <th>Relative difference</th>
+    </tr>
     {% for test in test_data %}
-      {% if loop.index is even %}
-        {% set row_style = 'style="background-color: #f2f2f2"' %}
-      {% else %}
-        {% set row_style = 'style="background-color: #ffffff"' %}
-      {% endif %}
       {% if test.status == 'SUCCESS' %}
         {% for measurement in test.measurements %}
           <tr {{ row_style }}>
             {% if loop.index == 1 %}
-              <td>{{ test.name }}: {{ test.description }}</td>
+              <td><a href=#{{test.name}}>{{ test.name }}: {{ test.description }}</a></td>
             {% else %}
               {# add empty cell in place of the test name#}
               <td></td>
@@ -153,10 +233,12 @@ h3 {
   </table>
 
   {# Detailed test results #}
+  <h2>Test details</h2>
+  <h4>The following section provides details of each test, accompanied by charts representing build time and disk usage over time or by commit number.</h4>
   {% for test in test_data %}
-    <h2>{{ test.name }}: {{ test.description }}</h2>
-    <hr>
+    <h3 style="color: #000;" id={{test.name}}>{{ test.name }}: {{ test.description }}</h3>
     {% if test.status == 'SUCCESS' %}
+      <div class="card-container">
       {% for measurement in test.measurements %}
         <div class="measurement">
           <h3>{{ measurement.description }}</h3>
@@ -178,7 +260,18 @@ h3 {
             <tr>
               <td style="width: 75%">
                 {# Linechart #}
-                <div id="{{ test.name }}_{{ measurement.name }}_chart"></div>
+                <div class="tab {{ test.name }}_{{ measurement.name }}_tablinks">
+                  <button class="tablinks active" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_start_time', '{{ test.name }}_{{ measurement.name }}')">Chart with start time</button>
+                  <button class="tablinks" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_commit_count', '{{ test.name }}_{{ measurement.name }}')">Chart with commit count</button>
+                </div>
+                <div class="{{ test.name }}_{{ measurement.name }}_tabcontent">
+                  <div id="{{ test.name }}_{{ measurement.name }}_start_time" class="tabcontent" style="display: block;">
+                    <div id="{{ test.name }}_{{ measurement.name }}_chart_start_time"></div>
+                  </div>
+                  <div id="{{ test.name }}_{{ measurement.name }}_commit_count" class="tabcontent" style="display: none;">
+                    <div id="{{ test.name }}_{{ measurement.name }}_chart_commit_count"></div>
+                  </div>
+                </div>
               </td>
               <td>
                 {# Measurement statistics #}
@@ -275,7 +368,8 @@ h3 {
           {% endif %}
         {% endif %}
         </div>
       {% endfor %}
+      </div>
     {# Unsuccessful test #}
     {% else %}
       <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }}
@@ -284,6 +378,31 @@ h3 {
       <div class="preformatted">{{ test.message }}</div>
     {% endif %}
 {% endfor %}
-</div></body>
-</html>
+</div>
 
+<script>
+function openChart(event, chartType, chartName) {
+  let i, tabcontents, tablinks
+  tabcontents = document.querySelectorAll(`.${chartName}_tabcontent > .tabcontent`);
+  tabcontents.forEach((tabcontent) => {
+    tabcontent.style.display = "none";
+  });
+
+  tablinks = document.querySelectorAll(`.${chartName}_tablinks > .tablinks`);
+  tablinks.forEach((tabLink) => {
+    tabLink.classList.remove('active');
+  });
+
+  const targetTab = document.getElementById(chartType)
+  targetTab.style.display = "block";
+
+  // Call resize on the ECharts instance to redraw the chart
+  const chartContainer = targetTab.querySelector('div')
+  echarts.init(chartContainer).resize();
+
+  event.currentTarget.classList.add('active');
+}
+</script>
+
+</body>
+</html>
diff --git a/scripts/lib/build_perf/report.py b/scripts/lib/build_perf/report.py
index ab77424cc7..f4e6a92e09 100644
--- a/scripts/lib/build_perf/report.py
+++ b/scripts/lib/build_perf/report.py
@@ -294,7 +294,7 @@ class SizeVal(MeasurementVal):
         return "null"
     return self / 1024
 
-def measurement_stats(meas, prefix=''):
+def measurement_stats(meas, prefix='', time=0):
     """Get statistics of a measurement"""
     if not meas:
         return {prefix + 'sample_cnt': 0,
@@ -319,6 +319,8 @@ def measurement_stats(meas, prefix=''):
     stats['quantity'] = val_cls.quantity
     stats[prefix + 'sample_cnt'] = len(values)
 
+    # Add start time for both type sysres and disk usage
+    start_time = time
     mean_val = val_cls(mean(values))
     min_val = val_cls(min(values))
     max_val = val_cls(max(values))
@@ -334,6 +336,7 @@ def measurement_stats(meas, prefix=''):
     stats[prefix + 'max'] = max_val
     stats[prefix + 'minus'] = val_cls(mean_val - min_val)
     stats[prefix + 'plus'] = val_cls(max_val - mean_val)
+    stats[prefix + 'start_time'] = start_time
 
     return stats
 
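For illustration, not part of the patch: a minimal sketch of the extended measurement_stats() API. The 'meas' dict and the timestamp below are placeholders; the value passed as time is simply returned under the 'start_time' key alongside the existing statistics.

    stats = measurement_stats(meas, prefix='', time=1700000000)  # placeholder Unix timestamp
    print(stats['mean'], stats['start_time'])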
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
index 62ecdfe390..86aadf39a6 100644
--- a/scripts/lib/checklayer/__init__.py
+++ b/scripts/lib/checklayer/__init__.py
@@ -452,3 +452,15 @@ def compare_signatures(old_sigs, curr_sigs):
         msg.extend([' ' + line for line in output.splitlines()])
         msg.append('')
     return '\n'.join(msg)
+
+
+def get_git_toplevel(directory):
+    """
+    Try and find the top of the git repository that directory might be in.
+    Returns the top-level directory, or None.
+    """
+    cmd = ["git", "-C", directory, "rev-parse", "--show-toplevel"]
+    try:
+        return subprocess.check_output(cmd, text=True).strip()
+    except:
+        return None
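For illustration, not part of the patch: how the new get_git_toplevel() helper behaves, assuming git is installed and the (hypothetical) path exists. Outside a repository, git rev-parse exits non-zero and the helper returns None.

    toplevel = get_git_toplevel('/srv/layers/meta-example')  # hypothetical path
    if toplevel:
        print('layer is inside the repository at', toplevel)
    else:
        print('not inside a git repository')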
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py
index 97b16f78c8..ddead69a7b 100644
--- a/scripts/lib/checklayer/cases/common.py
+++ b/scripts/lib/checklayer/cases/common.py
@@ -7,7 +7,7 @@ import glob
 import os
 import unittest
 import re
-from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures
+from checklayer import get_signatures, LayerType, check_command, compare_signatures, get_git_toplevel
 from checklayer.case import OECheckLayerTestCase
 
 class CommonCheckLayer(OECheckLayerTestCase):
@@ -40,6 +40,38 @@ class CommonCheckLayer(OECheckLayerTestCase):
         email_regex = re.compile(r"[^@]+@[^@]+")
         self.assertTrue(email_regex.match(data))
 
+    def find_file_by_name(self, globs):
+        """
+        Utility function to find a file that matches the specified list of
+        globs, in either the layer directory itself or the repository top-level
+        directory.
+        """
+        directories = [self.tc.layer["path"]]
+        toplevel = get_git_toplevel(directories[0])
+        if toplevel:
+            directories.append(toplevel)
+
+        for path in directories:
+            for name in globs:
+                files = glob.glob(os.path.join(path, name))
+                if files:
+                    return sorted(files)[0]
+        return None
+
+    def test_security(self):
+        """
+        Test that the layer has a SECURITY.md (or similar) file, either in the
+        layer itself or at the top of the containing git repository.
+        """
+        if self.tc.layer["type"] == LayerType.CORE:
+            raise unittest.SkipTest("Core layer's SECURITY is top level")
+
+        filename = self.find_file_by_name(("SECURITY", "SECURITY.*"))
+        self.assertTrue(filename, msg="Layer doesn't contain a SECURITY.md file.")
+
+        size = os.path.getsize(filename)
+        self.assertGreater(size, 0, msg=f"{filename} has no content.")
+
     def test_parse(self):
         check_command('Layer %s failed to parse.' % self.tc.layer['name'],
                       'bitbake -p')
@@ -72,7 +104,6 @@ class CommonCheckLayer(OECheckLayerTestCase):
                    self.tc.layer['name'])
             self.fail('\n'.join(msg))
 
-    @unittest.expectedFailure
     def test_patches_upstream_status(self):
         import sys
         sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/'))
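For illustration, not part of the patch: the lookup order that find_file_by_name() implements, sketched standalone with a hypothetical layer path. The layer directory is tried before the repository top level, and the lexicographically first match wins.

    import glob
    import os

    def find_first(directories, globs):
        # Mirrors find_file_by_name(): first directory in order, first sorted match
        for path in directories:
            for name in globs:
                files = glob.glob(os.path.join(path, name))
                if files:
                    return sorted(files)[0]
        return None

    print(find_first(['/srv/layers/meta-example'], ('SECURITY', 'SECURITY.*')))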
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 6133c1c5b4..fa6e1a34fd 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -234,7 +234,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
                 f.write(line)
 
     bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
-    bb.process.run('git tag -f %s' % basetag, cwd=repodir)
+    bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir)
 
     # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
     # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
@@ -256,7 +256,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
             oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
             found = False
     if os.path.exists(os.path.join(repodir, '.gitmodules')):
-        bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
+        bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir)
 
 def recipe_to_append(recipefile, config, wildcard=False):
     """
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
index 935ffab46c..0b2c3d33dc 100644
--- a/scripts/lib/devtool/build.py
+++ b/scripts/lib/devtool/build.py
@@ -49,7 +49,7 @@ def build(args, config, basepath, workspace):
         rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
         if not rd:
             return 1
-        deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
+        deploytask = 'do_deploy' in bb.build.listtasks(rd)
     finally:
         tinfoil.shutdown()
 
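For illustration, not part of the patch: bb.build.listtasks(d) returns the task names defined in a datastore, so membership tests no longer need to poke at the internal __BBTASKS variable. A sketch, assuming 'rd' is a parsed recipe datastore:

    import bb.build
    deploytask = 'do_deploy' in bb.build.listtasks(rd)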
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
index 1cd4831d2b..990303982c 100644
--- a/scripts/lib/devtool/build_sdk.py
+++ b/scripts/lib/devtool/build_sdk.py
@@ -5,14 +5,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-import os
-import subprocess
 import logging
-import glob
-import shutil
-import errno
-import sys
-import tempfile
 from devtool import DevtoolError
 from devtool import build_image
 
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
index a62b93224e..ee5bb57265 100644
--- a/scripts/lib/devtool/ide_plugins/ide_code.py
+++ b/scripts/lib/devtool/ide_plugins/ide_code.py
@@ -161,7 +161,6 @@ class IdeVSCode(IdeBase):
         if modified_recipe.build_tool is not BuildTool.CMAKE:
             return
         recommendations += [
-            "twxs.cmake",
             "ms-vscode.cmake-tools",
             "ms-vscode.cpptools",
             "ms-vscode.cpptools-extension-pack",
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
index 7807b322b3..931408fa74 100755
--- a/scripts/lib/devtool/ide_sdk.py
+++ b/scripts/lib/devtool/ide_sdk.py
@@ -167,7 +167,7 @@ class RecipeImage:
         self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')
 
         self.gdbserver_missing = 'gdbserver' not in image_d.getVar(
-            'IMAGE_INSTALL')
+            'IMAGE_INSTALL') and 'tools-debug' not in image_d.getVar('IMAGE_FEATURES')
 
     @property
     def debug_support(self):
@@ -288,6 +288,7 @@ class RecipeModified:
         self.bblayers = None
         self.bpn = None
         self.d = None
+        self.debug_build = None
         self.fakerootcmd = None
         self.fakerootenv = None
         self.libdir = None
@@ -333,7 +334,7 @@ class RecipeModified:
         self.srctree = workspace[workspacepn]['srctree']
         # Need to grab this here in case the source is within a subdirectory
         self.real_srctree = get_real_srctree(
-            self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR'))
+            self.srctree, recipe_d.getVar('S'), recipe_d.getVar('UNPACKDIR'))
         self.bbappend = workspace[workspacepn]['bbappend']
 
         self.ide_sdk_dir = os.path.join(
@@ -348,6 +349,7 @@ class RecipeModified:
         self.bpn = recipe_d.getVar('BPN')
         self.cxx = recipe_d.getVar('CXX')
         self.d = recipe_d.getVar('D')
+        self.debug_build = recipe_d.getVar('DEBUG_BUILD')
         self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
         self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
         self.libdir = recipe_d.getVar('libdir')
@@ -389,17 +391,6 @@ class RecipeModified:
         self.recipe_id = self.bpn + "-" + self.package_arch
         self.recipe_id_pretty = self.bpn + ": " + self.package_arch
 
-    def append_to_bbappend(self, append_text):
-        with open(self.bbappend, 'a') as bbap:
-            bbap.write(append_text)
-
-    def remove_from_bbappend(self, append_text):
-        with open(self.bbappend, 'r') as bbap:
-            text = bbap.read()
-        new_text = text.replace(append_text, '')
-        with open(self.bbappend, 'w') as bbap:
-            bbap.write(new_text)
-
     @staticmethod
     def is_valid_shell_variable(var):
         """Skip strange shell variables like systemd
@@ -412,34 +403,6 @@ class RecipeModified:
             return True
         return False
 
-    def debug_build_config(self, args):
-        """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise"""
-        if self.build_tool is BuildTool.CMAKE:
-            append_text = os.linesep + \
-                'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep
-            if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
-                self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = {
-                    "type": "STRING",
-                    "value": "Debug",
-                }
-                self.append_to_bbappend(append_text)
-            elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
-                del self.cmake_cache_vars['CMAKE_BUILD_TYPE']
-                self.remove_from_bbappend(append_text)
-        elif self.build_tool is BuildTool.MESON:
-            append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep
-            if args.debug_build_config and self.meson_buildtype != "debug":
-                self.mesonopts.replace(
-                    '--buildtype ' + self.meson_buildtype, '--buildtype debug')
-                self.append_to_bbappend(append_text)
-            elif self.meson_buildtype == "debug":
-                self.mesonopts.replace(
-                    '--buildtype debug', '--buildtype plain')
-                self.remove_from_bbappend(append_text)
-        elif args.debug_build_config:
-            logger.warn(
-                "--debug-build-config is not implemented for this build tool yet.")
-
     def solib_search_path(self, image):
         """Search for debug symbols in the rootfs and rootfs-dbg
 
@@ -493,7 +456,7 @@ class RecipeModified:
 
         vars = (key for key in d.keys() if not key.startswith(
             "__") and not d.getVarFlag(key, "func", False))
-        for var in vars:
+        for var in sorted(vars):
             func = d.getVarFlag(var, "func", False)
             if d.getVarFlag(var, 'python', False) and func:
                 continue
@@ -545,7 +508,7 @@ class RecipeModified:
         cache_vars = {}
         oecmake_args = d.getVar('OECMAKE_ARGS').split()
         extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
-        for param in oecmake_args + extra_oecmake:
+        for param in sorted(oecmake_args + extra_oecmake):
             d_pref = "-D"
             if param.startswith(d_pref):
                 param = param[len(d_pref):]
@@ -712,42 +675,6 @@ class RecipeModified:
                     binaries.append(abs_name[d_len:])
         return sorted(binaries)
 
-    def gen_delete_package_dirs(self):
-        """delete folders of package tasks
-
-        This is a workaround for and issue with recipes having their sources
-        downloaded as file://
-        This likely breaks pseudo like:
-        path mismatch [3 links]: ino 79147802 db
-        .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/
-           cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp
-        .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp
-        Since the files are anyway outdated lets deleted them (also from pseudo's db) to workaround this issue.
-        """
-        cmd_lines = ['#!/bin/sh']
-
-        # Set up the appropriate environment
-        newenv = dict(os.environ)
-        for varvalue in self.fakerootenv.split():
-            if '=' in varvalue:
-                splitval = varvalue.split('=', 1)
-                newenv[splitval[0]] = splitval[1]
-
-        # Replicate the environment variables from bitbake
-        for var, val in newenv.items():
-            if not RecipeModified.is_valid_shell_variable(var):
-                continue
-            cmd_lines.append('%s="%s"' % (var, val))
-            cmd_lines.append('export %s' % var)
-
-        # Delete the folders
-        pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [
-            "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]])
-        cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs)
-        cmd_lines.append('%s || { "%s failed"; exit 1; }' % (cmd, cmd))
-
-        return self.write_script(cmd_lines, 'delete_package_dirs')
-
     def gen_deploy_target_script(self, args):
         """Generate a script which does what devtool deploy-target does
 
@@ -785,8 +712,6 @@ class RecipeModified:
785 """Generate a script which does install and deploy""" 712 """Generate a script which does install and deploy"""
786 cmd_lines = ['#!/bin/bash'] 713 cmd_lines = ['#!/bin/bash']
787 714
788 cmd_lines.append(self.gen_delete_package_dirs())
789
790 # . oe-init-build-env $BUILDDIR 715 # . oe-init-build-env $BUILDDIR
791 # Note: Sourcing scripts with arguments requires bash 716 # Note: Sourcing scripts with arguments requires bash
792 cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % ( 717 cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
@@ -988,6 +913,13 @@ def ide_setup(args, config, basepath, workspace):
             recipe_modified.gen_meson_wrapper()
         ide.setup_modified_recipe(
             args, recipe_image, recipe_modified)
+
+        if recipe_modified.debug_build != '1':
+            logger.warn(
+                'Recipe %s is compiled with release build configuration. '
+                'You might want to add DEBUG_BUILD = "1" to %s. '
+                'Note that devtool modify --debug-build can do this automatically.',
+                recipe_modified.name, recipe_modified.bbappend)
     else:
         raise DevtoolError("Must not end up here.")
 
@@ -995,6 +927,15 @@ def ide_setup(args, config, basepath, workspace):
 def register_commands(subparsers, context):
     """Register devtool subcommands from this plugin"""
 
+    # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE
+    # configuration is generated. In the case of the eSDK, the bootstrapping is performed
+    # during the installation of the eSDK installer. Running the ide-sdk plugin from an
+    # eSDK installer-based setup would require skipping the bootstrapping and probably
+    # taking some other differences into account when generating the IDE configurations.
+    # This would be possible. But it is not implemented.
+    if context.fixed_setup:
+        return
+
     global ide_plugins
 
     # Search for IDE plugins in all sub-folders named ide_plugins where devtool seraches for plugins.
@@ -1015,7 +956,7 @@ def register_commands(subparsers, context):
                                                 help='Setup the SDK and configure the IDE')
     parser_ide_sdk.add_argument(
         'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
-        'Depending on the --mode paramter different types of SDKs and IDE configurations are generated.')
+        'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.')
     parser_ide_sdk.add_argument(
         '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
         help='Different SDK types are supported:\n'
@@ -1052,7 +993,7 @@ def register_commands(subparsers, context):
     parser_ide_sdk.add_argument(
         '-I', '--key', help='Specify ssh private key for connection to the target')
     parser_ide_sdk.add_argument(
-        '--skip-bitbake', help='Generate IDE configuration but skip calling bibtake to update the SDK.', action='store_true')
+        '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK', action='store_true')
     parser_ide_sdk.add_argument(
         '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
     parser_ide_sdk.add_argument(
@@ -1065,6 +1006,4 @@ def register_commands(subparsers, context):
         '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
     parser_ide_sdk.add_argument(
         '--no-check-space', help='Do not check for available space before deploying', action='store_true')
-    parser_ide_sdk.add_argument(
-        '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true')
     parser_ide_sdk.set_defaults(func=ide_setup)
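For illustration, not part of the patch: the two sorted() changes above make the generated IDE configuration deterministic. Iterating datastore keys or collected CMake arguments in arbitrary order can reorder the emitted files between runs; sorting fixes the output order. A minimal sketch of the effect:

    names = {'CC', 'CXX', 'AR'}            # set iteration order can vary between runs
    print([n for n in names])              # run-dependent order
    print([n for n in sorted(names)])      # always ['AR', 'CC', 'CXX']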
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
index 18daef30c3..1054960551 100644
--- a/scripts/lib/devtool/menuconfig.py
+++ b/scripts/lib/devtool/menuconfig.py
@@ -23,9 +23,6 @@
 import os
 import bb
 import logging
-import argparse
-import re
-import glob
 from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
 from devtool import check_workspace_recipe
 logger = logging.getLogger('devtool')
@@ -34,7 +31,6 @@ def menuconfig(args, config, basepath, workspace):
34 """Entry point for the devtool 'menuconfig' subcommand""" 31 """Entry point for the devtool 'menuconfig' subcommand"""
35 32
36 rd = "" 33 rd = ""
37 kconfigpath = ""
38 pn_src = "" 34 pn_src = ""
39 localfilesdir = "" 35 localfilesdir = ""
40 workspace_dir = "" 36 workspace_dir = ""
@@ -51,7 +47,6 @@ def menuconfig(args, config, basepath, workspace):
             raise DevtoolError("This recipe does not support menuconfig option")
 
         workspace_dir = os.path.join(config.workspace_path,'sources')
-        kconfigpath = rd.getVar('B')
         pn_src = os.path.join(workspace_dir,pn)
 
         # add check to see if oe_local_files exists or not
@@ -70,7 +65,7 @@ def menuconfig(args, config, basepath, workspace):
     logger.info('Launching menuconfig')
     exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
     fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
-    res = standard._create_kconfig_diff(pn_src,rd,fragment)
+    standard._create_kconfig_diff(pn_src,rd,fragment)
 
     return 0
 
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 6674e67267..1fd5947c41 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -18,11 +18,13 @@ import argparse_oe
 import scriptutils
 import errno
 import glob
-import filecmp
 from collections import OrderedDict
+
 from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError
 from devtool import parse_recipe
 
+import bb.utils
+
 logger = logging.getLogger('devtool')
 
 override_branch_prefix = 'devtool-override-'
@@ -30,7 +32,8 @@ override_branch_prefix = 'devtool-override-'
 
 def add(args, config, basepath, workspace):
     """Entry point for the devtool 'add' subcommand"""
-    import bb
+    import bb.data
+    import bb.process
     import oe.recipeutils
 
     if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
@@ -206,7 +209,7 @@ def add(args, config, basepath, workspace):
                 for fn in os.listdir(tempdir):
                     shutil.move(os.path.join(tempdir, fn), recipedir)
             else:
-                raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout))
+                raise DevtoolError(f'Failed to create a recipe file for source {source}')
             attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
             if os.path.exists(attic_recipe):
                 logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
@@ -305,6 +308,7 @@ def add(args, config, basepath, workspace):
 
 def _check_compatible_recipe(pn, d):
     """Check if the recipe is supported by devtool"""
+    import bb.data
     if pn == 'perf':
         raise DevtoolError("The perf recipe does not actually check out "
                            "source and thus cannot be supported by this tool",
@@ -374,7 +378,7 @@ def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
 
 def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
     """List contents of a git treeish"""
-    import bb
+    import bb.process
     cmd = ['git', 'ls-tree', '-z', treeish]
     if recursive:
         cmd.append('-r')
@@ -387,6 +391,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
             ret[split[3]] = split[0:3]
     return ret
 
+def _git_modified(repodir):
+    """List the difference between HEAD and the index"""
+    import bb.process
+    cmd = ['git', 'status', '--porcelain']
+    out, _ = bb.process.run(cmd, cwd=repodir)
+    ret = []
+    if out:
+        for line in out.split("\n"):
+            if line and not line.startswith('??'):
+                ret.append(line[3:])
+    return ret
+
+
 def _git_exclude_path(srctree, path):
     """Return pathspec (list of paths) that excludes certain path"""
     # NOTE: "Filtering out" files/paths in this way is not entirely reliable -
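For illustration, not part of the patch: the 'git status --porcelain' format that _git_modified() parses. Each line carries two status letters and a space before the path; untracked entries start with '??' and are skipped, and line[3:] strips the status columns:

    line = ' M scripts/lib/devtool/standard.py'   # example porcelain output line
    if line and not line.startswith('??'):
        print(line[3:])   # -> scripts/lib/devtool/standard.py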
@@ -414,8 +431,6 @@ def _ls_tree(directory):
 
 def extract(args, config, basepath, workspace):
     """Entry point for the devtool 'extract' subcommand"""
-    import bb
-
     tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
     if not tinfoil:
         # Error already shown
@@ -438,8 +453,6 @@ def extract(args, config, basepath, workspace):
 
 def sync(args, config, basepath, workspace):
     """Entry point for the devtool 'sync' subcommand"""
-    import bb
-
     tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
     if not tinfoil:
         # Error already shown
@@ -460,37 +473,11 @@ def sync(args, config, basepath, workspace):
     finally:
         tinfoil.shutdown()
 
-def symlink_oelocal_files_srctree(rd, srctree):
-    import oe.patch
-    if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
-        # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
-        # (otherwise the recipe won't build as expected)
-        local_files_dir = os.path.join(srctree, 'oe-local-files')
-        addfiles = []
-        for root, _, files in os.walk(local_files_dir):
-            relpth = os.path.relpath(root, local_files_dir)
-            if relpth != '.':
-                bb.utils.mkdirhier(os.path.join(srctree, relpth))
-            for fn in files:
-                if fn == '.gitignore':
-                    continue
-                destpth = os.path.join(srctree, relpth, fn)
-                if os.path.exists(destpth):
-                    os.unlink(destpth)
-                if relpth != '.':
-                    back_relpth = os.path.relpath(local_files_dir, root)
-                    os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth)
-                else:
-                    os.symlink('oe-local-files/%s' % fn, destpth)
-                addfiles.append(os.path.join(relpth, fn))
-        if addfiles:
-            oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd)
-
 def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
     """Extract sources of a recipe"""
-    import oe.recipeutils
-    import oe.patch
     import oe.path
+    import bb.data
+    import bb.process
 
     pn = d.getVar('PN')
 
@@ -555,6 +542,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
     tempbasedir = d.getVar('WORKDIR')
     bb.utils.mkdirhier(tempbasedir)
     tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir)
+    appendbackup = None
     try:
         tinfoil.logger.setLevel(logging.WARNING)
 
@@ -565,7 +553,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
             appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
             shutil.copyfile(appendfile, appendbackup)
         else:
-            appendbackup = None
             bb.utils.mkdirhier(os.path.dirname(appendfile))
         logger.debug('writing append file %s' % appendfile)
         with open(appendfile, 'a') as f:
@@ -638,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
                 srcsubdir = f.read()
         except FileNotFoundError as e:
             raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
-        srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))
+        srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR'))))
 
         # Check if work-shared is empty, if yes
         # find source and copy to work-shared
@@ -657,35 +644,22 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
657 elif not os.path.exists(workshareddir): 644 elif not os.path.exists(workshareddir):
658 oe.path.copyhardlinktree(srcsubdir, workshareddir) 645 oe.path.copyhardlinktree(srcsubdir, workshareddir)
659 646
660 tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
661 srctree_localdir = os.path.join(srctree, 'oe-local-files')
662
663 if sync: 647 if sync:
664 bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) 648 try:
665 649 logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
666 # Move the oe-local-files directory to srctree. 650 bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree)
667 # As oe-local-files is not part of the constructed git tree, 651
668 # removing it directly during the synchronization might surprise 652 # Use git fetch to update the source with the current recipe
669 # the user. Instead, we move it to oe-local-files.bak and remind 653 # To be able to update the currently checked out branch with
670 # the user in the log message. 654 # possibly new history (no fast-forward) git needs to be told
671 if os.path.exists(srctree_localdir + '.bak'): 655 # that's ok
672 shutil.rmtree(srctree_localdir + '.bak') 656 logger.info('Syncing source files including patches to git branch: %s' % devbranch)
673 657 bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
674 if os.path.exists(srctree_localdir): 658 except bb.process.ExecutionError as e:
675 logger.info('Backing up current local file directory %s' % srctree_localdir) 659 raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))
676 shutil.move(srctree_localdir, srctree_localdir + '.bak')
677
678 if os.path.exists(tempdir_localdir):
679 logger.info('Syncing local source files to srctree...')
680 shutil.copytree(tempdir_localdir, srctree_localdir)
681 else:
682 # Move oe-local-files directory to srctree
683 if os.path.exists(tempdir_localdir):
684 logger.info('Adding local source files to srctree...')
685 shutil.move(tempdir_localdir, srcsubdir)
686 660
661 else:
687 shutil.move(srcsubdir, srctree) 662 shutil.move(srcsubdir, srctree)
688 symlink_oelocal_files_srctree(d, srctree)
689 663
690 if is_kernel_yocto: 664 if is_kernel_yocto:
691 logger.info('Copying kernel config to srctree') 665 logger.info('Copying kernel config to srctree')
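
For illustration, the reworked sync path above amounts to the following plain-git sequence, a minimal sketch assuming hypothetical srctree and extraction paths:

    import subprocess

    srctree = '/work/sources/foo'            # hypothetical local checkout
    srcsubdir = '/tmp/devtool-tmp/foo-1.0'   # hypothetical freshly extracted source
    devbranch = 'devtool'

    # Back up the current branch first, then update the checked-out branch in
    # place; --update-head-ok plus --force let fetch rewrite the branch that
    # HEAD points at, which a plain fetch refuses to do.
    subprocess.run(['git', 'branch', '-f', devbranch + '.bak'], cwd=srctree, check=True)
    subprocess.run(['git', 'fetch', '--update-head-ok', '--force',
                    'file://' + srcsubdir, '%s:%s' % (devbranch, devbranch)],
                   cwd=srctree, check=True)
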
@@ -704,8 +678,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
704 678
705def _add_md5(config, recipename, filename): 679def _add_md5(config, recipename, filename):
706 """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" 680 """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace"""
707 import bb.utils
708
709 def addfile(fn): 681 def addfile(fn):
710 md5 = bb.utils.md5_file(fn) 682 md5 = bb.utils.md5_file(fn)
711 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: 683 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f:
@@ -724,7 +696,6 @@ def _add_md5(config, recipename, filename):
724def _check_preserve(config, recipename): 696def _check_preserve(config, recipename):
725 """Check if a file was manually changed and needs to be saved in 'attic' 697 """Check if a file was manually changed and needs to be saved in 'attic'
726 directory""" 698 directory"""
727 import bb.utils
728 origfile = os.path.join(config.workspace_path, '.devtool_md5') 699 origfile = os.path.join(config.workspace_path, '.devtool_md5')
729 newfile = os.path.join(config.workspace_path, '.devtool_md5_new') 700 newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
730 preservepath = os.path.join(config.workspace_path, 'attic', recipename) 701 preservepath = os.path.join(config.workspace_path, 'attic', recipename)
@@ -755,36 +726,36 @@ def _check_preserve(config, recipename):
755 726
756def get_staging_kver(srcdir): 727def get_staging_kver(srcdir):
757 # Kernel version from work-shared 728 # Kernel version from work-shared
758 kerver = [] 729 import itertools
759 staging_kerVer="" 730 try:
760 if os.path.exists(srcdir) and os.listdir(srcdir):
761 with open(os.path.join(srcdir, "Makefile")) as f: 731 with open(os.path.join(srcdir, "Makefile")) as f:
 762 version = [next(f) for x in range(5)][1:4] 732 # Take VERSION, PATCHLEVEL, SUBLEVEL from Makefile lines 2-4 (islice indices 1-3)
763 for word in version: 733 return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4))
764 kerver.append(word.split('= ')[1].split('\n')[0]) 734 except FileNotFoundError:
765 staging_kerVer = ".".join(kerver) 735 return ""
766 return staging_kerVer
767 736
768def get_staging_kbranch(srcdir): 737def get_staging_kbranch(srcdir):
738 import bb.process
769 staging_kbranch = "" 739 staging_kbranch = ""
770 if os.path.exists(srcdir) and os.listdir(srcdir): 740 if os.path.exists(srcdir) and os.listdir(srcdir):
771 (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) 741 (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
772 staging_kbranch = "".join(branch.split('\n')[0]) 742 staging_kbranch = "".join(branch.split('\n')[0])
773 return staging_kbranch 743 return staging_kbranch
774 744
775def get_real_srctree(srctree, s, workdir): 745def get_real_srctree(srctree, s, unpackdir):
776 # Check that recipe isn't using a shared workdir 746 # Check that recipe isn't using a shared workdir
777 s = os.path.abspath(s) 747 s = os.path.abspath(s)
778 workdir = os.path.abspath(workdir) 748 unpackdir = os.path.abspath(unpackdir)
779 if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: 749 if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir:
780 # Handle if S is set to a subdirectory of the source 750 # Handle if S is set to a subdirectory of the source
781 srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] 751 srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1]
782 srctree = os.path.join(srctree, srcsubdir) 752 srctree = os.path.join(srctree, srcsubdir)
783 return srctree 753 return srctree
784 754
785def modify(args, config, basepath, workspace): 755def modify(args, config, basepath, workspace):
786 """Entry point for the devtool 'modify' subcommand""" 756 """Entry point for the devtool 'modify' subcommand"""
787 import bb 757 import bb.data
758 import bb.process
788 import oe.recipeutils 759 import oe.recipeutils
789 import oe.patch 760 import oe.patch
790 import oe.path 761 import oe.path
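
As a worked example of the UNPACKDIR-based get_real_srctree above, self-contained and with hypothetical paths:

    import os

    def get_real_srctree(srctree, s, unpackdir):
        # Same logic as the helper above: if S points inside UNPACKDIR, map the
        # extra path components onto the local source tree.
        s = os.path.abspath(s)
        unpackdir = os.path.abspath(unpackdir)
        if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir:
            srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1]
            srctree = os.path.join(srctree, srcsubdir)
        return srctree

    # S = ${UNPACKDIR}/foo-1.0/src maps to <srctree>/src:
    print(get_real_srctree('/work/sources/foo', '/tmp/unpack/foo-1.0/src', '/tmp/unpack'))
    # -> /work/sources/foo/src
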
@@ -840,35 +811,21 @@ def modify(args, config, basepath, workspace):
840 staging_kbranch = get_staging_kbranch(srcdir) 811 staging_kbranch = get_staging_kbranch(srcdir)
841 if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): 812 if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
842 oe.path.copyhardlinktree(srcdir, srctree) 813 oe.path.copyhardlinktree(srcdir, srctree)
843 workdir = rd.getVar('WORKDIR') 814 unpackdir = rd.getVar('UNPACKDIR')
844 srcsubdir = rd.getVar('S') 815 srcsubdir = rd.getVar('S')
845 localfilesdir = os.path.join(srctree, 'oe-local-files')
846 # Move local source files into separate subdir
847 recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)]
848 local_files = oe.recipeutils.get_recipe_local_files(rd)
849 816
850 for key in local_files.copy(): 817 # Add locally copied files to gitignore as we add back to the metadata directly
851 if key.endswith('scc'): 818 local_files = oe.recipeutils.get_recipe_local_files(rd)
852 sccfile = open(local_files[key], 'r')
853 for l in sccfile:
854 line = l.split()
855 if line and line[0] in ('kconf', 'patch'):
856 cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
857 if not cfg in local_files.values():
858 local_files[line[-1]] = cfg
859 shutil.copy2(cfg, workdir)
860 sccfile.close()
861
862 # Ignore local files with subdir={BP}
863 srcabspath = os.path.abspath(srcsubdir) 819 srcabspath = os.path.abspath(srcsubdir)
864 local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] 820 local_files = [fname for fname in local_files if
821 os.path.exists(os.path.join(unpackdir, fname)) and
822 srcabspath == unpackdir]
865 if local_files: 823 if local_files:
866 for fname in local_files: 824 with open(os.path.join(srctree, '.gitignore'), 'a+') as f:
 867 _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) 825 f.write('# Ignore local files, by default. Remove the following lines '
868 with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: 826 'if you want to commit the directory to Git\n')
869 f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n') 827 for fname in local_files:
870 828 f.write('%s\n' % fname)
871 symlink_oelocal_files_srctree(rd, srctree)
872 829
873 task = 'do_configure' 830 task = 'do_configure'
874 res = tinfoil.build_targets(pn, task, handle_events=True) 831 res = tinfoil.build_targets(pn, task, handle_events=True)
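
For two hypothetical local files, the .gitignore block above leaves <srctree>/.gitignore ending with:

    # Ignore local files, by default. Remove the following lines if you want to commit the directory to Git
    defaults.scc
    devtool-fragment.cfg
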
@@ -893,7 +850,10 @@ def modify(args, config, basepath, workspace):
893 (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) 850 (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
894 commits["."] = stdout.split() 851 commits["."] = stdout.split()
895 check_commits = True 852 check_commits = True
896 (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) 853 try:
854 (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
855 except bb.process.ExecutionError:
856 stdout = ""
897 for line in stdout.splitlines(): 857 for line in stdout.splitlines():
898 (rev, submodule_path) = line.split() 858 (rev, submodule_path) = line.split()
899 submodule = os.path.relpath(submodule_path, srctree) 859 submodule = os.path.relpath(submodule_path, srctree)
@@ -947,7 +907,7 @@ def modify(args, config, basepath, workspace):
947 907
948 # Need to grab this here in case the source is within a subdirectory 908 # Need to grab this here in case the source is within a subdirectory
949 srctreebase = srctree 909 srctreebase = srctree
950 srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) 910 srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR'))
951 911
952 bb.utils.mkdirhier(os.path.dirname(appendfile)) 912 bb.utils.mkdirhier(os.path.dirname(appendfile))
953 with open(appendfile, 'w') as f: 913 with open(appendfile, 'w') as f:
@@ -987,13 +947,6 @@ def modify(args, config, basepath, workspace):
987 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) 947 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
988 948
989 if bb.data.inherits_class('kernel', rd): 949 if bb.data.inherits_class('kernel', rd):
990 f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
991 'do_fetch do_unpack do_kernel_configcheck"\n')
992 f.write('\ndo_patch[noexec] = "1"\n')
993 f.write('\ndo_configure:append() {\n'
994 ' cp ${B}/.config ${S}/.config.baseline\n'
995 ' ln -sfT ${B}/.config ${S}/.config.new\n'
996 '}\n')
997 f.write('\ndo_kernel_configme:prepend() {\n' 950 f.write('\ndo_kernel_configme:prepend() {\n'
998 ' if [ -e ${S}/.config ]; then\n' 951 ' if [ -e ${S}/.config ]; then\n'
999 ' mv ${S}/.config ${S}/.config.old\n' 952 ' mv ${S}/.config ${S}/.config.old\n'
@@ -1017,6 +970,8 @@ def modify(args, config, basepath, workspace):
1017 if branch == args.branch: 970 if branch == args.branch:
1018 continue 971 continue
1019 f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) 972 f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch])))
973 if args.debug_build:
974 f.write('\nDEBUG_BUILD = "1"\n')
1020 975
1021 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) 976 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
1022 977
@@ -1061,6 +1016,7 @@ def rename(args, config, basepath, workspace):
1061 origfnver = '' 1016 origfnver = ''
1062 1017
1063 recipefilemd5 = None 1018 recipefilemd5 = None
1019 newrecipefilemd5 = None
1064 tinfoil = setup_tinfoil(basepath=basepath, tracking=True) 1020 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
1065 try: 1021 try:
1066 rd = parse_recipe(config, tinfoil, args.recipename, True) 1022 rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -1138,6 +1094,7 @@ def rename(args, config, basepath, workspace):
1138 1094
1139 # Rename source tree if it's the default path 1095 # Rename source tree if it's the default path
1140 appendmd5 = None 1096 appendmd5 = None
1097 newappendmd5 = None
1141 if not args.no_srctree: 1098 if not args.no_srctree:
1142 srctree = workspace[args.recipename]['srctree'] 1099 srctree = workspace[args.recipename]['srctree']
1143 if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): 1100 if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename):
@@ -1226,7 +1183,7 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre
1226 """Get initial and update rev of a recipe. These are the start point of the 1183 """Get initial and update rev of a recipe. These are the start point of the
1227 whole patchset and start point for the patches to be re-generated/updated. 1184 whole patchset and start point for the patches to be re-generated/updated.
1228 """ 1185 """
1229 import bb 1186 import bb.process
1230 1187
1231 # Get current branch 1188 # Get current branch
1232 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', 1189 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
@@ -1352,6 +1309,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
1352 """ 1309 """
1353 import oe.recipeutils 1310 import oe.recipeutils
1354 from oe.patch import GitApplyTree 1311 from oe.patch import GitApplyTree
1312 import bb.process
1355 updated = OrderedDict() 1313 updated = OrderedDict()
1356 added = OrderedDict() 1314 added = OrderedDict()
1357 seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') 1315 seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')
@@ -1373,6 +1331,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
1373 # values, but they ought to be anyway... 1331 # values, but they ought to be anyway...
1374 new_basename = seqpatch_re.match(new_patch).group(2) 1332 new_basename = seqpatch_re.match(new_patch).group(2)
1375 match_name = None 1333 match_name = None
1334 old_patch = None
1376 for old_patch in existing_patches: 1335 for old_patch in existing_patches:
1377 old_basename = seqpatch_re.match(old_patch).group(2) 1336 old_basename = seqpatch_re.match(old_patch).group(2)
1378 old_basename_splitext = os.path.splitext(old_basename) 1337 old_basename_splitext = os.path.splitext(old_basename)
@@ -1421,6 +1380,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
1421 1380
1422def _create_kconfig_diff(srctree, rd, outfile): 1381def _create_kconfig_diff(srctree, rd, outfile):
1423 """Create a kconfig fragment""" 1382 """Create a kconfig fragment"""
1383 import bb.process
1424 # Only update config fragment if both config files exist 1384 # Only update config fragment if both config files exist
1425 orig_config = os.path.join(srctree, '.config.baseline') 1385 orig_config = os.path.join(srctree, '.config.baseline')
1426 new_config = os.path.join(srctree, '.config.new') 1386 new_config = os.path.join(srctree, '.config.new')
@@ -1452,16 +1412,21 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1452 1. updated - files that already exist in SRCURI 1412 1. updated - files that already exist in SRCURI
1453 2. added - new files that don't exist in SRCURI 1413 2. added - new files that don't exist in SRCURI
1454 3. removed - files that exist in SRCURI but not in exported files 1414 3. removed - files that exist in SRCURI but not in exported files
1455 In each dict the key is the 'basepath' of the URI and value is the 1415 In each dict the key is the 'basepath' of the URI and value is:
1456 absolute path to the existing file in recipe space (if any). 1416 - for updated and added dicts, a dict with one optional key:
1417 - 'path': the absolute path to the existing file in recipe space (if any)
1418 - for the removed dict, the absolute path to the existing file in recipe space
1457 """ 1419 """
1458 import oe.recipeutils 1420 import oe.recipeutils
1421 import bb.data
1422 import bb.process
1459 1423
1460 # Find out local files (SRC_URI files that exist in the "recipe space"). 1424 # Find out local files (SRC_URI files that exist in the "recipe space").
1461 # Local files that reside in srctree are not included in patch generation. 1425 # Local files that reside in srctree are not included in patch generation.
1462 # Instead they are directly copied over the original source files (in 1426 # Instead they are directly copied over the original source files (in
1463 # recipe space). 1427 # recipe space).
1464 existing_files = oe.recipeutils.get_recipe_local_files(rd) 1428 existing_files = oe.recipeutils.get_recipe_local_files(rd)
1429
1465 new_set = None 1430 new_set = None
1466 updated = OrderedDict() 1431 updated = OrderedDict()
1467 added = OrderedDict() 1432 added = OrderedDict()
@@ -1478,24 +1443,28 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1478 if branchname.startswith(override_branch_prefix): 1443 if branchname.startswith(override_branch_prefix):
1479 return (updated, added, removed) 1444 return (updated, added, removed)
1480 1445
1481 local_files_dir = os.path.join(srctreebase, 'oe-local-files') 1446 files = _git_modified(srctree)
1482 git_files = _git_ls_tree(srctree) 1447 #if not files:
1483 if 'oe-local-files' in git_files: 1448 # files = _ls_tree(srctree)
1484 # If tracked by Git, take the files from srctree HEAD. First get 1449 for f in files:
1485 # the tree object of the directory 1450 fullfile = os.path.join(srctree, f)
1486 tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') 1451 if os.path.exists(os.path.join(fullfile, ".git")):
1487 tree = git_files['oe-local-files'][2] 1452 # submodules handled elsewhere
1488 bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, 1453 continue
1489 env=dict(os.environ, GIT_WORK_TREE=destdir, 1454 if f not in existing_files:
1490 GIT_INDEX_FILE=tmp_index)) 1455 added[f] = {}
1491 new_set = list(_git_ls_tree(srctree, tree, True).keys()) 1456 if os.path.isdir(os.path.join(srctree, f)):
1492 elif os.path.isdir(local_files_dir): 1457 shutil.copytree(fullfile, os.path.join(destdir, f))
1493 # If not tracked by Git, just copy from working copy 1458 else:
1494 new_set = _ls_tree(local_files_dir) 1459 shutil.copy2(fullfile, os.path.join(destdir, f))
1495 bb.process.run(['cp', '-ax', 1460 elif not os.path.exists(fullfile):
1496 os.path.join(local_files_dir, '.'), destdir]) 1461 removed[f] = existing_files[f]
1497 else: 1462 elif f in existing_files:
1498 new_set = [] 1463 updated[f] = {'path' : existing_files[f]}
1464 if os.path.isdir(os.path.join(srctree, f)):
1465 shutil.copytree(fullfile, os.path.join(destdir, f))
1466 else:
1467 shutil.copy2(fullfile, os.path.join(destdir, f))
1499 1468
1500 # Special handling for kernel config 1469 # Special handling for kernel config
1501 if bb.data.inherits_class('kernel-yocto', rd): 1470 if bb.data.inherits_class('kernel-yocto', rd):
@@ -1503,17 +1472,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1503 fragment_path = os.path.join(destdir, fragment_fn) 1472 fragment_path = os.path.join(destdir, fragment_fn)
1504 if _create_kconfig_diff(srctree, rd, fragment_path): 1473 if _create_kconfig_diff(srctree, rd, fragment_path):
1505 if os.path.exists(fragment_path): 1474 if os.path.exists(fragment_path):
1506 if fragment_fn not in new_set: 1475 if fragment_fn in removed:
1507 new_set.append(fragment_fn) 1476 del removed[fragment_fn]
1508 # Copy fragment to local-files 1477 if fragment_fn not in updated and fragment_fn not in added:
1509 if os.path.isdir(local_files_dir): 1478 added[fragment_fn] = {}
1510 shutil.copy2(fragment_path, local_files_dir)
1511 else: 1479 else:
1512 if fragment_fn in new_set: 1480 if fragment_fn in updated:
1513 new_set.remove(fragment_fn) 1481 removed[fragment_fn] = updated[fragment_fn]
1514 # Remove fragment from local-files 1482 del updated[fragment_fn]
1515 if os.path.exists(os.path.join(local_files_dir, fragment_fn)):
1516 os.unlink(os.path.join(local_files_dir, fragment_fn))
1517 1483
1518 # Special handling for cml1, ccmake, etc bbclasses that generated 1484 # Special handling for cml1, ccmake, etc bbclasses that generated
1519 # configuration fragment files that are consumed as source files 1485 # configuration fragment files that are consumed as source files
@@ -1521,42 +1487,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1521 if bb.data.inherits_class(frag_class, rd): 1487 if bb.data.inherits_class(frag_class, rd):
1522 srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) 1488 srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name)
1523 if os.path.exists(srcpath): 1489 if os.path.exists(srcpath):
1524 if frag_name not in new_set: 1490 if frag_name in removed:
1525 new_set.append(frag_name) 1491 del removed[frag_name]
1492 if frag_name not in updated:
1493 added[frag_name] = {}
1526 # copy fragment into destdir 1494 # copy fragment into destdir
1527 shutil.copy2(srcpath, destdir) 1495 shutil.copy2(srcpath, destdir)
1528 # copy fragment into local files if exists 1496
1529 if os.path.isdir(local_files_dir):
1530 shutil.copy2(srcpath, local_files_dir)
1531
1532 if new_set is not None:
1533 for fname in new_set:
1534 if fname in existing_files:
1535 origpath = existing_files.pop(fname)
1536 workpath = os.path.join(local_files_dir, fname)
1537 if not filecmp.cmp(origpath, workpath):
1538 updated[fname] = origpath
1539 elif fname != '.gitignore':
1540 added[fname] = None
1541
1542 workdir = rd.getVar('WORKDIR')
1543 s = rd.getVar('S')
1544 if not s.endswith(os.sep):
1545 s += os.sep
1546
1547 if workdir != s:
1548 # Handle files where subdir= was specified
1549 for fname in list(existing_files.keys()):
1550 # FIXME handle both subdir starting with BP and not?
1551 fworkpath = os.path.join(workdir, fname)
1552 if fworkpath.startswith(s):
1553 fpath = os.path.join(srctree, os.path.relpath(fworkpath, s))
1554 if os.path.exists(fpath):
1555 origpath = existing_files.pop(fname)
1556 if not filecmp.cmp(origpath, fpath):
1557 updated[fpath] = origpath
1558
1559 removed = existing_files
1560 return (updated, added, removed) 1497 return (updated, added, removed)
1561 1498
1562 1499
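
To summarise the value shapes now produced (see the reworked docstring earlier in this hunk), a sketch with hypothetical file names:

    # updated and added map basepaths to dicts; removed maps them to plain paths
    updated = {'defaults.scc': {'path': '/layer/recipes-foo/foo/defaults.scc'}}
    added = {'devtool-fragment.cfg': {}}   # no recipe-space path yet
    removed = {'obsolete.patch': '/layer/recipes-foo/foo/obsolete.patch'}
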
@@ -1574,7 +1511,7 @@ def _determine_files_dir(rd):
1574 1511
1575def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): 1512def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None):
1576 """Implement the 'srcrev' mode of update-recipe""" 1513 """Implement the 'srcrev' mode of update-recipe"""
1577 import bb 1514 import bb.process
1578 import oe.recipeutils 1515 import oe.recipeutils
1579 1516
1580 dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' 1517 dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
@@ -1612,6 +1549,7 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1612 local_files_dir = tempfile.mkdtemp(dir=tempdir) 1549 local_files_dir = tempfile.mkdtemp(dir=tempdir)
1613 srctreebase = workspace[recipename]['srctreebase'] 1550 srctreebase = workspace[recipename]['srctreebase']
1614 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) 1551 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
1552 removedentries = {}
1615 if not no_remove: 1553 if not no_remove:
1616 # Find list of existing patches in recipe file 1554 # Find list of existing patches in recipe file
1617 patches_dir = tempfile.mkdtemp(dir=tempdir) 1555 patches_dir = tempfile.mkdtemp(dir=tempdir)
@@ -1640,7 +1578,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1640 redirect_output=dry_run_outdir) 1578 redirect_output=dry_run_outdir)
1641 else: 1579 else:
1642 files_dir = _determine_files_dir(rd) 1580 files_dir = _determine_files_dir(rd)
1643 for basepath, path in upd_f.items(): 1581 for basepath, param in upd_f.items():
1582 path = param['path']
1644 logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) 1583 logger.info('Updating file %s%s' % (basepath, dry_run_suffix))
1645 if os.path.isabs(basepath): 1584 if os.path.isabs(basepath):
1646 # Original file (probably with subdir pointing inside source tree) 1585 # Original file (probably with subdir pointing inside source tree)
@@ -1650,7 +1589,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1650 _move_file(os.path.join(local_files_dir, basepath), path, 1589 _move_file(os.path.join(local_files_dir, basepath), path,
1651 dry_run_outdir=dry_run_outdir, base_outdir=recipedir) 1590 dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
 1652 update_srcuri = True 1591 update_srcuri = True
1653 for basepath, path in new_f.items(): 1592 for basepath, param in new_f.items():
 1593 path = param.get('path')  # added entries carry no recipe-space path
1654 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) 1594 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
1655 _move_file(os.path.join(local_files_dir, basepath), 1595 _move_file(os.path.join(local_files_dir, basepath),
1656 os.path.join(files_dir, basepath), 1596 os.path.join(files_dir, basepath),
@@ -1673,7 +1613,6 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1673 1613
1674def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): 1614def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False):
1675 """Implement the 'patch' mode of update-recipe""" 1615 """Implement the 'patch' mode of update-recipe"""
1676 import bb
1677 import oe.recipeutils 1616 import oe.recipeutils
1678 1617
1679 recipefile = rd.getVar('FILE') 1618 recipefile = rd.getVar('FILE')
@@ -1772,7 +1711,8 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1772 else: 1711 else:
1773 # Update existing files 1712 # Update existing files
1774 files_dir = _determine_files_dir(rd) 1713 files_dir = _determine_files_dir(rd)
1775 for basepath, path in upd_f.items(): 1714 for basepath, param in upd_f.items():
1715 path = param['path']
1776 logger.info('Updating file %s' % basepath) 1716 logger.info('Updating file %s' % basepath)
1777 if os.path.isabs(basepath): 1717 if os.path.isabs(basepath):
1778 # Original file (probably with subdir pointing inside source tree) 1718 # Original file (probably with subdir pointing inside source tree)
@@ -1786,6 +1726,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1786 for basepath, param in upd_p.items(): 1726 for basepath, param in upd_p.items():
1787 path = param['path'] 1727 path = param['path']
1788 patchdir = param.get('patchdir', ".") 1728 patchdir = param.get('patchdir', ".")
1729 patchdir_param = {}
1789 if patchdir != "." : 1730 if patchdir != "." :
1790 patchdir_param = dict(patchdir_params) 1731 patchdir_param = dict(patchdir_params)
1791 if patchdir_param: 1732 if patchdir_param:
@@ -1806,7 +1747,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1806 dry_run_outdir=dry_run_outdir, base_outdir=recipedir) 1747 dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
1807 updatefiles = True 1748 updatefiles = True
1808 # Add any new files 1749 # Add any new files
1809 for basepath, path in new_f.items(): 1750 for basepath, param in new_f.items():
1810 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) 1751 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
1811 _move_file(os.path.join(local_files_dir, basepath), 1752 _move_file(os.path.join(local_files_dir, basepath),
1812 os.path.join(files_dir, basepath), 1753 os.path.join(files_dir, basepath),
@@ -1851,6 +1792,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1851 1792
1852def _guess_recipe_update_mode(srctree, rdata): 1793def _guess_recipe_update_mode(srctree, rdata):
1853 """Guess the recipe update mode to use""" 1794 """Guess the recipe update mode to use"""
1795 import bb.process
1854 src_uri = (rdata.getVar('SRC_URI') or '').split() 1796 src_uri = (rdata.getVar('SRC_URI') or '').split()
1855 git_uris = [uri for uri in src_uri if uri.startswith('git://')] 1797 git_uris = [uri for uri in src_uri if uri.startswith('git://')]
1856 if not git_uris: 1798 if not git_uris:
@@ -1872,6 +1814,8 @@ def _guess_recipe_update_mode(srctree, rdata):
1872 return 'patch' 1814 return 'patch'
1873 1815
1874def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): 1816def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False):
1817 import bb.data
1818 import bb.process
1875 srctree = workspace[recipename]['srctree'] 1819 srctree = workspace[recipename]['srctree']
1876 if mode == 'auto': 1820 if mode == 'auto':
1877 mode = _guess_recipe_update_mode(srctree, rd) 1821 mode = _guess_recipe_update_mode(srctree, rd)
@@ -1994,6 +1938,7 @@ def status(args, config, basepath, workspace):
1994 1938
1995def _reset(recipes, no_clean, remove_work, config, basepath, workspace): 1939def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
1996 """Reset one or more recipes""" 1940 """Reset one or more recipes"""
1941 import bb.process
1997 import oe.path 1942 import oe.path
1998 1943
1999 def clean_preferred_provider(pn, layerconf_path): 1944 def clean_preferred_provider(pn, layerconf_path):
@@ -2006,7 +1951,7 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
2006 lines = f.readlines() 1951 lines = f.readlines()
2007 with open(new_layerconf_file, 'a') as nf: 1952 with open(new_layerconf_file, 'a') as nf:
2008 for line in lines: 1953 for line in lines:
2009 pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$' 1954 pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'
2010 if not re.match(pprovider_exp, line): 1955 if not re.match(pprovider_exp, line):
2011 nf.write(line) 1956 nf.write(line)
2012 else: 1957 else:
@@ -2097,8 +2042,6 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
2097 2042
2098def reset(args, config, basepath, workspace): 2043def reset(args, config, basepath, workspace):
2099 """Entry point for the devtool 'reset' subcommand""" 2044 """Entry point for the devtool 'reset' subcommand"""
2100 import bb
2101 import shutil
2102 2045
2103 recipes = "" 2046 recipes = ""
2104 2047
@@ -2377,6 +2320,7 @@ def register_commands(subparsers, context):
2377 parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') 2320 parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
2378 parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') 2321 parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
2379 parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") 2322 parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
2323 parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe')
2380 parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) 2324 parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup)
2381 2325
2382 parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', 2326 parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
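
Usage of the new option, with an illustrative recipe name:

    devtool modify --debug-build busybox

This simply appends DEBUG_BUILD = "1" to the generated workspace bbappend, as shown earlier in this file's diff.
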
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
index fa5b8ef3c7..d9aca6e2db 100644
--- a/scripts/lib/devtool/upgrade.py
+++ b/scripts/lib/devtool/upgrade.py
@@ -32,7 +32,7 @@ def _run(cmd, cwd=''):
32 32
33def _get_srctree(tmpdir): 33def _get_srctree(tmpdir):
34 srctree = tmpdir 34 srctree = tmpdir
35 dirs = scriptutils.filter_src_subdirs(tmpdir) 35 dirs = os.listdir(tmpdir)
36 if len(dirs) == 1: 36 if len(dirs) == 1:
37 srctree = os.path.join(tmpdir, dirs[0]) 37 srctree = os.path.join(tmpdir, dirs[0])
38 else: 38 else:
@@ -76,19 +76,19 @@ def _rename_recipe_dirs(oldpv, newpv, path):
76 bb.utils.rename(os.path.join(path, oldfile), 76 bb.utils.rename(os.path.join(path, oldfile),
77 os.path.join(path, newfile)) 77 os.path.join(path, newfile))
78 78
79def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): 79def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path):
80 oldrecipe = os.path.basename(oldrecipe) 80 oldrecipe = os.path.basename(oldrecipe)
81 if oldrecipe.endswith('_%s.bb' % oldpv): 81 if oldrecipe.endswith('_%s.bb' % oldpv):
82 newrecipe = '%s_%s.bb' % (bpn, newpv) 82 newrecipe = '%s_%s.bb' % (pn, newpv)
83 if oldrecipe != newrecipe: 83 if oldrecipe != newrecipe:
84 shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) 84 shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
85 else: 85 else:
86 newrecipe = oldrecipe 86 newrecipe = oldrecipe
87 return os.path.join(path, newrecipe) 87 return os.path.join(path, newrecipe)
88 88
89def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path): 89def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path):
90 _rename_recipe_dirs(oldpv, newpv, path) 90 _rename_recipe_dirs(oldpv, newpv, path)
91 return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) 91 return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path)
92 92
93def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): 93def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
94 """Writes an append file""" 94 """Writes an append file"""
@@ -169,6 +169,7 @@ def _get_uri(rd):
169 169
170def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): 170def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
171 """Extract sources of a recipe with a new version""" 171 """Extract sources of a recipe with a new version"""
172 import oe.patch
172 173
173 def __run(cmd): 174 def __run(cmd):
174 """Simple wrapper which calls _run with srctree as cwd""" 175 """Simple wrapper which calls _run with srctree as cwd"""
@@ -187,9 +188,9 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
187 if uri.startswith('git://') or uri.startswith('gitsm://'): 188 if uri.startswith('git://') or uri.startswith('gitsm://'):
188 __run('git fetch') 189 __run('git fetch')
189 __run('git checkout %s' % rev) 190 __run('git checkout %s' % rev)
190 __run('git tag -f devtool-base-new') 191 __run('git tag -f --no-sign devtool-base-new')
191 __run('git submodule update --recursive') 192 __run('git submodule update --recursive')
192 __run('git submodule foreach \'git tag -f devtool-base-new\'') 193 __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'')
193 (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') 194 (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
194 paths += [os.path.join(srctree, p) for p in stdout.splitlines()] 195 paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
195 checksums = {} 196 checksums = {}
@@ -256,7 +257,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
256 useroptions = [] 257 useroptions = []
257 oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) 258 oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
258 __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) 259 __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
259 __run('git tag -f devtool-base-%s' % newpv) 260 __run('git tag -f --no-sign devtool-base-%s' % newpv)
260 261
261 revs = {} 262 revs = {}
262 for path in paths: 263 for path in paths:
@@ -335,19 +336,19 @@ def _add_license_diff_to_recipe(path, diff):
335def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): 336def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
336 """Creates the new recipe under workspace""" 337 """Creates the new recipe under workspace"""
337 338
338 bpn = rd.getVar('BPN') 339 pn = rd.getVar('PN')
339 path = os.path.join(workspace, 'recipes', bpn) 340 path = os.path.join(workspace, 'recipes', pn)
340 bb.utils.mkdirhier(path) 341 bb.utils.mkdirhier(path)
341 copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) 342 copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
342 if not copied: 343 if not copied:
343 raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn) 344 raise DevtoolError('Internal error - no files were copied for recipe %s' % pn)
344 logger.debug('Copied %s to %s' % (copied, path)) 345 logger.debug('Copied %s to %s' % (copied, path))
345 346
346 oldpv = rd.getVar('PV') 347 oldpv = rd.getVar('PV')
347 if not newpv: 348 if not newpv:
348 newpv = oldpv 349 newpv = oldpv
349 origpath = rd.getVar('FILE') 350 origpath = rd.getVar('FILE')
350 fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path) 351 fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path)
351 logger.debug('Upgraded %s => %s' % (origpath, fullpath)) 352 logger.debug('Upgraded %s => %s' % (origpath, fullpath))
352 353
353 newvalues = {} 354 newvalues = {}
@@ -534,6 +535,15 @@ def _generate_license_diff(old_licenses, new_licenses):
534 diff = diff + line 535 diff = diff + line
535 return diff 536 return diff
536 537
538def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil):
539 tasks = []
540 for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
541 logger.info('Running extra recipe upgrade task: %s' % task)
542 res = tinfoil.build_targets(pn, task, handle_events=True)
543
544 if not res:
545 raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn))
546
537def upgrade(args, config, basepath, workspace): 547def upgrade(args, config, basepath, workspace):
538 """Entry point for the devtool 'upgrade' subcommand""" 548 """Entry point for the devtool 'upgrade' subcommand"""
539 549
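
A recipe or class can hook into this by listing tasks in the variable; the task name here is illustrative:

    RECIPE_UPGRADE_EXTRA_TASKS += "do_update_crates"

Each listed task is built against the upgraded recipe via tinfoil.build_targets(), and a failure aborts the upgrade.
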
@@ -561,7 +571,7 @@ def upgrade(args, config, basepath, workspace):
561 else: 571 else:
562 srctree = standard.get_default_srctree(config, pn) 572 srctree = standard.get_default_srctree(config, pn)
563 573
564 srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) 574 srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR'))
565 575
566 # try to automatically discover latest version and revision if not provided on command line 576 # try to automatically discover latest version and revision if not provided on command line
567 if not args.version and not args.srcrev: 577 if not args.version and not args.srcrev:
@@ -601,7 +611,7 @@ def upgrade(args, config, basepath, workspace):
601 license_diff = _generate_license_diff(old_licenses, new_licenses) 611 license_diff = _generate_license_diff(old_licenses, new_licenses)
602 rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) 612 rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
603 except (bb.process.CmdError, DevtoolError) as e: 613 except (bb.process.CmdError, DevtoolError) as e:
604 recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN')) 614 recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN'))
605 _upgrade_error(e, recipedir, srctree, args.keep_failure) 615 _upgrade_error(e, recipedir, srctree, args.keep_failure)
606 standard._add_md5(config, pn, os.path.dirname(rf)) 616 standard._add_md5(config, pn, os.path.dirname(rf))
607 617
@@ -609,6 +619,8 @@ def upgrade(args, config, basepath, workspace):
609 copied, config.workspace_path, rd) 619 copied, config.workspace_path, rd)
610 standard._add_md5(config, pn, af) 620 standard._add_md5(config, pn, af)
611 621
622 _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil)
623
612 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) 624 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
613 625
614 logger.info('Upgraded source extracted to %s' % srctree) 626 logger.info('Upgraded source extracted to %s' % srctree)
@@ -643,18 +655,28 @@ def latest_version(args, config, basepath, workspace):
643 return 0 655 return 0
644 656
645def check_upgrade_status(args, config, basepath, workspace): 657def check_upgrade_status(args, config, basepath, workspace):
658 def _print_status(recipe):
659 print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'],
660 recipe['cur_ver'],
661 recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"),
662 recipe['maintainer'],
663 recipe['revision'] if recipe['revision'] != 'N/A' else "",
664 "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else ""))
646 if not args.recipe: 665 if not args.recipe:
647 logger.info("Checking the upstream status for all recipes may take a few minutes") 666 logger.info("Checking the upstream status for all recipes may take a few minutes")
648 results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) 667 results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
649 for result in results: 668 for recipegroup in results:
650 # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason 669 upgrades = [r for r in recipegroup if r['status'] != 'MATCH']
651 if args.all or result[1] != 'MATCH': 670 currents = [r for r in recipegroup if r['status'] == 'MATCH']
652 print("{:25} {:15} {:15} {} {} {}".format( result[0], 671 if len(upgrades) > 1:
653 result[2], 672 print("These recipes need to be upgraded together {")
654 result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), 673 for r in sorted(upgrades, key=lambda r:r['pn']):
655 result[4], 674 _print_status(r)
656 result[5] if result[5] != 'N/A' else "", 675 if len(upgrades) > 1:
657 "cannot be updated due to: %s" %(result[6]) if result[6] else "")) 676 print("}")
677 for r in currents:
678 if args.all:
679 _print_status(r)
658 680
659def register_commands(subparsers, context): 681def register_commands(subparsers, context):
660 """Register devtool subcommands from this plugin""" 682 """Register devtool subcommands from this plugin"""
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py
index 964817766b..bf39f71b11 100644
--- a/scripts/lib/devtool/utilcmds.py
+++ b/scripts/lib/devtool/utilcmds.py
@@ -64,7 +64,7 @@ def configure_help(args, config, basepath, workspace):
64 b = rd.getVar('B') 64 b = rd.getVar('B')
65 s = rd.getVar('S') 65 s = rd.getVar('S')
66 configurescript = os.path.join(s, 'configure') 66 configurescript = os.path.join(s, 'configure')
67 confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or []) 67 confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd))
68 configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') 68 configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
69 extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') 69 extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
70 extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') 70 extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
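
The replacement queries the public task list instead of the internal __BBTASKS variable; a minimal sketch of the check, assuming rd is a parsed recipe datastore:

    import bb.build

    def configure_is_disabled(rd):
        # do_configure is effectively disabled if it is flagged noexec or is
        # not registered as a task at all
        flags = rd.getVarFlags('do_configure') or {}
        return 'noexec' in flags or 'do_configure' not in bb.build.listtasks(rd)
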
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index 341e893305..041d79f162 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -101,7 +101,7 @@ def determine_file_source(targetpath, rd):
101 import oe.recipeutils 101 import oe.recipeutils
102 102
103 # See if it's in do_install for the recipe 103 # See if it's in do_install for the recipe
104 workdir = rd.getVar('WORKDIR') 104 unpackdir = rd.getVar('UNPACKDIR')
105 src_uri = rd.getVar('SRC_URI') 105 src_uri = rd.getVar('SRC_URI')
106 srcfile = '' 106 srcfile = ''
107 modpatches = [] 107 modpatches = []
@@ -113,9 +113,9 @@ def determine_file_source(targetpath, rd):
113 if not srcpath.startswith('/'): 113 if not srcpath.startswith('/'):
114 # Handle non-absolute path 114 # Handle non-absolute path
115 srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath)) 115 srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
116 if srcpath.startswith(workdir): 116 if srcpath.startswith(unpackdir):
117 # OK, now we have the source file name, look for it in SRC_URI 117 # OK, now we have the source file name, look for it in SRC_URI
118 workdirfile = os.path.relpath(srcpath, workdir) 118 workdirfile = os.path.relpath(srcpath, unpackdir)
119 # FIXME this is where we ought to have some code in the fetcher, because this is naive 119 # FIXME this is where we ought to have some code in the fetcher, because this is naive
120 for item in src_uri.split(): 120 for item in src_uri.split():
121 localpath = bb.fetch2.localpath(item, rd) 121 localpath = bb.fetch2.localpath(item, rd)
@@ -317,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None):
317 import oe.recipeutils 317 import oe.recipeutils
318 318
319 srcdir = rd.getVar('S') 319 srcdir = rd.getVar('S')
320 workdir = rd.getVar('WORKDIR') 320 unpackdir = rd.getVar('UNPACKDIR')
321 321
322 import bb.fetch 322 import bb.fetch
323 simplified = {} 323 simplified = {}
@@ -336,10 +336,10 @@ def appendsrc(args, files, rd, extralines=None):
336 src_destdir = os.path.dirname(srcfile) 336 src_destdir = os.path.dirname(srcfile)
337 if not args.use_workdir: 337 if not args.use_workdir:
338 if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): 338 if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
339 srcdir = os.path.join(workdir, 'git') 339 srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX'))
340 if not bb.data.inherits_class('kernel-yocto', rd): 340 if not bb.data.inherits_class('kernel-yocto', rd):
341 logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') 341 logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}')
342 src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) 342 src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir)
343 src_destdir = os.path.normpath(src_destdir) 343 src_destdir = os.path.normpath(src_destdir)
344 344
345 if src_destdir and src_destdir != '.': 345 if src_destdir and src_destdir != '.':
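
A sketch of the path appendsrc now derives for kernel sources; the default value of BB_GIT_DEFAULT_DESTSUFFIX is assumed to be 'git', and rd is the recipe datastore from the surrounding code:

    import os

    unpackdir = rd.getVar('UNPACKDIR')
    destsuffix = rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX') or 'git'  # assumed default
    srcdir = os.path.join(unpackdir, destsuffix)  # replaces the old ${WORKDIR}/git
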
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 8e9ff38db6..ef0ba974a9 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -18,6 +18,8 @@ from urllib.parse import urlparse, urldefrag, urlsplit
18import hashlib 18import hashlib
19import bb.fetch2 19import bb.fetch2
20logger = logging.getLogger('recipetool') 20logger = logging.getLogger('recipetool')
21from oe.license import tidy_licenses
22from oe.license_finder import find_licenses
21 23
22tinfoil = None 24tinfoil = None
23plugins = None 25plugins = None
@@ -528,7 +530,7 @@ def create_recipe(args):
528 if ftmpdir and args.keep_temp: 530 if ftmpdir and args.keep_temp:
529 logger.info('Fetch temp directory is %s' % ftmpdir) 531 logger.info('Fetch temp directory is %s' % ftmpdir)
530 532
531 dirlist = scriptutils.filter_src_subdirs(srctree) 533 dirlist = os.listdir(srctree)
532 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) 534 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
533 if len(dirlist) == 1: 535 if len(dirlist) == 1:
534 singleitem = os.path.join(srctree, dirlist[0]) 536 singleitem = os.path.join(srctree, dirlist[0])
@@ -637,7 +639,6 @@ def create_recipe(args):
637 if len(splitline) > 1: 639 if len(splitline) > 1:
638 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): 640 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
639 srcuri = reformat_git_uri(splitline[1]) + ';branch=master' 641 srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
640 srcsubdir = 'git'
641 break 642 break
642 643
643 if args.src_subdir: 644 if args.src_subdir:
@@ -735,7 +736,7 @@ def create_recipe(args):
735 if srcsubdir and not args.binary: 736 if srcsubdir and not args.binary:
736 # (for binary packages we explicitly specify subdir= when fetching to 737 # (for binary packages we explicitly specify subdir= when fetching to
737 # match the default value of S, so we don't need to set it in that case) 738 # match the default value of S, so we don't need to set it in that case)
738 lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) 739 lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir)
739 lines_before.append('') 740 lines_before.append('')
740 741
741 if pkgarch: 742 if pkgarch:
@@ -764,6 +765,7 @@ def create_recipe(args):
764 extrafiles = extravalues.pop('extrafiles', {}) 765 extrafiles = extravalues.pop('extrafiles', {})
765 extra_pn = extravalues.pop('PN', None) 766 extra_pn = extravalues.pop('PN', None)
766 extra_pv = extravalues.pop('PV', None) 767 extra_pv = extravalues.pop('PV', None)
768 run_tasks = extravalues.pop('run_tasks', "").split()
767 769
768 if extra_pv and not realpv: 770 if extra_pv and not realpv:
769 realpv = extra_pv 771 realpv = extra_pv
@@ -824,7 +826,8 @@ def create_recipe(args):
824 extraoutdir = os.path.join(os.path.dirname(outfile), pn) 826 extraoutdir = os.path.join(os.path.dirname(outfile), pn)
825 bb.utils.mkdirhier(extraoutdir) 827 bb.utils.mkdirhier(extraoutdir)
826 for destfn, extrafile in extrafiles.items(): 828 for destfn, extrafile in extrafiles.items():
827 shutil.move(extrafile, os.path.join(extraoutdir, destfn)) 829 fn = destfn.format(pn=pn, pv=realpv)
830 shutil.move(extrafile, os.path.join(extraoutdir, fn))
828 831
829 lines = lines_before 832 lines = lines_before
830 lines_before = [] 833 lines_before = []
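
The destination names in extrafiles may now carry str.format() fields; a sketch with a hypothetical plugin entry:

    extrafiles = {'{pn}-licenses.inc': '/tmp/tmpabc123/licenses.inc'}  # hypothetical
    # With pn = 'foo' and realpv = '1.0' the file lands at <outdir>/foo/foo-licenses.inc.
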
@@ -839,7 +842,7 @@ def create_recipe(args):
839 line = line.replace(realpv, '${PV}') 842 line = line.replace(realpv, '${PV}')
840 if pn: 843 if pn:
841 line = line.replace(pn, '${BPN}') 844 line = line.replace(pn, '${BPN}')
842 if line == 'S = "${WORKDIR}/${BPN}-${PV}"': 845 if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line:
843 skipblank = True 846 skipblank = True
844 continue 847 continue
845 elif line.startswith('SRC_URI = '): 848 elif line.startswith('SRC_URI = '):
@@ -917,6 +920,10 @@ def create_recipe(args):
917 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) 920 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
918 tinfoil.modified_files() 921 tinfoil.modified_files()
919 922
923 for task in run_tasks:
924 logger.info("Running task %s" % task)
925 tinfoil.build_file_sync(outfile, task)
926
920 if tempsrc: 927 if tempsrc:
921 if args.keep_temp: 928 if args.keep_temp:
922 logger.info('Preserving temporary directory %s' % tempsrc) 929 logger.info('Preserving temporary directory %s' % tempsrc)
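
Plugins can request follow-up tasks through extravalues, consumed by the run_tasks loop above; task names illustrative:

    # in a plugin's handler:
    extravalues['run_tasks'] = 'do_fetch do_unpack'
    # create_recipe then runs each listed task on the finished recipe with
    # tinfoil.build_file_sync(outfile, task)
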
@@ -944,23 +951,13 @@ def fixup_license(value):
944 return '(' + value + ')' 951 return '(' + value + ')'
945 return value 952 return value
946 953
947def tidy_licenses(value):
948 """Flat, split and sort licenses"""
949 from oe.license import flattened_licenses
950 def _choose(a, b):
951 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
952 return ["(%s | %s)" % (str_a, str_b)]
953 if not isinstance(value, str):
954 value = " & ".join(value)
955 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
956
957def handle_license_vars(srctree, lines_before, handled, extravalues, d): 954def handle_license_vars(srctree, lines_before, handled, extravalues, d):
958 lichandled = [x for x in handled if x[0] == 'license'] 955 lichandled = [x for x in handled if x[0] == 'license']
959 if lichandled: 956 if lichandled:
960 # Someone else has already handled the license vars, just return their value 957 # Someone else has already handled the license vars, just return their value
961 return lichandled[0][1] 958 return lichandled[0][1]
962 959
963 licvalues = guess_license(srctree, d) 960 licvalues = find_licenses(srctree, d)
964 licenses = [] 961 licenses = []
965 lic_files_chksum = [] 962 lic_files_chksum = []
966 lic_unknown = [] 963 lic_unknown = []
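
tidy_licenses now lives in oe.license (see the new import at the top of this file's diff); assuming the moved helper keeps the behaviour removed above, e.g.:

    from oe.license import tidy_licenses

    print(tidy_licenses('MIT & (Apache-2.0 | BSD-3-Clause)'))
    # -> ['(Apache-2.0 | BSD-3-Clause)', 'MIT']
    # (flattened, deduplicated, case-insensitively sorted)
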
@@ -1040,222 +1037,9 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
1040 handled.append(('license', licvalues)) 1037 handled.append(('license', licvalues))
1041 return licvalues 1038 return licvalues
1042 1039
1043def get_license_md5sums(d, static_only=False, linenumbers=False):
1044 import bb.utils
1045 import csv
1046 md5sums = {}
1047 if not static_only and not linenumbers:
1048 # Gather md5sums of license files in common license dir
1049 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1050 for fn in os.listdir(commonlicdir):
1051 md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
1052 md5sums[md5value] = fn
1053
1054 # The following were extracted from common values in various recipes
1055 # (double checking the license against the license file itself, not just
1056 # the LICENSE value in the recipe)
1057
1058 # Read license md5sums from csv file
1059 scripts_path = os.path.dirname(os.path.realpath(__file__))
1060 for path in (d.getVar('BBPATH').split(':')
1061 + [os.path.join(scripts_path, '..', '..')]):
1062 csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv')
1063 if os.path.isfile(csv_path):
1064 with open(csv_path, newline='') as csv_file:
1065 fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5']
1066 reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
1067 for row in reader:
1068 if linenumbers:
1069 md5sums[row['md5sum']] = (
1070 row['license'], row['beginline'], row['endline'], row['md5'])
1071 else:
1072 md5sums[row['md5sum']] = row['license']
1073
1074 return md5sums
1075
1076def crunch_known_licenses(d):
1077 '''
1078 Calculate the MD5 checksums for the crunched versions of all common
1079 licenses. Also add additional known checksums.
1080 '''
1081
1082 crunched_md5sums = {}
1083
1084 # common licenses
1085 crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only'
1086 crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only'
1087 crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only'
1088
1089 # The following two were gleaned from the "forever" npm package
1090 crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
1091 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
1092 crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
1093 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
1094 crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only'
1095 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
1096 crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only'
1097 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
1098 crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only'
1099 # unixODBC-2.3.4 COPYING
1100 crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only'
1101 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
1102 crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only'
1103 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
1104 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
1105
1106 # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD
1107 crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause'
1108 # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE
1109 crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause'
1110 # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE
1111 crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause'
1112 # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE
1113 crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause'
1114 # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE
1115 crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause'
1116 # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE
1117 crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause'
1118 # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE
1119 crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause'
1120 # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE
1121 crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause'
1122 # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE
1123 crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause'
1124 # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE
1125 crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT'
1126 # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE
1127 crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT'
1128 # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE
1129 crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0'
1130 # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md
1131 crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0'
1132 # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE
1133 crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0'
1134 # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt
1135 crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0'
1136 # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE
1137 crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0'
1138 # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE
1139 crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense'
1140 # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
1141 crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'
1142
1143 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1144 for fn in sorted(os.listdir(commonlicdir)):
1145 md5value, lictext = crunch_license(os.path.join(commonlicdir, fn))
1146 if md5value not in crunched_md5sums:
1147 crunched_md5sums[md5value] = fn
1148 elif fn != crunched_md5sums[md5value]:
1149 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn))
1150 else:
1151 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value]))
1152
1153 return crunched_md5sums
1154
1155def crunch_license(licfile):
1156 '''
1157 Remove non-material text from a license file and then calculate its
1158 md5sum. This works well for licenses that contain a copyright statement,
1159 but is also a useful way to handle people's insistence upon reformatting
1160 the license text slightly (with no material difference to the text of the
1161 license).
1162 '''
1163
1164 import oe.utils
1165
1166 # Note: these are carefully constructed!
1167 license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
1168 license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
1169 copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
1170 disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
1171 email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
1172 header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
1173 tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
1174 url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
1175
1176 lictext = []
1177 with open(licfile, 'r', errors='surrogateescape') as f:
1178 for line in f:
1179 # Drop opening statements
1180 if copyright_re.match(line):
1181 continue
1182 elif disclaimer_re.match(line):
1183 continue
1184 elif email_re.match(line):
1185 continue
1186 elif header_re.match(line):
1187 continue
1188 elif tag_re.match(line):
1189 continue
1190 elif url_re.match(line):
1191 continue
1192 elif license_title_re.match(line):
1193 continue
1194 elif license_statement_re.match(line):
1195 continue
1196 # Strip comment symbols
1197 line = line.replace('*', '') \
1198 .replace('#', '')
1199 # Unify spelling
1200 line = line.replace('sub-license', 'sublicense')
1201 # Squash spaces
1202 line = oe.utils.squashspaces(line.strip())
1203 # Replace smart quotes, double quotes and backticks with single quotes
1204 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
1205 # Unify brackets
1206 line = line.replace("{", "[").replace("}", "]")
1207 if line:
1208 lictext.append(line)
1209
1210 m = hashlib.md5()
1211 try:
1212 m.update(' '.join(lictext).encode('utf-8'))
1213 md5val = m.hexdigest()
1214 except UnicodeEncodeError:
1215 md5val = None
1216 lictext = ''
1217 return md5val, lictext
1218
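The removed crunch_license() normalizes away cosmetic differences (comment markers, quoting, spacing) before hashing, so reformatted copies of the same license still map to one checksum. A minimal standalone sketch of the idea, using simplified patterns rather than the exact regex set above:

```python
import hashlib
import re

def crunch(text):
    """Normalize license text and return its md5 (simplified sketch of
    crunch_license(); the real regex set above is far more thorough)."""
    lines = []
    for line in text.splitlines():
        # Drop copyright statements, as the original does
        if re.match(r'^\s*[#*]*\s*Copyright\b', line, re.IGNORECASE):
            continue
        # Strip comment symbols and squash whitespace
        line = line.replace('*', '').replace('#', '')
        line = ' '.join(line.split())
        # Unify quote characters
        line = line.replace('"', "'").replace('`', "'")
        if line:
            lines.append(line)
    return hashlib.md5(' '.join(lines).encode('utf-8')).hexdigest()

# Two cosmetically different copies of the same text crunch to one checksum
a = "# Copyright (c) 2024 Someone\n# Permission is hereby granted\n"
b = "Permission   is  hereby granted\n"
assert crunch(a) == crunch(b)
```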
1219def guess_license(srctree, d):
1220 import bb
1221 md5sums = get_license_md5sums(d)
1222
1223 crunched_md5sums = crunch_known_licenses(d)
1224
1225 licenses = []
1226 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
1227 skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")
1228 licfiles = []
1229 for root, dirs, files in os.walk(srctree):
1230 for fn in files:
1231 if fn.endswith(skip_extensions):
1232 continue
1233 for spec in licspecs:
1234 if fnmatch.fnmatch(fn, spec):
1235 fullpath = os.path.join(root, fn)
1236 if not fullpath in licfiles:
1237 licfiles.append(fullpath)
1238 for licfile in sorted(licfiles):
1239 md5value = bb.utils.md5_file(licfile)
1240 license = md5sums.get(md5value, None)
1241 if not license:
1242 crunched_md5, lictext = crunch_license(licfile)
1243 license = crunched_md5sums.get(crunched_md5, None)
1244 if lictext and not license:
1245 license = 'Unknown'
1246 logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
1247 "and replace `Unknown` with the license:\n" \
1248 "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value))
1249 if license:
1250 licenses.append((license, os.path.relpath(licfile, srctree), md5value))
1251
1252 # FIXME should we grab at least one source file with a license header and add that too?
1253
1254 return licenses
1255
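guess_license() selected candidate files by shell-style name patterns rather than exact filenames. The matching step in isolation, as a sketch reusing the same specs and skip list:

```python
import fnmatch
import os

# Same specs and skip list as guess_license() above
licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*',
            '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*',
            '[Cc]opyright*', 'e[dp]l-v10']
skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")

def find_license_candidates(srctree):
    """Collect files whose names look like license files."""
    licfiles = []
    for root, _, files in os.walk(srctree):
        for fn in files:
            if fn.endswith(skip_extensions):
                continue  # sources/docs that merely mention licenses
            if any(fnmatch.fnmatch(fn, spec) for spec in licspecs):
                fullpath = os.path.join(root, fn)
                if fullpath not in licfiles:
                    licfiles.append(fullpath)
    return sorted(licfiles)
```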
1256def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): 1040def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
1257 """ 1041 """
1258 Given a list of (license, path, md5sum) as returned by guess_license(), 1042 Given a list of (license, path, md5sum) as returned by match_licenses(),
1259 a dict of package name to path mappings, write out a set of 1043 a dict of package name to path mappings, write out a set of
1260 package-specific LICENSE values. 1044 package-specific LICENSE values.
1261 """ 1045 """
@@ -1284,6 +1068,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn
1284 outlicenses[pkgname] = licenses 1068 outlicenses[pkgname] = licenses
1285 return outlicenses 1069 return outlicenses
1286 1070
1071def generate_common_licenses_chksums(common_licenses, d):
1072 lic_files_chksums = []
1073 for license in tidy_licenses(common_licenses):
1074 licfile = '${COMMON_LICENSE_DIR}/' + license
1075 md5value = bb.utils.md5_file(d.expand(licfile))
1076 lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value))
1077 return lic_files_chksums
1078
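The new generate_common_licenses_chksums() helper builds LIC_FILES_CHKSUM entries that point at ${COMMON_LICENSE_DIR}. A sketch of the md5 step (this is what bb.utils.md5_file() computes) and the entry format it produces:

```python
import hashlib

def md5_file(path):
    """What bb.utils.md5_file() does: md5 of a file's contents."""
    h = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()

# generate_common_licenses_chksums() emits one entry per license, e.g. for
# 'MIT' (checksum resolved against the build's common-licenses directory):
#   file://${COMMON_LICENSE_DIR}/MIT;md5=<md5 of that file>
```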
1287def read_pkgconfig_provides(d): 1079def read_pkgconfig_provides(d):
1288 pkgdatadir = d.getVar('PKGDATA_DIR') 1080 pkgdatadir = d.getVar('PKGDATA_DIR')
1289 pkgmap = {} 1081 pkgmap = {}
@@ -1418,4 +1210,3 @@ def register_commands(subparsers):
1418 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) 1210 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
1419 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') 1211 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
1420 parser_create.set_defaults(func=create_recipe) 1212 parser_create.set_defaults(func=create_recipe)
1421
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
index c560831442..4b1fa39d13 100644
--- a/scripts/lib/recipetool/create_go.py
+++ b/scripts/lib/recipetool/create_go.py
@@ -10,13 +10,7 @@
10# 10#
11 11
12 12
13from collections import namedtuple
14from enum import Enum
15from html.parser import HTMLParser
16from recipetool.create import RecipeHandler, handle_license_vars 13from recipetool.create import RecipeHandler, handle_license_vars
17from recipetool.create import guess_license, tidy_licenses, fixup_license
18from recipetool.create import determine_from_url
19from urllib.error import URLError
20 14
21import bb.utils 15import bb.utils
22import json 16import json
@@ -25,33 +19,20 @@ import os
25import re 19import re
26import subprocess 20import subprocess
27import sys 21import sys
28import shutil
29import tempfile 22import tempfile
30import urllib.parse
31import urllib.request
32 23
33 24
34GoImport = namedtuple('GoImport', 'root vcs url suffix')
35logger = logging.getLogger('recipetool') 25logger = logging.getLogger('recipetool')
36CodeRepo = namedtuple(
37 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
38 26
39tinfoil = None 27tinfoil = None
40 28
41# Regular expression to parse pseudo semantic version
42# see https://go.dev/ref/mod#pseudo-versions
43re_pseudo_semver = re.compile(
44 r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
45# Regular expression to parse semantic version
46re_semver = re.compile(
47 r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
48
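The two removed regular expressions distinguish Go pseudo-versions (https://go.dev/ref/mod#pseudo-versions) from ordinary semantic-version tags. A quick illustration of what the pseudo-version pattern captures (the version string is made up):

```python
import re

# Copied from the removed code above
re_pseudo_semver = re.compile(
    r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")

# A pseudo-version encodes a UTC timestamp plus an abbreviated commit hash
m = re_pseudo_semver.match("v0.0.0-20210101120000-abcdef123456")
assert m and m.group('utc') == "20210101120000"
assert m.group('commithash') == "abcdef123456"

# An ordinary tagged release is not mistaken for a pseudo-version
assert re_pseudo_semver.match("v1.2.3") is None
```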
49 29
50def tinfoil_init(instance): 30def tinfoil_init(instance):
51 global tinfoil 31 global tinfoil
52 tinfoil = instance 32 tinfoil = instance
53 33
54 34
35
55class GoRecipeHandler(RecipeHandler): 36class GoRecipeHandler(RecipeHandler):
56 """Class to handle the go recipe creation""" 37 """Class to handle the go recipe creation"""
57 38
@@ -83,580 +64,6 @@ class GoRecipeHandler(RecipeHandler):
83 64
84 return bindir 65 return bindir
85 66
86 def __resolve_repository_static(self, modulepath):
87 """Resolve the repository in a static manner
88
89 The method is based on the go implementation of
90 `repoRootFromVCSPaths` in
91 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
92 """
93
94 url = urllib.parse.urlparse("https://" + modulepath)
95 req = urllib.request.Request(url.geturl())
96
97 try:
98 resp = urllib.request.urlopen(req)
 99 # Some module paths are just redirects to github (or some other vcs
100 # hoster). Therefore, we check if this modulepath redirects to
101 # somewhere else
102 if resp.geturl() != url.geturl():
 103 bb.debug(1, "%s is redirected to %s" %
104 (url.geturl(), resp.geturl()))
105 url = urllib.parse.urlparse(resp.geturl())
106 modulepath = url.netloc + url.path
107
108 except URLError as url_err:
109 # This is probably because the module path
110 # contains the subdir and major path. Thus,
111 # we ignore this error for now
112 logger.debug(
113 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err)))
114
115 host, _, _ = modulepath.partition('/')
116
117 class vcs(Enum):
118 pathprefix = "pathprefix"
119 regexp = "regexp"
120 type = "type"
121 repo = "repo"
122 check = "check"
123 schemelessRepo = "schemelessRepo"
124
125 # GitHub
126 vcsGitHub = {}
127 vcsGitHub[vcs.pathprefix] = "github.com"
128 vcsGitHub[vcs.regexp] = re.compile(
129 r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
130 vcsGitHub[vcs.type] = "git"
131 vcsGitHub[vcs.repo] = "https://\\g<root>"
132
133 # Bitbucket
134 vcsBitbucket = {}
135 vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
136 vcsBitbucket[vcs.regexp] = re.compile(
137 r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
138 vcsBitbucket[vcs.type] = "git"
139 vcsBitbucket[vcs.repo] = "https://\\g<root>"
140
141 # IBM DevOps Services (JazzHub)
142 vcsIBMDevOps = {}
143 vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
144 vcsIBMDevOps[vcs.regexp] = re.compile(
145 r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
146 vcsIBMDevOps[vcs.type] = "git"
147 vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
148
149 # Git at Apache
150 vcsApacheGit = {}
151 vcsApacheGit[vcs.pathprefix] = "git.apache.org"
152 vcsApacheGit[vcs.regexp] = re.compile(
153 r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
154 vcsApacheGit[vcs.type] = "git"
155 vcsApacheGit[vcs.repo] = "https://\\g<root>"
156
157 # Git at OpenStack
158 vcsOpenStackGit = {}
159 vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
160 vcsOpenStackGit[vcs.regexp] = re.compile(
161 r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
162 vcsOpenStackGit[vcs.type] = "git"
163 vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
164
165 # chiselapp.com for fossil
166 vcsChiselapp = {}
167 vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
168 vcsChiselapp[vcs.regexp] = re.compile(
169 r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
170 vcsChiselapp[vcs.type] = "fossil"
171 vcsChiselapp[vcs.repo] = "https://\\g<root>"
172
173 # General syntax for any server.
174 # Must be last.
175 vcsGeneralServer = {}
176 vcsGeneralServer[vcs.regexp] = re.compile(
177 "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
178 vcsGeneralServer[vcs.schemelessRepo] = True
179
180 vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
181 vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
182 vcsGeneralServer]
183
184 if modulepath.startswith("example.net") or modulepath == "rsc.io":
185 logger.warning("Suspicious module path %s" % modulepath)
186 return None
187 if modulepath.startswith("http:") or modulepath.startswith("https:"):
188 logger.warning("Import path should not start with %s %s" %
189 ("http", "https"))
190 return None
191
192 rootpath = None
193 vcstype = None
194 repourl = None
195 suffix = None
196
197 for srv in vcsPaths:
198 m = srv[vcs.regexp].match(modulepath)
199 if vcs.pathprefix in srv:
200 if host == srv[vcs.pathprefix]:
201 rootpath = m.group('root')
202 vcstype = srv[vcs.type]
203 repourl = m.expand(srv[vcs.repo])
204 suffix = m.group('suffix')
205 break
206 elif m and srv[vcs.schemelessRepo]:
207 rootpath = m.group('root')
208 vcstype = m[vcs.type]
209 repourl = m[vcs.repo]
210 suffix = m.group('suffix')
211 break
212
213 return GoImport(rootpath, vcstype, repourl, suffix)
214
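The static table above mirrors Go's repoRootFromVCSPaths: each entry pairs a host prefix with a regexp whose named groups split a module path into repository root and suffix, plus a repo-URL template. For example, with the GitHub entry:

```python
import re

# The GitHub entry from the table above
regexp = re.compile(
    r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')

m = regexp.match("github.com/golang/protobuf/ptypes")
assert m.group('root') == "github.com/golang/protobuf"
assert m.group('suffix') == "ptypes"
# The repo template "https://\g<root>" expands against the match:
assert m.expand(r"https://\g<root>") == "https://github.com/golang/protobuf"
```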
215 def __resolve_repository_dynamic(self, modulepath):
216 """Resolve the repository root in a dynamic manner.
217
218 The method is based on the go implementation of
219 `repoRootForImportDynamic` in
220 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
221 """
222 url = urllib.parse.urlparse("https://" + modulepath)
223
224 class GoImportHTMLParser(HTMLParser):
225
226 def __init__(self):
227 super().__init__()
228 self.__srv = []
229
230 def handle_starttag(self, tag, attrs):
231 if tag == 'meta' and list(
232 filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
233 content = list(
234 filter(lambda a: (a[0] == 'content'), attrs))
235 if content:
236 self.__srv = content[0][1].split()
237
238 @property
239 def import_prefix(self):
240 return self.__srv[0] if len(self.__srv) else None
241
242 @property
243 def vcs(self):
244 return self.__srv[1] if len(self.__srv) else None
245
246 @property
247 def repourl(self):
248 return self.__srv[2] if len(self.__srv) else None
249
250 url = url.geturl() + "?go-get=1"
251 req = urllib.request.Request(url)
252
253 try:
254 resp = urllib.request.urlopen(req)
255
256 except URLError as url_err:
257 logger.warning(
258 "Failed to fetch page from [%s]: %s", url, str(url_err))
259 return None
260
261 parser = GoImportHTMLParser()
262 parser.feed(resp.read().decode('utf-8'))
263 parser.close()
264
265 return GoImport(parser.import_prefix, parser.vcs, parser.repourl, None)
266
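The dynamic fallback implements Go's ?go-get=1 discovery protocol: the server answers with an HTML page carrying a <meta name="go-import" content="<prefix> <vcs> <repo-url>"> tag. The parsing step in isolation, without the network fetch (golang.org/x/net really serves such a tag):

```python
from html.parser import HTMLParser

class GoImportParser(HTMLParser):
    """Extract the content of a <meta name="go-import"> tag."""
    def __init__(self):
        super().__init__()
        self.srv = []

    def handle_starttag(self, tag, attrs):
        attrs = dict(attrs)
        if tag == 'meta' and attrs.get('name') == 'go-import':
            self.srv = attrs.get('content', '').split()

page = ('<html><head><meta name="go-import" '
        'content="golang.org/x/net git https://go.googlesource.com/net">'
        '</head></html>')
parser = GoImportParser()
parser.feed(page)
parser.close()
assert parser.srv == ['golang.org/x/net', 'git',
                      'https://go.googlesource.com/net']
```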
267 def __resolve_from_golang_proxy(self, modulepath, version):
268 """
269 Resolves repository data from golang proxy
270 """
271 url = urllib.parse.urlparse("https://proxy.golang.org/"
272 + modulepath
273 + "/@v/"
274 + version
275 + ".info")
276
277 # Transform url to lower case, golang proxy doesn't like mixed case
278 req = urllib.request.Request(url.geturl().lower())
279
280 try:
281 resp = urllib.request.urlopen(req)
282 except URLError as url_err:
283 logger.warning(
284 "Failed to fetch page from [%s]: %s", url, str(url_err))
285 return None
286
287 golang_proxy_res = resp.read().decode('utf-8')
288 modinfo = json.loads(golang_proxy_res)
289
290 if modinfo and 'Origin' in modinfo:
291 origin = modinfo['Origin']
292 _root_url = urllib.parse.urlparse(origin['URL'])
293
294 # We normalize the repo URL since we don't want the scheme in it
295 _subdir = origin['Subdir'] if 'Subdir' in origin else None
296 _root, _, _ = self.__split_path_version(modulepath)
297 if _subdir:
298 _root = _root[:-len(_subdir)].strip('/')
299
300 _commit = origin['Hash']
301 _vcs = origin['VCS']
302 return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
303
304 return None
305
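This follows the Go module proxy protocol: GET https://proxy.golang.org/<module>/@v/<version>.info returns JSON whose optional Origin object names the VCS, repository URL and commit hash. A sketch of the response handling against a canned response (field names match the code above; the values are made up):

```python
import json

# Illustrative response of the shape the proxy's .info endpoint returns
golang_proxy_res = '''{
    "Version": "v1.2.3",
    "Time": "2023-01-01T00:00:00Z",
    "Origin": {
        "VCS": "git",
        "URL": "https://github.com/example/mod",
        "Hash": "0123456789abcdef0123456789abcdef01234567"
    }
}'''

modinfo = json.loads(golang_proxy_res)
origin = modinfo.get('Origin')
if origin:
    # Enough to pin SRC_URI and SRCREV without cloning the repository
    print(origin['VCS'], origin['URL'], origin['Hash'])
```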
306 def __resolve_repository(self, modulepath):
307 """
308 Resolves src uri from go module-path
309 """
310 repodata = self.__resolve_repository_static(modulepath)
311 if not repodata or not repodata.url:
312 repodata = self.__resolve_repository_dynamic(modulepath)
313 if not repodata or not repodata.url:
314 logger.error(
315 "Could not resolve repository for module path '%s'" % modulepath)
316 # There is no way to recover from this
317 sys.exit(14)
318 if repodata:
319 logger.debug(1, "Resolved download path for import '%s' => %s" % (
320 modulepath, repodata.url))
321 return repodata
322
323 def __split_path_version(self, path):
324 i = len(path)
325 dot = False
326 for j in range(i, 0, -1):
327 if path[j - 1] < '0' or path[j - 1] > '9':
328 break
329 if path[j - 1] == '.':
330 dot = True
331 break
332 i = j - 1
333
334 if i <= 1 or i == len(
335 path) or path[i - 1] != 'v' or path[i - 2] != '/':
336 return path, "", True
337
338 prefix, pathMajor = path[:i - 2], path[i - 2:]
339 if dot or len(
340 pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
341 return path, "", False
342
343 return prefix, pathMajor, True
344
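__split_path_version() reimplements Go's module path rule that a trailing /vN (N >= 2) is a major-version suffix, while /v1 and gopkg.in-style .vN suffixes are not split off. A simplified restatement with the expected results (edge cases such as /v0N differ slightly from the character-scanning original):

```python
import re

def split_path_version(path):
    """Simplified restatement of __split_path_version()."""
    m = re.match(r'^(?P<prefix>.+)(?P<major>/v[2-9]\d*)$', path)
    if m:
        return m.group('prefix'), m.group('major'), True
    if path.endswith('/v1'):
        return path, '', False
    return path, '', True

assert split_path_version("github.com/user/repo/v2") == \
    ("github.com/user/repo", "/v2", True)
assert split_path_version("github.com/user/repo/v1") == \
    ("github.com/user/repo/v1", "", False)
assert split_path_version("github.com/user/repo") == \
    ("github.com/user/repo", "", True)
```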
345 def __get_path_major(self, pathMajor):
346 if not pathMajor:
347 return ""
348
349 if pathMajor[0] != '/' and pathMajor[0] != '.':
350 logger.error(
351 "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
352
353 if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
354 pathMajor = pathMajor[:len("-unstable") - 2]
355
356 return pathMajor[1:]
357
358 def __build_coderepo(self, repo, path):
359 codedir = ""
360 pathprefix, pathMajor, _ = self.__split_path_version(path)
361 if repo.root == path:
362 pathprefix = path
363 elif path.startswith(repo.root):
364 codedir = pathprefix[len(repo.root):].strip('/')
365
366 pseudoMajor = self.__get_path_major(pathMajor)
367
368 logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
369 repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
370
371 return CodeRepo(path, repo.root, codedir,
372 pathMajor, pathprefix, pseudoMajor)
373
374 def __resolve_version(self, repo, path, version):
375 hash = None
376 coderoot = self.__build_coderepo(repo, path)
377
378 def vcs_fetch_all():
379 tmpdir = tempfile.mkdtemp()
380 clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
381 bb.process.run(clone_cmd)
382 log_cmd = "git log --all --pretty='%H %d' --decorate=short"
383 output, _ = bb.process.run(
384 log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
385 bb.utils.prunedir(tmpdir)
386 return output.strip().split('\n')
387
388 def vcs_fetch_remote(tag):
389 # add * to grab ^{}
390 refs = {}
391 ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
392 repo.url, tag)
393 output, _ = bb.process.run(ls_remote_cmd)
394 output = output.strip().split('\n')
395 for line in output:
396 f = line.split(maxsplit=1)
397 if len(f) != 2:
398 continue
399
400 for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
401 if f[1].startswith(prefix):
402 refs[f[1][len(prefix):]] = f[0]
403
404 for key, hash in refs.items():
405 if key.endswith(r"^{}"):
406 refs[key.strip(r"^{}")] = hash
407
408 return refs[tag]
409
410 m_pseudo_semver = re_pseudo_semver.match(version)
411
412 if m_pseudo_semver:
413 remote_refs = vcs_fetch_all()
414 short_commit = m_pseudo_semver.group('commithash')
415 for l in remote_refs:
416 r = l.split(maxsplit=1)
417 sha1 = r[0] if len(r) else None
418 if not sha1:
419 logger.error(
420 "Ups: could not resolve abbref commit for %s" % short_commit)
421
422 elif sha1.startswith(short_commit):
423 hash = sha1
424 break
425 else:
426 m_semver = re_semver.match(version)
427 if m_semver:
428
429 def get_sha1_remote(re):
430 rsha1 = None
431 for line in remote_refs:
432 # Split lines of the following format:
433 # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
434 lineparts = line.split(maxsplit=1)
435 sha1 = lineparts[0] if len(lineparts) else None
436 refstring = lineparts[1] if len(
437 lineparts) == 2 else None
438 if refstring:
439 # Normalize tag string and split in case of multiple
 440 # refs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
441 refs = refstring.strip('(), ').split(',')
442 for ref in refs:
443 if re.match(ref.strip()):
444 rsha1 = sha1
445 return rsha1
446
447 semver = "v" + m_semver.group('major') + "."\
448 + m_semver.group('minor') + "."\
449 + m_semver.group('patch') \
450 + (("-" + m_semver.group('prerelease'))
451 if m_semver.group('prerelease') else "")
452
453 tag = os.path.join(
454 coderoot.codeDir, semver) if coderoot.codeDir else semver
455
456 # probe tag using 'ls-remote', which is faster than fetching
457 # complete history
458 hash = vcs_fetch_remote(tag)
459 if not hash:
460 # backup: fetch complete history
461 remote_refs = vcs_fetch_all()
462 hash = get_sha1_remote(
463 re.compile(fr"(tag:|HEAD ->) ({tag})"))
464
465 logger.debug(
466 "Resolving commit for tag '%s' -> '%s'", tag, hash)
467 return hash
468
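__resolve_version() probes a single tag with `git ls-remote` before falling back to a full bare clone, and has to cope with the peeled `^{}` entries that annotated tags produce. The parsing on canned ls-remote output, as a standalone sketch (hashes are made up):

```python
def resolve_tag(ls_remote_output, tag):
    """Map a tag to a commit hash from `git ls-remote --tags` output,
    preferring the peeled ^{} entry of an annotated tag (sketch)."""
    refs = {}
    for line in ls_remote_output.strip().split('\n'):
        fields = line.split(maxsplit=1)
        if len(fields) != 2:
            continue
        sha1, ref = fields
        for prefix in ("HEAD", "refs/heads/", "refs/tags/"):
            if ref.startswith(prefix):
                refs[ref[len(prefix):]] = sha1
    # An annotated tag also appears as "<tag>^{}" pointing at the commit
    # rather than the tag object; prefer that entry
    for key, sha1 in list(refs.items()):
        if key.endswith("^{}"):
            refs[key[:-3]] = sha1
    return refs.get(tag)

output = ("1111111111111111111111111111111111111111\trefs/tags/v1.2.3\n"
          "2222222222222222222222222222222222222222\trefs/tags/v1.2.3^{}\n")
assert resolve_tag(output, "v1.2.3").startswith("2222")
```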
469 def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
470 """Generate SRC_URI functions for go imports"""
471
472 logger.info("Resolving repository for module %s", path)
473 # First try to resolve repo and commit from golang proxy
474 # Most info is already there and we don't have to go through the
 475 # repository or even perform the version resolution magic
476 golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
477 if golang_proxy_info:
478 repo = golang_proxy_info[0]
479 commit = golang_proxy_info[1]
480 else:
481 # Fallback
482 # Resolve repository by 'hand'
483 repo = self.__resolve_repository(path)
484 commit = self.__resolve_version(repo, path, version)
485
486 url = urllib.parse.urlparse(repo.url)
487 repo_url = url.netloc + url.path
488
489 coderoot = self.__build_coderepo(repo, path)
490
491 inline_fcn = "${@go_src_uri("
492 inline_fcn += f"'{repo_url}','{version}'"
493 if repo_url != path:
494 inline_fcn += f",path='{path}'"
495 if coderoot.codeDir:
496 inline_fcn += f",subdir='{coderoot.codeDir}'"
497 if repo.vcs != 'git':
498 inline_fcn += f",vcs='{repo.vcs}'"
499 if replaces:
500 inline_fcn += f",replaces='{replaces}'"
501 if coderoot.pathMajor:
502 inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
503 inline_fcn += ")}"
504
505 return inline_fcn, commit
506
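The assembled string is written verbatim into the generated .inc file and expanded by bitbake at parse time via the go_src_uri() inline function. For a hypothetical module github.com/example/mod/v2 pinned at v2.1.0, the assembly yields:

```python
repo_url = "github.com/example/mod"
path = "github.com/example/mod/v2"
version = "v2.1.0"
pathmajor = "/v2"

inline_fcn = "${@go_src_uri(" + f"'{repo_url}','{version}'"
if repo_url != path:
    inline_fcn += f",path='{path}'"
if pathmajor:
    inline_fcn += f",pathmajor='{pathmajor}'"
inline_fcn += ")}"

assert inline_fcn == ("${@go_src_uri('github.com/example/mod','v2.1.0',"
                      "path='github.com/example/mod/v2',pathmajor='/v2')}")
```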
507 def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
508
509 import re
510 src_uris = []
511 src_revs = []
512
513 def generate_src_rev(path, version, commithash):
514 src_rev = f"# {path}@{version} => {commithash}\n"
 515 # Oops...maybe someone manipulated the source repository and the
 516 # version or commit could not be resolved. This is a sign of
 517 # a) the supply chain was manipulated (bad)
 518 # b) the version resolving implementation no longer works
 519 # (less bad)
520 if not commithash:
521 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
522 src_rev += f"#!!! Could not resolve version !!!\n"
523 src_rev += f"#!!! Possible supply chain attack !!!\n"
524 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
525 src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
526
527 return src_rev
528
 529 # we first go over the replacement list, because we are essentially
530 # interested only in the replaced path
531 if go_mod['Replace']:
532 for replacement in go_mod['Replace']:
533 oldpath = replacement['Old']['Path']
534 path = replacement['New']['Path']
535 version = ''
536 if 'Version' in replacement['New']:
537 version = replacement['New']['Version']
538
539 if os.path.exists(os.path.join(srctree, path)):
540 # the module refers to the local path, remove it from requirement list
541 # because it's a local module
542 go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
543 else:
544 # Replace the path and the version, so we don't iterate replacement list anymore
545 for require in go_mod['Require']:
546 if require['Path'] == oldpath:
547 require.update({'Path': path, 'Version': version})
548 break
549
550 for require in go_mod['Require']:
551 path = require['Path']
552 version = require['Version']
553
554 inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
555 path, version)
556 src_uris.append(inline_fcn)
557 src_revs.append(generate_src_rev(path, version, commithash))
558
559 # strip version part from module URL /vXX
560 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
561 pn, _ = determine_from_url(baseurl)
562 go_mods_basename = "%s-modules.inc" % pn
563
564 go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
565 with open(go_mods_filename, "w") as f:
566 # We introduce this indirection to make the tests a little easier
567 f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
568 f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
569 for uri in src_uris:
570 f.write(" " + uri + " \\\n")
571 f.write("\"\n\n")
572 for rev in src_revs:
573 f.write(rev + "\n")
574
575 extravalues['extrafiles'][go_mods_basename] = go_mods_filename
576
577 def __go_run_cmd(self, cmd, cwd, d):
578 return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
579 shell=True, cwd=cwd)
580
581 def __go_native_version(self, d):
582 stdout, _ = self.__go_run_cmd("go version", None, d)
583 m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
584 major = int(m.group(2))
585 minor = int(m.group(3))
586 patch = int(m.group(4))
587
588 return major, minor, patch
589
590 def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
591
592 patchfilename = "go.mod.patch"
593 go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
594 d)
595 self.__go_run_cmd("go mod tidy -go=%d.%d" %
596 (go_native_version_major, go_native_version_minor), srctree, d)
597 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
598
599 # Create patch in order to upgrade go version
600 self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
601 # Restore original state
602 self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
603
604 go_mod = json.loads(stdout)
605 tmpfile = os.path.join(localfilesdir, patchfilename)
606 shutil.move(os.path.join(srctree, patchfilename), tmpfile)
607
608 extravalues['extrafiles'][patchfilename] = tmpfile
609
610 return go_mod, patchfilename
611
612 def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
613 # Perform vendoring to retrieve the correct modules.txt
614 tmp_vendor_dir = tempfile.mkdtemp()
615
616 # -v causes to go to print modules.txt to stderr
617 _, stderr = self.__go_run_cmd(
618 "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
619
620 modules_txt_basename = "modules.txt"
621 modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
622 with open(modules_txt_filename, "w") as f:
623 f.write(stderr)
624
625 extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
626
627 licenses = []
628 lic_files_chksum = []
629 licvalues = guess_license(tmp_vendor_dir, d)
630 shutil.rmtree(tmp_vendor_dir)
631
632 if licvalues:
633 for licvalue in licvalues:
634 license = licvalue[0]
635 lics = tidy_licenses(fixup_license(license))
636 lics = [lic for lic in lics if lic not in licenses]
637 if len(lics):
638 licenses.extend(lics)
639 lic_files_chksum.append(
640 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
641
642 # strip version part from module URL /vXX
643 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
644 pn, _ = determine_from_url(baseurl)
645 licenses_basename = "%s-licenses.inc" % pn
646
647 licenses_filename = os.path.join(localfilesdir, licenses_basename)
648 with open(licenses_filename, "w") as f:
649 f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
650 ' & '.join(sorted(licenses, key=str.casefold)))
651 # We introduce this indirection to make the tests a little easier
652 f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
653 f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
654 for lic in lic_files_chksum:
655 f.write(" " + lic + " \\\n")
656 f.write("\"\n")
657
658 extravalues['extrafiles'][licenses_basename] = licenses_filename
659
660 def process(self, srctree, classes, lines_before, 67 def process(self, srctree, classes, lines_before,
661 lines_after, handled, extravalues): 68 lines_after, handled, extravalues):
662 69
@@ -667,63 +74,52 @@ class GoRecipeHandler(RecipeHandler):
667 if not files: 74 if not files:
668 return False 75 return False
669 76
670 d = bb.data.createCopy(tinfoil.config_data)
671 go_bindir = self.__ensure_go() 77 go_bindir = self.__ensure_go()
672 if not go_bindir: 78 if not go_bindir:
673 sys.exit(14) 79 sys.exit(14)
674 80
675 d.prependVar('PATH', '%s:' % go_bindir)
676 handled.append('buildsystem') 81 handled.append('buildsystem')
677 classes.append("go-vendor") 82 classes.append("go-mod")
678 83
679 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) 84 # Use go-mod-update-modules to set the full SRC_URI and LICENSE
85 classes.append("go-mod-update-modules")
86 extravalues["run_tasks"] = "update_modules"
680 87
681 go_mod = json.loads(stdout) 88 with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir:
682 go_import = go_mod['Module']['Path'] 89 env = dict(os.environ)
683 go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) 90 env["PATH"] += f":{go_bindir}"
684 go_version_major = int(go_version_match.group(1)) 91 env['GOMODCACHE'] = tmp_mod_dir
685 go_version_minor = int(go_version_match.group(2))
686 src_uris = []
687 92
688 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') 93 stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True)
689 extravalues.setdefault('extrafiles', {}) 94 go_mod = json.loads(stdout)
95 go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path'])
690 96
691 # Use an explicit name determined from the module name because it 97 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
692 # might differ from the actual URL for replaced modules 98 extravalues.setdefault('extrafiles', {})
693 # strip version part from module URL /vXX
694 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
695 pn, _ = determine_from_url(baseurl)
696 99
697 # go.mod files with version < 1.17 may not include all indirect 100 # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files
698 # dependencies. Thus, we have to upgrade the go version. 101 basename = "{pn}-licenses.inc"
699 if go_version_major == 1 and go_version_minor < 17: 102 filename = os.path.join(localfilesdir, basename)
700 logger.warning( 103 with open(filename, "w") as f:
701 "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") 104 f.write("# FROM RECIPETOOL\n")
702 go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, 105 extravalues['extrafiles'][f"../{basename}"] = filename
703 extravalues, d)
704 src_uris.append(
705 "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
706 106
707 # Check whether the module is vendored. If so, we have nothing to do. 107 basename = "{pn}-go-mods.inc"
708 # Otherwise we gather all dependencies and add them to the recipe 108 filename = os.path.join(localfilesdir, basename)
709 if not os.path.exists(os.path.join(srctree, "vendor")): 109 with open(filename, "w") as f:
110 f.write("# FROM RECIPETOOL\n")
111 extravalues['extrafiles'][f"../{basename}"] = filename
710 112
711 # Write additional $BPN-modules.inc file 113 # Do generic license handling
712 self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) 114 d = bb.data.createCopy(tinfoil.config_data)
713 lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") 115 handle_license_vars(srctree, lines_before, handled, extravalues, d)
714 lines_before.append("require %s-licenses.inc" % (pn)) 116 self.__rewrite_lic_vars(lines_before)
715 117
716 self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) 118 self.__rewrite_src_uri(lines_before)
717 119
718 self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) 120 lines_before.append('require ${BPN}-licenses.inc')
719 lines_before.append("require %s-modules.inc" % (pn)) 121 lines_before.append('require ${BPN}-go-mods.inc')
720 122 lines_before.append(f'GO_IMPORT = "{go_import}"')
721 # Do generic license handling
722 handle_license_vars(srctree, lines_before, handled, extravalues, d)
723 self.__rewrite_lic_uri(lines_before)
724
725 lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
726 lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
727 123
728 def __update_lines_before(self, updated, newlines, lines_before): 124 def __update_lines_before(self, updated, newlines, lines_before):
729 if updated: 125 if updated:
@@ -735,9 +131,9 @@ class GoRecipeHandler(RecipeHandler):
735 lines_before.append(line) 131 lines_before.append(line)
736 return updated 132 return updated
737 133
738 def __rewrite_lic_uri(self, lines_before): 134 def __rewrite_lic_vars(self, lines_before):
739
740 def varfunc(varname, origvalue, op, newlines): 135 def varfunc(varname, origvalue, op, newlines):
136 import urllib.parse
741 if varname == 'LIC_FILES_CHKSUM': 137 if varname == 'LIC_FILES_CHKSUM':
742 new_licenses = [] 138 new_licenses = []
743 licenses = origvalue.split('\\') 139 licenses = origvalue.split('\\')
@@ -762,12 +158,11 @@ class GoRecipeHandler(RecipeHandler):
762 lines_before, ['LIC_FILES_CHKSUM'], varfunc) 158 lines_before, ['LIC_FILES_CHKSUM'], varfunc)
763 return self.__update_lines_before(updated, newlines, lines_before) 159 return self.__update_lines_before(updated, newlines, lines_before)
764 160
765 def __rewrite_src_uri(self, lines_before, additional_uris = []): 161 def __rewrite_src_uri(self, lines_before):
766 162
767 def varfunc(varname, origvalue, op, newlines): 163 def varfunc(varname, origvalue, op, newlines):
768 if varname == 'SRC_URI': 164 if varname == 'SRC_URI':
769 src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] 165 src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}']
770 src_uri.extend(additional_uris)
771 return src_uri, None, -1, True 166 return src_uri, None, -1, True
772 return origvalue, None, 0, True 167 return origvalue, None, 0, True
773 168
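Taken together, the rewrite shrinks the generated recipe to a stub: dependency and license enumeration move to the update_modules task contributed by the go-mod-update-modules class, and the recipe merely requires the two .inc files. Roughly what the handler now appends (the module path is illustrative):

```python
# Roughly what process() now appends for a module rooted at
# github.com/example/mod (illustrative path); the heavy lifting is
# deferred to the update_modules task
lines_before = []
lines_before.append('require ${BPN}-licenses.inc')
lines_before.append('require ${BPN}-go-mods.inc')
lines_before.append('GO_IMPORT = "github.com/example/mod"')
# ...while SRC_URI is rewritten to the single fetch line:
#   git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}
print('\n'.join(lines_before))
```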
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 113a89f6a6..8c4cdd5234 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -15,9 +15,9 @@ import bb
15from bb.fetch2.npm import NpmEnvironment 15from bb.fetch2.npm import NpmEnvironment
16from bb.fetch2.npm import npm_package 16from bb.fetch2.npm import npm_package
17from bb.fetch2.npmsw import foreach_dependencies 17from bb.fetch2.npmsw import foreach_dependencies
18from oe.license_finder import match_licenses, find_license_files
18from recipetool.create import RecipeHandler 19from recipetool.create import RecipeHandler
19from recipetool.create import get_license_md5sums 20from recipetool.create import generate_common_licenses_chksums
20from recipetool.create import guess_license
21from recipetool.create import split_pkg_licenses 21from recipetool.create import split_pkg_licenses
22logger = logging.getLogger('recipetool') 22logger = logging.getLogger('recipetool')
23 23
@@ -112,40 +112,54 @@ class NpmRecipeHandler(RecipeHandler):
112 """Return the extra license files and the list of packages""" 112 """Return the extra license files and the list of packages"""
113 licfiles = [] 113 licfiles = []
114 packages = {} 114 packages = {}
115 # Licenses from package.json will point to COMMON_LICENSE_DIR so we need
 116 # to associate them explicitly to packages for split_pkg_licenses()
117 fallback_licenses = dict()
118
119 def _find_package_licenses(destdir):
120 """Either find license files, or use package.json metadata"""
121 def _get_licenses_from_package_json(package_json):
122 with open(os.path.join(srctree, package_json), "r") as f:
123 data = json.load(f)
124 if "license" in data:
125 licenses = data["license"].split(" ")
126 licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"]
127 return [], licenses
128 else:
129 return [package_json], None
115 130
116 # Handle the parent package
117 packages["${PN}"] = ""
118
119 def _licfiles_append_fallback_readme_files(destdir):
120 """Append README files as fallback to license files if a license files is missing"""
121
122 fallback = True
123 readmes = []
124 basedir = os.path.join(srctree, destdir) 131 basedir = os.path.join(srctree, destdir)
125 for fn in os.listdir(basedir): 132 licfiles = find_license_files(basedir)
126 upper = fn.upper() 133 if len(licfiles) > 0:
127 if upper.startswith("README"): 134 return licfiles, None
128 fullpath = os.path.join(basedir, fn) 135 else:
129 readmes.append(fullpath) 136 # A license wasn't found in the package directory, so we'll use the package.json metadata
130 if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: 137 pkg_json = os.path.join(basedir, "package.json")
131 fallback = False 138 return _get_licenses_from_package_json(pkg_json)
132 if fallback: 139
133 for readme in readmes: 140 def _get_package_licenses(destdir, package):
134 licfiles.append(os.path.relpath(readme, srctree)) 141 (package_licfiles, package_licenses) = _find_package_licenses(destdir)
142 if package_licfiles:
143 licfiles.extend(package_licfiles)
144 else:
145 fallback_licenses[package] = package_licenses
135 146
136 # Handle the dependencies 147 # Handle the dependencies
137 def _handle_dependency(name, params, destdir): 148 def _handle_dependency(name, params, destdir):
138 deptree = destdir.split('node_modules/') 149 deptree = destdir.split('node_modules/')
139 suffix = "-".join([npm_package(dep) for dep in deptree]) 150 suffix = "-".join([npm_package(dep) for dep in deptree])
140 packages["${PN}" + suffix] = destdir 151 packages["${PN}" + suffix] = destdir
141 _licfiles_append_fallback_readme_files(destdir) 152 _get_package_licenses(destdir, "${PN}" + suffix)
142 153
143 with open(shrinkwrap_file, "r") as f: 154 with open(shrinkwrap_file, "r") as f:
144 shrinkwrap = json.load(f) 155 shrinkwrap = json.load(f)
145
146 foreach_dependencies(shrinkwrap, _handle_dependency, dev) 156 foreach_dependencies(shrinkwrap, _handle_dependency, dev)
147 157
148 return licfiles, packages 158 # Handle the parent package
159 packages["${PN}"] = ""
160 _get_package_licenses(srctree, "${PN}")
161
162 return licfiles, packages, fallback_licenses
149 163
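_get_licenses_from_package_json() parses SPDX-style license fields such as "(MIT OR Apache-2.0)" by splitting on whitespace and discarding the operators. The same logic in isolation:

```python
import json

def licenses_from_package_json(package_json_text):
    """Keep SPDX identifiers, drop OR/AND operators and parentheses,
    mirroring _get_licenses_from_package_json() above."""
    data = json.loads(package_json_text)
    if "license" not in data:
        return None
    licenses = data["license"].split(" ")
    return [lic.strip("()") for lic in licenses if lic not in ("OR", "AND")]

assert licenses_from_package_json('{"license": "(MIT OR Apache-2.0)"}') == \
    ["MIT", "Apache-2.0"]
assert licenses_from_package_json('{"license": "ISC"}') == ["ISC"]
```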
150 # Handle the peer dependencies 164 # Handle the peer dependencies
151 def _handle_peer_dependency(self, shrinkwrap_file): 165 def _handle_peer_dependency(self, shrinkwrap_file):
@@ -266,36 +280,12 @@ class NpmRecipeHandler(RecipeHandler):
266 fetcher.unpack(srctree) 280 fetcher.unpack(srctree)
267 281
268 bb.note("Handling licences ...") 282 bb.note("Handling licences ...")
269 (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) 283 (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev)
270 284 licvalues = match_licenses(licfiles, srctree, d)
271 def _guess_odd_license(licfiles): 285 split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses)
272 import bb 286 fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist]
273 287 extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d)
274 md5sums = get_license_md5sums(d, linenumbers=True) 288 extravalues["LICENSE"] = fallback_licenses_flat
275
276 chksums = []
277 licenses = []
278 for licfile in licfiles:
279 f = os.path.join(srctree, licfile)
280 md5value = bb.utils.md5_file(f)
281 (license, beginline, endline, md5) = md5sums.get(md5value,
282 (None, "", "", ""))
283 if not license:
284 license = "Unknown"
285 logger.info("Please add the following line for '%s' to a "
286 "'lib/recipetool/licenses.csv' and replace `Unknown`, "
287 "`X`, `Y` and `MD5` with the license, begin line, "
288 "end line and partial MD5 checksum:\n" \
289 "%s,Unknown,X,Y,MD5" % (licfile, md5value))
290 chksums.append("file://%s%s%s;md5=%s" % (licfile,
291 ";beginline=%s" % (beginline) if beginline else "",
292 ";endline=%s" % (endline) if endline else "",
293 md5 if md5 else md5value))
294 licenses.append((license, licfile, md5value))
295 return (licenses, chksums)
296
297 (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles)
298 split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after)
299 289
300 classes.append("npm") 290 classes.append("npm")
301 handled.append("buildsystem") 291 handled.append("buildsystem")
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv
deleted file mode 100644
index 80851111b3..0000000000
--- a/scripts/lib/recipetool/licenses.csv
+++ /dev/null
@@ -1,37 +0,0 @@
10636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only
212f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only
318810669f13b87348459e611d31ab760,GPL-2.0-only
4252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only
52d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only
63214f080875748938ba060314b4f727d,LGPL-2.0-only
7385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only
8393a5ca445f6965873eca0259a17f833,GPL-2.0-only
93b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
103bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only
114325afd396febcb659c36b49533135d4,GPL-2.0-only
124fbd65380cdd255951079008b364516c,LGPL-2.1-only
1354c7042be62e169199200bc6477f04d1,BSD-3-Clause
1455ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only
1559530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only
165f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only
176a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only
18751419260aa954499f7abaabaa882bbe,GPL-2.0-only
197fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only
208ca43cbc842c2336e835926c2166c28b,GPL-2.0-only
2194d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only
229ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only
239f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only
24a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only
25b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only
26bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only
27bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only
28c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only
29d32239bcb673463ab874e80d47fae504,GPL-3.0-only
30d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only
31d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only
32db979804f025cf55aabec7129cb671ed,LGPL-2.0-only
33eb723b61539feef013de476e68b5c50a,GPL-2.0-only
34ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only
35f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only
36fad9b3332be894bab9bc501572864b29,LGPL-2.1-only
37fbc093901857fcd118f065f900982c24,LGPL-2.1-only
diff --git a/scripts/lib/resulttool/junit.py b/scripts/lib/resulttool/junit.py
new file mode 100644
index 0000000000..c7a53dc550
--- /dev/null
+++ b/scripts/lib/resulttool/junit.py
@@ -0,0 +1,77 @@
1# resulttool - report test results in JUnit XML format
2#
3# Copyright (c) 2024, Siemens AG.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import re
10import xml.etree.ElementTree as ET
11import resulttool.resultutils as resultutils
12
13def junit(args, logger):
14 testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map)
15
16 total_time = 0
17 skipped = 0
18 failures = 0
19 errors = 0
20
21 for tests in testresults.values():
22 results = tests[next(reversed(tests))].get("result", {})
23
24 for result_id, result in results.items():
25 # filter out ptestresult.rawlogs and ptestresult.sections
26 if re.search(r'\.test_', result_id):
27 total_time += result.get("duration", 0)
28
29 if result['status'] == "FAILED":
30 failures += 1
31 elif result['status'] == "ERROR":
32 errors += 1
33 elif result['status'] == "SKIPPED":
34 skipped += 1
35
36 testsuites_node = ET.Element("testsuites")
37 testsuites_node.set("time", "%s" % total_time)
38 testsuite_node = ET.SubElement(testsuites_node, "testsuite")
39 testsuite_node.set("name", "Testimage")
40 testsuite_node.set("time", "%s" % total_time)
41 testsuite_node.set("tests", "%s" % len(results))
42 testsuite_node.set("failures", "%s" % failures)
43 testsuite_node.set("errors", "%s" % errors)
44 testsuite_node.set("skipped", "%s" % skipped)
45
46 for result_id, result in results.items():
47 if re.search(r'\.test_', result_id):
48 testcase_node = ET.SubElement(testsuite_node, "testcase", {
49 "name": result_id,
50 "classname": "Testimage",
51 "time": str(result['duration'])
52 })
53 if result['status'] == "SKIPPED":
54 ET.SubElement(testcase_node, "skipped", message=result['log'])
55 elif result['status'] == "FAILED":
56 ET.SubElement(testcase_node, "failure", message=result['log'])
57 elif result['status'] == "ERROR":
58 ET.SubElement(testcase_node, "error", message=result['log'])
59
60 tree = ET.ElementTree(testsuites_node)
61
62 if args.junit_xml_path is None:
63 args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml'
64 tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True)
65
66 logger.info('Saved JUnit XML report as %s' % args.junit_xml_path)
67
68def register_commands(subparsers):
69 """Register subcommands from this plugin"""
70 parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format',
 71 description='generate a unit test report in JUnit XML format based on the latest test results in testresults.json.',
72 group='analysis')
73 parser_build.set_defaults(func=junit)
74 parser_build.add_argument('json_file',
75 help='json file should point to the testresults.json')
76 parser_build.add_argument('-j', '--junit_xml_path',
77 help='junit xml path allows setting the path of the generated test report. The default location is <build_dir>/tmp/log/oeqa/junit.xml')
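For reference, the tree built by junit() serializes to a report of this shape; the snippet below reproduces it with the same ElementTree calls on made-up results (test names stand in for real result_id values):

```python
import xml.etree.ElementTree as ET

# Made-up results; real names come from result_id values like "a.test_x"
testsuites = ET.Element("testsuites", time="12.5")
testsuite = ET.SubElement(testsuites, "testsuite", name="Testimage",
                          time="12.5", tests="3", failures="1",
                          errors="0", skipped="1")
ET.SubElement(testsuite, "testcase", name="a.test_pass",
              classname="Testimage", time="10.0")
failed = ET.SubElement(testsuite, "testcase", name="b.test_fail",
                       classname="Testimage", time="2.5")
ET.SubElement(failed, "failure", message="assertion failed")
skipped = ET.SubElement(testsuite, "testcase", name="c.test_skip",
                        classname="Testimage", time="0.0")
ET.SubElement(skipped, "skipped", message="not applicable")

print(ET.tostring(testsuites, encoding="unicode"))
```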
diff --git a/scripts/lib/resulttool/manualexecution.py b/scripts/lib/resulttool/manualexecution.py
index ecb27c5933..ae0861ac6b 100755
--- a/scripts/lib/resulttool/manualexecution.py
+++ b/scripts/lib/resulttool/manualexecution.py
@@ -22,7 +22,7 @@ def load_json_file(f):
22def write_json_file(f, json_data): 22def write_json_file(f, json_data):
23 os.makedirs(os.path.dirname(f), exist_ok=True) 23 os.makedirs(os.path.dirname(f), exist_ok=True)
24 with open(f, 'w') as filedata: 24 with open(f, 'w') as filedata:
25 filedata.write(json.dumps(json_data, sort_keys=True, indent=4)) 25 filedata.write(json.dumps(json_data, sort_keys=True, indent=1))
26 26
27class ManualTestRunner(object): 27class ManualTestRunner(object):
28 28
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py
index 10e7d13841..33b3119c54 100644
--- a/scripts/lib/resulttool/regression.py
+++ b/scripts/lib/resulttool/regression.py
@@ -212,6 +212,8 @@ def compare_result(logger, base_name, target_name, base_result, target_result, d
212 212
213 if base_result and target_result: 213 if base_result and target_result:
214 for k in base_result: 214 for k in base_result:
215 if k in ['ptestresult.rawlogs', 'ptestresult.sections']:
216 continue
215 base_testcase = base_result[k] 217 base_testcase = base_result[k]
216 base_status = base_testcase.get('status') 218 base_status = base_testcase.get('status')
217 if base_status: 219 if base_status:
@@ -422,6 +424,7 @@ def register_commands(subparsers):
422 help='(optional) filter the base results to this result ID') 424 help='(optional) filter the base results to this result ID')
423 parser_build.add_argument('-t', '--target-result-id', default='', 425 parser_build.add_argument('-t', '--target-result-id', default='',
424 help='(optional) filter the target results to this result ID') 426 help='(optional) filter the target results to this result ID')
427 parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes")
425 428
426 parser_build = subparsers.add_parser('regression-git', help='regression git analysis', 429 parser_build = subparsers.add_parser('regression-git', help='regression git analysis',
427 description='regression analysis comparing base result set to target ' 430 description='regression analysis comparing base result set to target '
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py
index a349510ab8..1c100b00ab 100644
--- a/scripts/lib/resulttool/report.py
+++ b/scripts/lib/resulttool/report.py
@@ -256,7 +256,7 @@ class ResultsTextReport(object):
256 if selected_test_case_only: 256 if selected_test_case_only:
257 print_selected_testcase_result(raw_results, selected_test_case_only) 257 print_selected_testcase_result(raw_results, selected_test_case_only)
258 else: 258 else:
259 print(json.dumps(raw_results, sort_keys=True, indent=4)) 259 print(json.dumps(raw_results, sort_keys=True, indent=1))
260 else: 260 else:
261 print('Could not find raw test result for %s' % raw_test) 261 print('Could not find raw test result for %s' % raw_test)
262 return 0 262 return 0
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py
index c5521d81bd..b8fc79a6ac 100644
--- a/scripts/lib/resulttool/resultutils.py
+++ b/scripts/lib/resulttool/resultutils.py
@@ -14,8 +14,11 @@ import scriptpath
14import copy 14import copy
15import urllib.request 15import urllib.request
16import posixpath 16import posixpath
17import logging
17scriptpath.add_oe_lib_path() 18scriptpath.add_oe_lib_path()
18 19
20logger = logging.getLogger('resulttool')
21
19flatten_map = { 22flatten_map = {
20 "oeselftest": [], 23 "oeselftest": [],
21 "runtime": [], 24 "runtime": [],
@@ -31,13 +34,19 @@ regression_map = {
31 "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] 34 "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE']
32} 35}
33store_map = { 36store_map = {
34 "oeselftest": ['TEST_TYPE'], 37 "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'],
35 "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], 38 "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'],
36 "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], 39 "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
37 "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], 40 "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
38 "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] 41 "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME']
39} 42}
40 43
44rawlog_sections = {
45 "ptestresult.rawlogs": "ptest",
46 "ltpresult.rawlogs": "ltp",
47 "ltpposixresult.rawlogs": "ltpposix"
48}
49
41def is_url(p): 50def is_url(p):
42 """ 51 """
43 Helper for determining if the given path is a URL 52 Helper for determining if the given path is a URL
@@ -108,21 +117,57 @@ def filter_resultsdata(results, resultid):
108 newresults[r][i] = results[r][i] 117 newresults[r][i] = results[r][i]
109 return newresults 118 return newresults
110 119
111def strip_ptestresults(results): 120def strip_logs(results):
112 newresults = copy.deepcopy(results) 121 newresults = copy.deepcopy(results)
113 #for a in newresults2:
114 # newresults = newresults2[a]
115 for res in newresults: 122 for res in newresults:
116 if 'result' not in newresults[res]: 123 if 'result' not in newresults[res]:
117 continue 124 continue
118 if 'ptestresult.rawlogs' in newresults[res]['result']: 125 for logtype in rawlog_sections:
119 del newresults[res]['result']['ptestresult.rawlogs'] 126 if logtype in newresults[res]['result']:
127 del newresults[res]['result'][logtype]
120 if 'ptestresult.sections' in newresults[res]['result']: 128 if 'ptestresult.sections' in newresults[res]['result']:
121 for i in newresults[res]['result']['ptestresult.sections']: 129 for i in newresults[res]['result']['ptestresult.sections']:
122 if 'log' in newresults[res]['result']['ptestresult.sections'][i]: 130 if 'log' in newresults[res]['result']['ptestresult.sections'][i]:
123 del newresults[res]['result']['ptestresult.sections'][i]['log'] 131 del newresults[res]['result']['ptestresult.sections'][i]['log']
124 return newresults 132 return newresults
125 133
 134# For timing numbers, excessive precision doesn't make sense and just confuses
 135# the logs. For numbers over 1, trim to 3 decimal places; for numbers less than 1,
136# trim to 4 significant digits
137def trim_durations(results):
138 for res in results:
139 if 'result' not in results[res]:
140 continue
141 for entry in results[res]['result']:
142 if 'duration' in results[res]['result'][entry]:
143 duration = results[res]['result'][entry]['duration']
144 if duration > 1:
145 results[res]['result'][entry]['duration'] = float("%.3f" % duration)
146 elif duration < 1:
147 results[res]['result'][entry]['duration'] = float("%.4g" % duration)
148 return results
149
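The effect of the two format strings used by trim_durations(), checked directly:

```python
# Over 1: three decimal places; under 1: four significant digits
assert float("%.3f" % 83.4563829) == 83.456
assert float("%.4g" % 0.000123456) == 0.0001235
```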
150def handle_cleanups(results):
151 # Remove pointless path duplication from old format reproducibility results
152 for res2 in results:
153 try:
154 section = results[res2]['result']['reproducible']['files']
155 for pkgtype in section:
156 for filelist in section[pkgtype].copy():
157 if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict:
158 newlist = []
159 for entry in section[pkgtype][filelist]:
160 newlist.append(entry["reference"].split("/./")[1])
161 section[pkgtype][filelist] = newlist
162
163 except KeyError:
164 pass
165 # Remove pointless duplicate rawlogs data
166 try:
167 del results[res2]['result']['reproducible.rawlogs']
168 except KeyError:
169 pass
170
126def decode_log(logdata): 171def decode_log(logdata):
127 if isinstance(logdata, str): 172 if isinstance(logdata, str):
128 return logdata 173 return logdata
@@ -155,9 +200,6 @@ def generic_get_rawlogs(sectname, results):
155 return None 200 return None
156 return decode_log(results[sectname]['log']) 201 return decode_log(results[sectname]['log'])
157 202
158def ptestresult_get_rawlogs(results):
159 return generic_get_rawlogs('ptestresult.rawlogs', results)
160
161def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): 203def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
162 for res in results: 204 for res in results:
163 if res: 205 if res:
@@ -167,16 +209,20 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p
167 os.makedirs(os.path.dirname(dst), exist_ok=True) 209 os.makedirs(os.path.dirname(dst), exist_ok=True)
168 resultsout = results[res] 210 resultsout = results[res]
169 if not ptestjson: 211 if not ptestjson:
170 resultsout = strip_ptestresults(results[res]) 212 resultsout = strip_logs(results[res])
213 trim_durations(resultsout)
214 handle_cleanups(resultsout)
171 with open(dst, 'w') as f: 215 with open(dst, 'w') as f:
172 f.write(json.dumps(resultsout, sort_keys=True, indent=4)) 216 f.write(json.dumps(resultsout, sort_keys=True, indent=1))
173 for res2 in results[res]: 217 for res2 in results[res]:
174 if ptestlogs and 'result' in results[res][res2]: 218 if ptestlogs and 'result' in results[res][res2]:
175 seriesresults = results[res][res2]['result'] 219 seriesresults = results[res][res2]['result']
176 rawlogs = ptestresult_get_rawlogs(seriesresults) 220 for logtype in rawlog_sections:
177 if rawlogs is not None: 221 logdata = generic_get_rawlogs(logtype, seriesresults)
178 with open(dst.replace(fn, "ptest-raw.log"), "w+") as f: 222 if logdata is not None:
179 f.write(rawlogs) 223 logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log")
224 with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f:
225 f.write(logdata)
180 if 'ptestresult.sections' in seriesresults: 226 if 'ptestresult.sections' in seriesresults:
181 for i in seriesresults['ptestresult.sections']: 227 for i in seriesresults['ptestresult.sections']:
182 sectionlog = ptestresult_get_log(seriesresults, i) 228 sectionlog = ptestresult_get_log(seriesresults, i)
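The duration-trimming rule above is easiest to see on concrete numbers. A minimal standalone sketch of the same formatting (sample values are illustrative, not from any real test run):

    # Durations over 1 keep 3 decimal places; durations under 1 keep 4 significant digits.
    def trim(duration):
        if duration > 1:
            return float("%.3f" % duration)
        elif duration < 1:
            return float("%.4g" % duration)
        return duration

    print(trim(12.3456789))    # 12.346
    print(trim(0.000123456))   # 0.0001235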
diff --git a/scripts/lib/resulttool/store.py b/scripts/lib/resulttool/store.py
index e0951f0a8f..b143334e69 100644
--- a/scripts/lib/resulttool/store.py
+++ b/scripts/lib/resulttool/store.py
@@ -65,18 +65,35 @@ def store(args, logger):
 
         for r in revisions:
             results = revisions[r]
+            if args.revision and r[0] != args.revision:
+                logger.info('skipping %s as non-matching' % r[0])
+                continue
             keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]}
-            subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"])
+            subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"])
             resultutils.save_resultsdata(results, tempdir, ptestlogs=True)
 
         logger.info('Storing test result into git repository %s' % args.git_dir)
 
-        gitarchive.gitarchive(tempdir, args.git_dir, False, False,
+        excludes = []
+        if args.logfile_archive:
+            excludes = ['*.log', "*.log.zst"]
+
+        tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False,
                 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
                 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
                 'Test run #{tag_number} of {branch}:{commit}', '',
-                [], [], False, keywords, logger)
+                excludes, [], False, keywords, logger)
 
+        if args.logfile_archive:
+            logdir = args.logfile_archive + "/" + tagname
+            shutil.copytree(tempdir, logdir)
+            os.chmod(logdir, 0o755)
+            for root, dirs, files in os.walk(logdir):
+                for name in files:
+                    if not name.endswith(".log"):
+                        continue
+                    f = os.path.join(root, name)
+                    subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True)
     finally:
         subprocess.check_call(["rm", "-rf", tempdir])
 
@@ -102,3 +119,7 @@ def register_commands(subparsers):
                               help='add executed-by configuration to each result file')
     parser_build.add_argument('-t', '--extra-test-env', default='',
                               help='add extra test environment data to each result file configuration')
+    parser_build.add_argument('-r', '--revision', default='',
+                              help='only store data for the specified revision')
+    parser_build.add_argument('-l', '--logfile-archive', default='',
+                              help='directory to separately archive log files along with a copy of the results')
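Taken together, the two new options control what ends up in git versus on disk. A hedged usage sketch (paths and revision are illustrative only):

    resulttool store /path/to/results /path/to/git-repo --revision 0123abc --logfile-archive /srv/test-logs

With --logfile-archive, *.log files are excluded from the git commit; instead the whole result tree is copied to <logfile-archive>/<tagname> and each .log file there is compressed in place by 'zstd --rm'.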
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
index f23e53cba9..32e749dbb1 100644
--- a/scripts/lib/scriptutils.py
+++ b/scripts/lib/scriptutils.py
@@ -179,8 +179,13 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
             f.write('SRCREV = "%s"\n' % srcrev)
             f.write('PV = "0.0+"\n')
             f.write('WORKDIR = "%s"\n' % tmpworkdir)
+            f.write('UNPACKDIR = "%s"\n' % destdir)
+
             # Set S out of the way so it doesn't get created under the workdir
-            f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
+            s_dir = os.path.join(tmpdir, 'emptysrc')
+            bb.utils.mkdirhier(s_dir)
+            f.write('S = "%s"\n' % s_dir)
+
             if not mirrors:
                 # We do not need PREMIRRORS since we are almost certainly
                 # fetching new source rather than something that has already
@@ -232,10 +237,6 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
                 if e.errno != errno.ENOTEMPTY:
                     raise
 
-        bb.utils.mkdirhier(destdir)
-        for fn in os.listdir(tmpworkdir):
-            shutil.move(os.path.join(tmpworkdir, fn), destdir)
-
     finally:
         if not preserve_tmp:
             shutil.rmtree(tmpdir)
@@ -271,12 +272,3 @@ def is_src_url(param):
             return True
     return False
 
-def filter_src_subdirs(pth):
-    """
-    Filter out subdirectories of initial unpacked source trees that we do not care about.
-    Used by devtool and recipetool.
-    """
-    dirlist = os.listdir(pth)
-    filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa']
-    dirlist = [x for x in dirlist if x not in filterout]
-    return dirlist
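With UNPACKDIR set, the fetcher unpacks straight into destdir, which is why the post-fetch shutil.move() loop above could be dropped. For illustration, the throwaway recipe that fetch_url() writes now looks roughly like this (all paths are placeholders, not real output):

    SRCREV = "abcdef0123456789..."
    PV = "0.0+"
    WORKDIR = "/tmp/recipetool-XXXXXX/work"
    UNPACKDIR = "/path/to/destdir"
    S = "/tmp/recipetool-XXXXXX/emptysrc"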
diff --git a/scripts/lib/wic/canned-wks/common.wks.inc b/scripts/lib/wic/canned-wks/common.wks.inc
index 89880b417b..4a440ddafe 100644
--- a/scripts/lib/wic/canned-wks/common.wks.inc
+++ b/scripts/lib/wic/canned-wks/common.wks.inc
@@ -1,3 +1,3 @@
 # This file is included into 3 canned wks files from this directory
-part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
+part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
 part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024
diff --git a/scripts/lib/wic/canned-wks/directdisk-gpt.wks b/scripts/lib/wic/canned-wks/directdisk-gpt.wks
index 8d7d8de6ea..cb640056f1 100644
--- a/scripts/lib/wic/canned-wks/directdisk-gpt.wks
+++ b/scripts/lib/wic/canned-wks/directdisk-gpt.wks
@@ -3,7 +3,7 @@
 # can directly dd to boot media.
 
 
-part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
+part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
 part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
 
 bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8"
diff --git a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
index f61d941d6d..4fd1999ffb 100644
--- a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
+++ b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
@@ -15,7 +15,7 @@
 #
 # - or any combinations of -r and --rootfs command line options
 
-part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
+part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
 part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024
 part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024
 
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
index 2fd286ff98..5211972955 100644
--- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
+++ b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
@@ -1,3 +1,3 @@
 bootloader --ptable gpt
-part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.1
+part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.2
 part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in
new file mode 100644
index 0000000000..cac0fa32cd
--- /dev/null
+++ b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in
@@ -0,0 +1,3 @@
+bootloader --ptable gpt --timeout=5
+part /boot --source bootimg_efi --sourceparams="loader=${EFI_PROVIDER}" --label boot --active --align 1024 --use-uuid --part-name="ESP" --part-type=C12A7328-F81F-11D2-BA4B-00A0C93EC93B --overhead-factor=1
+part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
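The new canned wks relies on EFI_PROVIDER being expanded at build time; a hedged example of wiring it up in an image configuration (values illustrative):

    EFI_PROVIDER = "systemd-boot"
    WKS_FILE = "efi-uki-bootdisk.wks.in"

The --part-type GUID above is the standard EFI System Partition type, so firmware can find the ESP regardless of which loader is chosen.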
diff --git a/scripts/lib/wic/canned-wks/mkefidisk.wks b/scripts/lib/wic/canned-wks/mkefidisk.wks
index 9f534fe184..16dfe76dfe 100644
--- a/scripts/lib/wic/canned-wks/mkefidisk.wks
+++ b/scripts/lib/wic/canned-wks/mkefidisk.wks
@@ -2,10 +2,10 @@
 # long-description: Creates a partitioned EFI disk image that the user
 # can directly dd to boot media.
 
-part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024
+part /boot --source bootimg_efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024
 
 part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
 
 part swap --ondisk sda --size 44 --label swap1 --fstype=swap
 
-bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0"
+bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=${KERNEL_CONSOLE} console=tty0"
diff --git a/scripts/lib/wic/canned-wks/mkhybridiso.wks b/scripts/lib/wic/canned-wks/mkhybridiso.wks
index 48c5ac4791..c3a030e5b4 100644
--- a/scripts/lib/wic/canned-wks/mkhybridiso.wks
+++ b/scripts/lib/wic/canned-wks/mkhybridiso.wks
@@ -2,6 +2,6 @@
 # long-description: Creates an EFI and legacy bootable hybrid ISO image
 # which can be used on optical media as well as USB media.
 
-part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO
+part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO
 
 bootloader --timeout=15 --append=""
diff --git a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
index 63bc4dab6a..f9f8044f7d 100644
--- a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
+++ b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
@@ -2,5 +2,5 @@
 # long-description: Creates a partitioned SD card image. Boot files
 # are located in the first vfat partition.
 
-part /boot --source bootimg-partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16
+part /boot --source bootimg_partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16
 part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4
diff --git a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
index 95d7b97a60..3fb2c0e35f 100644
--- a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
+++ b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
@@ -2,7 +2,7 @@
 # long-description: Creates a partitioned EFI disk image that the user
 # can directly dd to boot media. The selected bootloader is systemd-boot.
 
-part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid
+part /boot --source bootimg_efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid
 
 part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
 
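All of the canned .wks files above move to underscore plugin names. The engine.py and partition.py hunks below map '-' to '_' before the plugin lookup, so a pre-existing line such as

    part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024

still resolves to the renamed bootimg_pcbios plugin.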
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py
index 674ccfc244..b9e60cbe4e 100644
--- a/scripts/lib/wic/engine.py
+++ b/scripts/lib/wic/engine.py
@@ -180,6 +180,8 @@ def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir,
             os.makedirs(options.outdir)
 
     pname = options.imager
+    # Don't support '-' in plugin names
+    pname = pname.replace("-", "_")
     plugin_class = PluginMgr.get_plugins('imager').get(pname)
     if not plugin_class:
         raise WicError('Unknown plugin: %s' % pname)
@@ -232,6 +234,16 @@ class Disk:
         self._psector_size = None
         self._ptable_format = None
 
+        # define sector size
+        sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE')
+        if sector_size_str is not None:
+            try:
+                self.sector_size = int(sector_size_str)
+            except ValueError:
+                self.sector_size = None
+        else:
+            self.sector_size = None
+
         # find parted
         # read paths from $PATH environment variable
         # if it fails, use hardcoded paths
@@ -258,7 +270,13 @@ class Disk:
     def get_partitions(self):
         if self._partitions is None:
             self._partitions = OrderedDict()
-            out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath))
+
+            if self.sector_size is not None:
+                out = exec_cmd("export PARTED_SECTOR_SIZE=%d; %s -sm %s unit B print" % \
+                               (self.sector_size, self.parted, self.imagepath), True)
+            else:
+                out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath))
+
             parttype = namedtuple("Part", "pnum start end size fstype")
             splitted = out.splitlines()
             # skip over possible errors in exec_cmd output
@@ -359,7 +377,7 @@ class Disk:
         Remove files/dirs and their contents from the partition.
         This only applies to ext* partition.
         """
-        abs_path = re.sub('\/\/+', '/', path)
+        abs_path = re.sub(r'\/\/+', '/', path)
         cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs,
                                              self._get_part_image(pnum),
                                              abs_path)
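The sector-size plumbing above is driven by a single variable; a hedged configuration sketch (value illustrative, e.g. for 4K-native media), typically set in local.conf or a machine config:

    WIC_SECTOR_SIZE = "4096"

When the variable is unset or unparsable, Disk falls back to None (a plain parted invocation) and PartitionedImage, in the direct.py hunks below, falls back to the built-in SECTOR_SIZE default.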
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py
index 163535e431..2e3061f343 100644
--- a/scripts/lib/wic/help.py
+++ b/scripts/lib/wic/help.py
@@ -544,18 +544,18 @@ DESCRIPTION
     the --source param given to that partition. For example, if the
     partition is set up like this:
 
-        part /boot --source bootimg-pcbios ...
+        part /boot --source bootimg_pcbios ...
 
     then the methods defined as class members of the plugin having the
-    matching bootimg-pcbios .name class member would be used.
+    matching bootimg_pcbios .name class member would be used.
 
     To be more concrete, here's the plugin definition that would match
-    a '--source bootimg-pcbios' usage, along with an example method
+    a '--source bootimg_pcbios' usage, along with an example method
     that would be called by the wic implementation when it needed to
     invoke an implementation-specific partition-preparation function:
 
         class BootimgPcbiosPlugin(SourcePlugin):
-            name = 'bootimg-pcbios'
+            name = 'bootimg_pcbios'
 
             @classmethod
             def do_prepare_partition(self, part, ...)
@@ -794,7 +794,7 @@ DESCRIPTION
 
     Here are the contents of test.wks:
 
-        part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
+        part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
         part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024
 
         bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0"
@@ -916,6 +916,10 @@ DESCRIPTION
                  will create empty partition. --size parameter has
                  to be used to specify size of empty partition.
 
+    --sourceparams: This option is specific to wic. Supply additional
+                    parameters to the source plugin in
+                    key1=value1,key2 format.
+
     --ondisk or --ondrive: Forces the partition to be created on
                            a particular disk.
 
@@ -932,6 +936,7 @@ DESCRIPTION
          squashfs
          erofs
          swap
+         none
 
     --fsoptions: Specifies a free-form string of options to be
                  used when mounting the filesystem. This string
@@ -965,6 +970,14 @@ DESCRIPTION
                  to start a partition on an x KBytes
                  boundary.
 
+    --offset: This option is specific to wic. It places the partition
+              at exactly the specified offset; if the partition cannot be
+              placed at the specified offset, the image build will fail.
+              Specify an integer value optionally followed by one of the
+              units s/S for 512 byte sector, k/K for kibibyte, M for
+              mebibyte and G for gibibyte. The default unit if none is
+              given is k.
+
     --no-table: This option is specific to wic. Space will be
                 reserved for the partition and it will be
                 populated but it will not be added to the
@@ -1045,6 +1058,18 @@ DESCRIPTION
     not take effect when --mkfs-extraopts is used. This should be taken into
     account when using --mkfs-extraopts.
 
+    --type: This option is specific to wic. Valid values are 'primary',
+            'logical'. For msdos partition tables, this option specifies
+            the partition type.
+
+    --hidden: This option is specific to wic. This option sets the
+              RequiredPartition bit (bit 0) on GPT partitions.
+
+    --mbr: This option is specific to wic. This option is used with the
+           gpt-hybrid partition type that uses both a GPT partition and
+           an MBR header. Partitions with this flag will be included in
+           this MBR header.
+
  * bootloader
 
     This command allows the user to specify various bootloader
@@ -1063,6 +1088,13 @@ DESCRIPTION
                  file. Using this option will override any other
                  bootloader option.
 
+    --ptable: Specifies the partition table format. Valid values are
+              'msdos', 'gpt', 'gpt-hybrid'.
+
+    --source: Specifies the source plugin. If not specified, the
+              --source value will be copied from the partition that has
+              /boot as mountpoint.
+
     Note that bootloader functionality and boot partitions are
     implemented by the various --source plugins that implement
     bootloader functionality; the bootloader command essentially
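A hedged wks sketch combining several of the options documented above (layout illustrative only):

    part /boot --source bootimg_efi --sourceparams="loader=systemd-boot" --offset 1024 --label boot --active --align 1024
    part / --source rootfs --fstype=ext4 --label root --hidden
    bootloader --ptable gpt --timeout=5

Here --offset 1024 means 1024 KiB (k is the default unit) and --hidden sets the GPT RequiredPartition attribute on the root partition.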
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py
index 795707ec5d..b34691d313 100644
--- a/scripts/lib/wic/partition.py
+++ b/scripts/lib/wic/partition.py
@@ -164,6 +164,9 @@ class Partition():
 
         plugins = PluginMgr.get_plugins('source')
 
+        # Don't support '-' in plugin names
+        self.source = self.source.replace("-", "_")
+
         if self.source not in plugins:
             raise WicError("The '%s' --source specified for %s doesn't exist.\n\t"
                            "See 'wic list source-plugins' for a list of available"
@@ -178,7 +181,7 @@ class Partition():
             splitted = self.sourceparams.split(',')
             srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par)
 
-        plugin = PluginMgr.get_plugins('source')[self.source]
+        plugin = plugins[self.source]
         plugin.do_configure_partition(self, srcparams_dict, creator,
                                       cr_workdir, oe_builddir, bootimg_dir,
                                       kernel_dir, native_sysroot)
@@ -222,19 +225,19 @@ class Partition():
         if (pseudo_dir):
             # Canonicalize the ignore paths. This corresponds to
             # calling oe.path.canonicalize(), which is used in bitbake.conf.
-            ignore_paths = [rootfs] + (get_bitbake_var("PSEUDO_IGNORE_PATHS") or "").split(",")
+            include_paths = [rootfs_dir] + (get_bitbake_var("PSEUDO_INCLUDE_PATHS") or "").split(",")
             canonical_paths = []
-            for path in ignore_paths:
+            for path in include_paths:
                 if "$" not in path:
                     trailing_slash = path.endswith("/") and "/" or ""
                     canonical_paths.append(os.path.realpath(path) + trailing_slash)
-            ignore_paths = ",".join(canonical_paths)
+            include_paths = ",".join(canonical_paths)
 
             pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix
             pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir
             pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir
             pseudo += "export PSEUDO_NOSYMLINKEXP=1;"
-            pseudo += "export PSEUDO_IGNORE_PATHS=%s;" % ignore_paths
+            pseudo += "export PSEUDO_INCLUDE_PATHS=%s;" % include_paths
             pseudo += "%s " % get_bitbake_var("FAKEROOTCMD")
         else:
             pseudo = None
@@ -244,7 +247,7 @@ class Partition():
             # from bitbake variable
             rsize_bb = get_bitbake_var('ROOTFS_SIZE')
             rdir = get_bitbake_var('IMAGE_ROOTFS')
-            if rsize_bb and rdir == rootfs_dir:
+            if rsize_bb and (rdir == rootfs_dir or (rootfs_dir.split('/')[-2] == "tmp-wic" and rootfs_dir.split('/')[-1][:6] == "rootfs")):
                 # Bitbake variable ROOTFS_SIZE is calculated in
                 # Image._get_rootfs_size method from meta/lib/oe/image.py
                 # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT,
@@ -284,19 +287,8 @@ class Partition():
 
         extraopts = self.mkfs_extraopts or "-F -i 8192"
 
-        if os.getenv('SOURCE_DATE_EPOCH'):
-            sde_time = int(os.getenv('SOURCE_DATE_EPOCH'))
-            if pseudo:
-                pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo)
-            else:
-                pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time
-
-            # Set hash_seed to generate deterministic directory indexes
-            namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
-            if self.fsuuid:
-                namespace = uuid.UUID(self.fsuuid)
-            hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
-            extraopts += " -E hash_seed=%s" % hash_seed
+        # use hash_seed to generate reproducible ext4 images
+        (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, pseudo)
 
         label_str = ""
         if self.label:
@@ -344,6 +336,23 @@ class Partition():
 
         self.check_for_Y2038_problem(rootfs, native_sysroot)
 
+    def get_hash_seed_ext4(self, extraopts, pseudo):
+        if os.getenv('SOURCE_DATE_EPOCH'):
+            sde_time = int(os.getenv('SOURCE_DATE_EPOCH'))
+            if pseudo:
+                pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo)
+            else:
+                pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time
+
+            # Set hash_seed to generate deterministic directory indexes
+            namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
+            if self.fsuuid:
+                namespace = uuid.UUID(self.fsuuid)
+            hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
+            extraopts += " -E hash_seed=%s" % hash_seed
+
+        return (extraopts, pseudo)
+
     def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
                              native_sysroot, pseudo):
         """
@@ -437,13 +446,16 @@ class Partition():
 
         extraopts = self.mkfs_extraopts or "-i 8192"
 
+        # use hash_seed to generate reproducible ext4 images
+        (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, None)
+
         label_str = ""
         if self.label:
             label_str = "-L %s" % self.label
 
         mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \
             (self.fstype, extraopts, label_str, self.fsuuid, rootfs)
-        exec_native_cmd(mkfs_cmd, native_sysroot)
+        exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
 
         self.check_for_Y2038_problem(rootfs, native_sysroot)
 
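The get_hash_seed_ext4() helper makes mkfs.ext4 directory hashing reproducible by deriving the seed from SOURCE_DATE_EPOCH. The same derivation, standalone (epoch value illustrative):

    import uuid

    # Fixed namespace from the patch; a partition's fsuuid, when set, replaces it.
    namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
    sde_time = 1700000000  # stand-in for SOURCE_DATE_EPOCH
    hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
    # The same namespace and epoch always yield the same seed, so two builds
    # of the same source produce identical ext4 directory indexes.
    print(hash_seed)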
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py
index b64568339b..640da292d3 100644
--- a/scripts/lib/wic/pluginbase.py
+++ b/scripts/lib/wic/pluginbase.py
@@ -44,7 +44,7 @@ class PluginMgr:
             path = os.path.join(layer_path, script_plugin_dir)
             path = os.path.abspath(os.path.expanduser(path))
             if path not in cls._plugin_dirs and os.path.isdir(path):
-                cls._plugin_dirs.insert(0, path)
+                cls._plugin_dirs.append(path)
 
         if ptype not in PLUGINS:
             # load all ptype plugins
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py
index a1d152659b..6e1f1c8cba 100644
--- a/scripts/lib/wic/plugins/imager/direct.py
+++ b/scripts/lib/wic/plugins/imager/direct.py
@@ -203,6 +203,8 @@ class DirectPlugin(ImagerPlugin):
         source_plugin = self.ks.bootloader.source
         disk_name = self.parts[0].disk
         if source_plugin:
+            # Don't support '-' in plugin names
+            source_plugin = source_plugin.replace("-", "_")
             plugin = PluginMgr.get_plugins('source')[source_plugin]
             plugin.do_install_disk(self._image, disk_name, self, self.workdir,
                                    self.oe_builddir, self.bootimg_dir,
@@ -321,7 +323,15 @@ class PartitionedImage():
         self.partitions = partitions
         self.partimages = []
         # Size of a sector used in calculations
-        self.sector_size = SECTOR_SIZE
+        sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE')
+        if sector_size_str is not None:
+            try:
+                self.sector_size = int(sector_size_str)
+            except ValueError:
+                self.sector_size = SECTOR_SIZE
+        else:
+            self.sector_size = SECTOR_SIZE
+
         self.native_sysroot = native_sysroot
         num_real_partitions = len([p for p in self.partitions if not p.no_table])
         self.extra_space = extra_space
@@ -508,7 +518,8 @@ class PartitionedImage():
         logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors",
                      parttype, start, end, size)
 
-        cmd = "parted -s %s unit s mkpart %s" % (device, parttype)
+        cmd = "export PARTED_SECTOR_SIZE=%d; parted -s %s unit s mkpart %s" % \
+              (self.sector_size, device, parttype)
         if fstype:
             cmd += " %s" % fstype
         cmd += " %d %d" % (start, end)
@@ -527,8 +538,8 @@ class PartitionedImage():
             os.ftruncate(sparse.fileno(), min_size)
 
         logger.debug("Initializing partition table for %s", device)
-        exec_native_cmd("parted -s %s mklabel %s" % (device, ptable_format),
-                        self.native_sysroot)
+        exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s mklabel %s" %
+                        (self.sector_size, device, ptable_format), self.native_sysroot)
@@ -538,7 +549,8 @@ class PartitionedImage():
             self.disk_guid = uuid.uuid4()
 
         logger.debug("Set disk guid %s", self.disk_guid)
-        sfdisk_cmd = "sfdisk --disk-id %s %s" % (self.path, self.disk_guid)
+        sfdisk_cmd = "sfdisk --sector-size %s --disk-id %s %s" % \
+                     (self.sector_size, self.path, self.disk_guid)
         exec_native_cmd(sfdisk_cmd, self.native_sysroot)
 
     def create(self):
@@ -613,45 +625,44 @@ class PartitionedImage():
             partition_label = part.part_name if part.part_name else part.label
             logger.debug("partition %d: set name to %s",
                          part.num, partition_label)
-            exec_native_cmd("sgdisk --change-name=%d:%s %s" % \
-                            (part.num, partition_label,
-                            self.path), self.native_sysroot)
-
+            exec_native_cmd("sfdisk --sector-size %s --part-label %s %d %s" % \
+                            (self.sector_size, self.path, part.num,
+                            partition_label), self.native_sysroot)
         if part.part_type:
             logger.debug("partition %d: set type UID to %s",
                          part.num, part.part_type)
-            exec_native_cmd("sgdisk --typecode=%d:%s %s" % \
-                            (part.num, part.part_type,
-                            self.path), self.native_sysroot)
+            exec_native_cmd("sfdisk --sector-size %s --part-type %s %d %s" % \
+                            (self.sector_size, self.path, part.num,
+                            part.part_type), self.native_sysroot)
 
         if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"):
             logger.debug("partition %d: set UUID to %s",
                          part.num, part.uuid)
-            exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \
-                            (part.num, part.uuid, self.path),
-                            self.native_sysroot)
+            exec_native_cmd("sfdisk --sector-size %s --part-uuid %s %d %s" % \
+                            (self.sector_size, self.path, part.num, part.uuid),
+                            self.native_sysroot)
 
         if part.active:
             flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot"
             logger.debug("Set '%s' flag for partition '%s' on disk '%s'",
                          flag_name, part.num, self.path)
-            exec_native_cmd("parted -s %s set %d %s on" % \
-                            (self.path, part.num, flag_name),
-                            self.native_sysroot)
+            exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \
+                            (self.sector_size, self.path, part.num, flag_name),
+                            self.native_sysroot)
         if self.ptable_format == 'gpt-hybrid' and part.mbr:
-            exec_native_cmd("parted -s %s set %d %s on" % \
-                            (mbr_path, hybrid_mbr_part_num, "boot"),
-                            self.native_sysroot)
+            exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \
+                            (self.sector_size, mbr_path, hybrid_mbr_part_num, "boot"),
+                            self.native_sysroot)
         if part.system_id:
-            exec_native_cmd("sfdisk --part-type %s %s %s" % \
-                            (self.path, part.num, part.system_id),
-                            self.native_sysroot)
+            exec_native_cmd("sfdisk --sector-size %s --part-type %s %s %s" % \
+                            (self.sector_size, self.path, part.num, part.system_id),
+                            self.native_sysroot)
 
         if part.hidden and self.ptable_format == "gpt":
             logger.debug("Set hidden attribute for partition '%s' on disk '%s'",
                          part.num, self.path)
-            exec_native_cmd("sfdisk --part-attrs %s %s RequiredPartition" % \
-                            (self.path, part.num),
-                            self.native_sysroot)
+            exec_native_cmd("sfdisk --sector-size %s --part-attrs %s %s RequiredPartition" % \
+                            (self.sector_size, self.path, part.num),
+                            self.native_sysroot)
 
         if self.ptable_format == "gpt-hybrid":
@@ -664,7 +675,8 @@ class PartitionedImage():
             # create with an arbitrary type, then change it to the correct type
             # with sfdisk
             self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD)
-            exec_native_cmd("sfdisk --part-type %s %d 0xee" % (mbr_path, hybrid_mbr_part_num),
+            exec_native_cmd("sfdisk --sector-size %s --part-type %s %d 0xee" % \
+                            (self.sector_size, mbr_path, hybrid_mbr_part_num),
                             self.native_sysroot)
 
             # Copy hybrid MBR
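Because sector_size now feeds every geometry calculation, the same partition layout yields different sector counts on 512 B and 4 KiB media. A minimal sketch of the rounding involved (helper name hypothetical, not part of the patch):

    def size_to_sectors(size_bytes, sector_size=512):
        # Partitions occupy whole sectors, so round the byte size up.
        return (size_bytes + sector_size - 1) // sector_size

    print(size_to_sectors(1048576))        # 2048 sectors on 512 B media
    print(size_to_sectors(1048576, 4096))  # 256 sectors on 4 KiB media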
diff --git a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py
index 5bd7390680..4279ddded8 100644
--- a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py
+++ b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py
@@ -13,7 +13,7 @@
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 #
 # DESCRIPTION
-# This implements the 'bootimg-biosplusefi' source plugin class for 'wic'
+# This implements the 'bootimg_biosplusefi' source plugin class for 'wic'
 #
 # AUTHORS
 # William Bourque <wbourque [at) gmail.com>
@@ -34,7 +34,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
 
     Note it is possible to create an image that can boot from both
     legacy BIOS and EFI by defining two partitions: one with arg
-    --source bootimg-efi and another one with --source bootimg-pcbios.
+    --source bootimg_efi and another one with --source bootimg_pcbios.
     However, this method has the obvious downside that it requires TWO
     partitions to be created on the storage device.
     Both partitions will also be marked as "bootable" which does not work on
@@ -45,7 +45,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
     the first partition will be duplicated into the second, even though it
     will not be used at all.
 
-    Also, unlike "isoimage-isohybrid" that also does BIOS and EFI, this plugin
+    Also, unlike "isoimage_isohybrid" that also does BIOS and EFI, this plugin
     allows you to have more than a single rootfs partition and does
     not turn the rootfs into an initramfs RAM image.
 
@@ -53,32 +53,32 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
     does not have the limitations listed above.
 
     The plugin is made so it tries not to reimplement what's already
-    been done in other plugins; as such it imports "bootimg-pcbios"
-    and "bootimg-efi".
-    Plugin "bootimg-pcbios" is used to generate legacy BIOS boot.
-    Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it
+    been done in other plugins; as such it imports "bootimg_pcbios"
+    and "bootimg_efi".
+    Plugin "bootimg_pcbios" is used to generate legacy BIOS boot.
+    Plugin "bootimg_efi" is used to generate the UEFI boot. Note that it
     requires a --sourceparams argument to know which loader to use; refer
-    to "bootimg-efi" code/documentation for the list of loaders.
+    to "bootimg_efi" code/documentation for the list of loaders.
 
     Imports are handled with "SourceFileLoader" from importlib as it is
     otherwise very difficult to import a module that has a hyphen "-" in its
     filename.
     The SourcePlugin() methods used in the plugins (do_install_disk,
     do_configure_partition, do_prepare_partition) are then called on both,
-    beginning with "bootimg-efi".
+    beginning with "bootimg_efi".
 
     Plugin options, such as "--sourceparams", can still be passed to a
     plugin, as long as they do not cause issues in the other plugin.
 
     Example wic configuration:
-    part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\
+    part /boot --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\\
                --ondisk sda --label os_boot --active --align 1024 --use-uuid
     """
 
-    name = 'bootimg-biosplusefi'
+    name = 'bootimg_biosplusefi'
 
-    __PCBIOS_MODULE_NAME = "bootimg-pcbios"
-    __EFI_MODULE_NAME = "bootimg-efi"
+    __PCBIOS_MODULE_NAME = "bootimg_pcbios"
+    __EFI_MODULE_NAME = "bootimg_efi"
 
     __imgEFIObj = None
     __imgBiosObj = None
@@ -100,7 +100,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
 
         """
 
-        # Import bootimg-pcbios (class name "BootimgPcbiosPlugin")
+        # Import bootimg_pcbios (class name "BootimgPcbiosPlugin")
         modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                   cls.__PCBIOS_MODULE_NAME + ".py")
         loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath)
@@ -108,7 +108,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
         loader.exec_module(mod)
         cls.__imgBiosObj = mod.BootimgPcbiosPlugin()
 
-        # Import bootimg-efi (class name "BootimgEFIPlugin")
+        # Import bootimg_efi (class name "BootimgEFIPlugin")
         modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                   cls.__EFI_MODULE_NAME + ".py")
         loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath)
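The SourceFileLoader pattern the docstring describes, shown standalone (module path illustrative); the rename removes the hyphens that originally forced this approach, but the mechanism is unchanged:

    import types
    from importlib.machinery import SourceFileLoader

    loader = SourceFileLoader("bootimg_pcbios", "/path/to/bootimg_pcbios.py")
    mod = types.ModuleType(loader.name)
    loader.exec_module(mod)  # runs the module body, populating mod's namespace
    plugin = mod.BootimgPcbiosPlugin()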
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg_efi.py
index 13a9cddf4e..cf16705a28 100644
--- a/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/scripts/lib/wic/plugins/source/bootimg_efi.py
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # DESCRIPTION
-# This implements the 'bootimg-efi' source plugin class for 'wic'
+# This implements the 'bootimg_efi' source plugin class for 'wic'
 #
 # AUTHORS
 # Tom Zanussi <tom.zanussi (at] linux.intel.com>
@@ -32,7 +32,7 @@ class BootimgEFIPlugin(SourcePlugin):
     This plugin supports GRUB 2 and systemd-boot bootloaders.
     """
 
-    name = 'bootimg-efi'
+    name = 'bootimg_efi'
 
     @classmethod
     def _copy_additional_files(cls, hdddir, initrd, dtb):
@@ -43,16 +43,18 @@ class BootimgEFIPlugin(SourcePlugin):
         if initrd:
             initrds = initrd.split(';')
             for rd in initrds:
-                cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
-                exec_cmd(cp_cmd, True)
+                cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, rd, hdddir)
+                out = exec_cmd(cp_cmd, True)
+                logger.debug("initrd files:\n%s" % (out))
         else:
             logger.debug("Ignoring missing initrd")
 
         if dtb:
             if ';' in dtb:
                 raise WicError("Only one DTB supported, exiting")
-            cp_cmd = "cp %s/%s %s" % (bootimg_dir, dtb, hdddir)
-            exec_cmd(cp_cmd, True)
+            cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, dtb, hdddir)
+            out = exec_cmd(cp_cmd, True)
+            logger.debug("dtb files:\n%s" % (out))
 
     @classmethod
     def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params):
@@ -123,8 +125,16 @@ class BootimgEFIPlugin(SourcePlugin):
     @classmethod
     def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params):
         """
-        Create loader-specific systemd-boot/gummiboot config
+        Create loader-specific systemd-boot/gummiboot config. Unified Kernel Image (uki)
+        support is handled in the image recipe by uki.bbclass; only the systemd-boot
+        loader config and the ESP partition structure are created here.
         """
+        # detect uki.bbclass usage
+        image_classes = get_bitbake_var("IMAGE_CLASSES").split()
+        unified_image = False
+        if "uki" in image_classes:
+            unified_image = True
+
         install_cmd = "install -d %s/loader" % hdddir
         exec_cmd(install_cmd)
 
@@ -132,28 +142,26 @@ class BootimgEFIPlugin(SourcePlugin):
         exec_cmd(install_cmd)
 
         bootloader = creator.ks.bootloader
-
-        unified_image = source_params.get('create-unified-kernel-image') == "true"
-
         loader_conf = ""
-        if not unified_image:
-            loader_conf += "default boot\n"
-        loader_conf += "timeout %d\n" % bootloader.timeout
 
-        initrd = source_params.get('initrd')
-        dtb = source_params.get('dtb')
-
-        if not unified_image:
-            cls._copy_additional_files(hdddir, initrd, dtb)
+        # 5 seconds is a sensible default timeout
+        loader_conf += "timeout %d\n" % (bootloader.timeout or 5)
 
         logger.debug("Writing systemd-boot config "
                      "%s/hdd/boot/loader/loader.conf", cr_workdir)
         cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w")
         cfg.write(loader_conf)
+        logger.debug("loader.conf:\n%s" % (loader_conf))
         cfg.close()
 
+        initrd = source_params.get('initrd')
+        dtb = source_params.get('dtb')
+        if not unified_image:
+            cls._copy_additional_files(hdddir, initrd, dtb)
+
         configfile = creator.ks.bootloader.configfile
         custom_cfg = None
+        boot_conf = ""
         if configfile:
             custom_cfg = get_custom_config(configfile)
             if custom_cfg:
@@ -164,8 +172,7 @@ class BootimgEFIPlugin(SourcePlugin):
             else:
                 raise WicError("configfile is specified but failed to "
                                "get it from %s.", configfile)
-
-        if not custom_cfg:
+        else:
             # Create systemd-boot configuration using parameters from wks file
             kernel = get_bitbake_var("KERNEL_IMAGETYPE")
             if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
@@ -175,7 +182,6 @@ class BootimgEFIPlugin(SourcePlugin):
 
             title = source_params.get('title')
 
-            boot_conf = ""
             boot_conf += "title %s\n" % (title if title else "boot")
             boot_conf += "linux /%s\n" % kernel
 
@@ -200,6 +206,7 @@ class BootimgEFIPlugin(SourcePlugin):
200 "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) 206 "%s/hdd/boot/loader/entries/boot.conf", cr_workdir)
201 cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") 207 cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w")
202 cfg.write(boot_conf) 208 cfg.write(boot_conf)
209 logger.debug("boot.conf:\n%s" % (boot_conf))
203 cfg.close() 210 cfg.close()
204 211
205 212
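For reference, a sketch of what the two generated files can look like on a non-UKI image (kernel name and timeout illustrative):

    # loader/loader.conf
    timeout 5

    # loader/entries/boot.conf
    title boot
    linux /bzImage

With uki.bbclass in IMAGE_CLASSES, only loader.conf and the ESP directory structure are produced here; the unified kernel images themselves come from the image recipe.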
@@ -223,9 +230,9 @@ class BootimgEFIPlugin(SourcePlugin):
             elif source_params['loader'] == 'uefi-kernel':
                 pass
             else:
-                raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader'])
+                raise WicError("unrecognized bootimg_efi loader: %s" % source_params['loader'])
         except KeyError:
-            raise WicError("bootimg-efi requires a loader, none specified")
+            raise WicError("bootimg_efi requires a loader, none specified")
 
         if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None:
             logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES')
@@ -245,7 +252,7 @@ class BootimgEFIPlugin(SourcePlugin):
 
         # list of tuples (src_name, dst_name)
         deploy_files = []
-        for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
+        for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files):
             if ';' in src_entry:
                 dst_entry = tuple(src_entry.split(';'))
                 if not dst_entry[0] or not dst_entry[1]:
@@ -304,134 +311,43 @@ class BootimgEFIPlugin(SourcePlugin):
304 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) 311 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
305 312
306 if source_params.get('create-unified-kernel-image') == "true": 313 if source_params.get('create-unified-kernel-image') == "true":
307 initrd = source_params.get('initrd') 314 raise WicError("create-unified-kernel-image is no longer supported. Please use uki.bbclass.")
308 if not initrd:
309 raise WicError("initrd= must be specified when create-unified-kernel-image=true, exiting")
310
311 deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
312 efi_stub = glob("%s/%s" % (deploy_dir, "linux*.efi.stub"))
313 if len(efi_stub) == 0:
314 raise WicError("Unified Kernel Image EFI stub not found, exiting")
315 efi_stub = efi_stub[0]
316
317 with tempfile.TemporaryDirectory() as tmp_dir:
318 label = source_params.get('label')
319 label_conf = "root=%s" % creator.rootdev
320 if label:
321 label_conf = "LABEL=%s" % label
322
323 bootloader = creator.ks.bootloader
324 cmdline = open("%s/cmdline" % tmp_dir, "w")
325 cmdline.write("%s %s" % (label_conf, bootloader.append))
326 cmdline.close()
327 315
328 initrds = initrd.split(';') 316 if source_params.get('install-kernel-into-boot-dir') != 'false':
329 initrd = open("%s/initrd" % tmp_dir, "wb") 317 install_cmd = "install -v -p -m 0644 %s/%s %s/%s" % \
330 for f in initrds: 318 (staging_kernel_dir, kernel, hdddir, kernel)
331 with open("%s/%s" % (deploy_dir, f), 'rb') as in_file: 319 out = exec_cmd(install_cmd)
332 shutil.copyfileobj(in_file, initrd) 320 logger.debug("Installed kernel files:\n%s" % out)
333 initrd.close()
334
335 # Searched by systemd-boot:
336 # https://systemd.io/BOOT_LOADER_SPECIFICATION/#type-2-efi-unified-kernel-images
337 install_cmd = "install -d %s/EFI/Linux" % hdddir
338 exec_cmd(install_cmd)
339
340 staging_dir_host = get_bitbake_var("STAGING_DIR_HOST")
341 target_sys = get_bitbake_var("TARGET_SYS")
342
343 objdump_cmd = "%s-objdump" % target_sys
344 objdump_cmd += " -p %s" % efi_stub
345 objdump_cmd += " | awk '{ if ($1 == \"SectionAlignment\"){print $2} }'"
346
347 ret, align_str = exec_native_cmd(objdump_cmd, native_sysroot)
348 align = int(align_str, 16)
349
350 objdump_cmd = "%s-objdump" % target_sys
351 objdump_cmd += " -h %s | tail -2" % efi_stub
352 ret, output = exec_native_cmd(objdump_cmd, native_sysroot)
353
354 offset = int(output.split()[2], 16) + int(output.split()[3], 16)
355
356 osrel_off = offset + align - offset % align
357 osrel_path = "%s/usr/lib/os-release" % staging_dir_host
358 osrel_sz = os.stat(osrel_path).st_size
359
360 cmdline_off = osrel_off + osrel_sz
361 cmdline_off = cmdline_off + align - cmdline_off % align
362 cmdline_sz = os.stat(cmdline.name).st_size
363
364 dtb_off = cmdline_off + cmdline_sz
365 dtb_off = dtb_off + align - dtb_off % align
366
367 dtb = source_params.get('dtb')
368 if dtb:
369 if ';' in dtb:
370 raise WicError("Only one DTB supported, exiting")
371 dtb_path = "%s/%s" % (deploy_dir, dtb)
372 dtb_params = '--add-section .dtb=%s --change-section-vma .dtb=0x%x' % \
373 (dtb_path, dtb_off)
374 linux_off = dtb_off + os.stat(dtb_path).st_size
375 linux_off = linux_off + align - linux_off % align
376 else:
377 dtb_params = ''
378 linux_off = dtb_off
379
380 linux_path = "%s/%s" % (staging_kernel_dir, kernel)
381 linux_sz = os.stat(linux_path).st_size
382
383 initrd_off = linux_off + linux_sz
384 initrd_off = initrd_off + align - initrd_off % align
385
386 # https://www.freedesktop.org/software/systemd/man/systemd-stub.html
387 objcopy_cmd = "%s-objcopy" % target_sys
388 objcopy_cmd += " --enable-deterministic-archives"
389 objcopy_cmd += " --preserve-dates"
390 objcopy_cmd += " --add-section .osrel=%s" % osrel_path
391 objcopy_cmd += " --change-section-vma .osrel=0x%x" % osrel_off
392 objcopy_cmd += " --add-section .cmdline=%s" % cmdline.name
393 objcopy_cmd += " --change-section-vma .cmdline=0x%x" % cmdline_off
394 objcopy_cmd += dtb_params
395 objcopy_cmd += " --add-section .linux=%s" % linux_path
396 objcopy_cmd += " --change-section-vma .linux=0x%x" % linux_off
397 objcopy_cmd += " --add-section .initrd=%s" % initrd.name
398 objcopy_cmd += " --change-section-vma .initrd=0x%x" % initrd_off
399 objcopy_cmd += " %s %s/EFI/Linux/linux.efi" % (efi_stub, hdddir)
400
401 exec_native_cmd(objcopy_cmd, native_sysroot)
402 else:
403 if source_params.get('install-kernel-into-boot-dir') != 'false':
404 install_cmd = "install -m 0644 %s/%s %s/%s" % \
405 (staging_kernel_dir, kernel, hdddir, kernel)
406 exec_cmd(install_cmd)
407 321
408 if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): 322 if get_bitbake_var("IMAGE_EFI_BOOT_FILES"):
409 for src_path, dst_path in cls.install_task: 323 for src_path, dst_path in cls.install_task:
410 install_cmd = "install -m 0644 -D %s %s" \ 324 install_cmd = "install -v -p -m 0644 -D %s %s" \
411 % (os.path.join(kernel_dir, src_path), 325 % (os.path.join(kernel_dir, src_path),
412 os.path.join(hdddir, dst_path)) 326 os.path.join(hdddir, dst_path))
413 exec_cmd(install_cmd) 327 out = exec_cmd(install_cmd)
328 logger.debug("Installed IMAGE_EFI_BOOT_FILES:\n%s" % out)
414 329
415 try: 330 try:
416 if source_params['loader'] == 'grub-efi': 331 if source_params['loader'] == 'grub-efi':
417 shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, 332 shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir,
418 "%s/grub.cfg" % cr_workdir) 333 "%s/grub.cfg" % cr_workdir)
419 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: 334 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]:
420 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) 335 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:])
421 exec_cmd(cp_cmd, True) 336 exec_cmd(cp_cmd, True)
422 shutil.move("%s/grub.cfg" % cr_workdir, 337 shutil.move("%s/grub.cfg" % cr_workdir,
423 "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) 338 "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir)
424 elif source_params['loader'] == 'systemd-boot': 339 elif source_params['loader'] == 'systemd-boot':
425 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: 340 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]:
426 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) 341 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:])
427 exec_cmd(cp_cmd, True) 342 out = exec_cmd(cp_cmd, True)
343 logger.debug("systemd-boot files:\n%s" % out)
428 elif source_params['loader'] == 'uefi-kernel': 344 elif source_params['loader'] == 'uefi-kernel':
429 kernel = get_bitbake_var("KERNEL_IMAGETYPE") 345 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
430 if not kernel: 346 if not kernel:
431 raise WicError("Empty KERNEL_IMAGETYPE %s\n" % target) 347 raise WicError("Empty KERNEL_IMAGETYPE")
432 target = get_bitbake_var("TARGET_SYS") 348 target = get_bitbake_var("TARGET_SYS")
433 if not target: 349 if not target:
434 raise WicError("Unknown arch (TARGET_SYS) %s\n" % target) 350 raise WicError("Empty TARGET_SYS")
435 351
436 if re.match("x86_64", target): 352 if re.match("x86_64", target):
437 kernel_efi_image = "bootx64.efi" 353 kernel_efi_image = "bootx64.efi"
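Aside from the rename, the hunk above drops the plugin's hand-rolled UKI assembly, which packed .osrel, .cmdline, an optional .dtb, .linux and .initrd as extra PE sections onto the systemd EFI stub, rounding every section offset up to the stub's SectionAlignment. A minimal sketch of that round-up arithmetic (helper names are mine, not from the plugin):

    def align_up_next(offset, align):
        # the form the removed code used: always advances to the next
        # boundary, even when the offset is already aligned
        return offset + align - offset % align

    def align_up(offset, align):
        # conventional round-up that keeps an already-aligned offset
        return (offset + align - 1) // align * align

    assert align_up_next(0x1200, 0x200) == 0x1400
    assert align_up(0x1200, 0x200) == 0x1200

Over-allocating one alignment slot per section is harmless for an image this small, which is presumably why the simpler form was chosen.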
@@ -445,23 +361,33 @@ class BootimgEFIPlugin(SourcePlugin):
445 raise WicError("UEFI stub kernel is incompatible with target %s" % target) 361 raise WicError("UEFI stub kernel is incompatible with target %s" % target)
446 362
447 for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]: 363 for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]:
448 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image) 364 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image)
449 exec_cmd(cp_cmd, True) 365 out = exec_cmd(cp_cmd, True)
366 logger.debug("uefi-kernel files:\n%s" % out)
450 else: 367 else:
451 raise WicError("unrecognized bootimg-efi loader: %s" % 368 raise WicError("unrecognized bootimg_efi loader: %s" %
452 source_params['loader']) 369 source_params['loader'])
370
371 # must have installed at least one EFI bootloader
372 out = glob(os.path.join(hdddir, 'EFI', 'BOOT', 'boot*.efi'))
373 logger.debug("Installed EFI loader files:\n%s" % out)
374 if not out:
375 raise WicError("No EFI loaders installed to ESP partition. Check that grub-efi, systemd-boot or similar is installed.")
376
453 except KeyError: 377 except KeyError:
454 raise WicError("bootimg-efi requires a loader, none specified") 378 raise WicError("bootimg_efi requires a loader, none specified")
455 379
456 startup = os.path.join(kernel_dir, "startup.nsh") 380 startup = os.path.join(kernel_dir, "startup.nsh")
457 if os.path.exists(startup): 381 if os.path.exists(startup):
458 cp_cmd = "cp %s %s/" % (startup, hdddir) 382 cp_cmd = "cp -v -p %s %s/" % (startup, hdddir)
459 exec_cmd(cp_cmd, True) 383 out = exec_cmd(cp_cmd, True)
384 logger.debug("startup files:\n%s" % out)
460 385
461 for paths in part.include_path or []: 386 for paths in part.include_path or []:
462 for path in paths: 387 for path in paths:
463 cp_cmd = "cp -r %s %s/" % (path, hdddir) 388 cp_cmd = "cp -v -p -r %s %s/" % (path, hdddir)
464 exec_cmd(cp_cmd, True) 389 out = exec_cmd(cp_cmd, True)
390 logger.debug("include_path files:\n%s" % out)
465 391
466 du_cmd = "du -bks %s" % hdddir 392 du_cmd = "du -bks %s" % hdddir
467 out = exec_cmd(du_cmd) 393 out = exec_cmd(du_cmd)
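For the uefi-kernel loader the kernel binary itself is the EFI payload and is installed under the architecture's default removable-media name. Only the x86_64 case is visible in this hunk, so the fuller mapping below is an assumption based on standard EFI naming, not a quote of the plugin:

    import re
    from wic import WicError

    def efi_default_name(target_sys):
        # hypothetical helper: default EFI boot file name per TARGET_SYS
        if target_sys.startswith("x86_64"):
            return "bootx64.efi"
        if re.match(r"i.86", target_sys):
            return "bootia32.efi"
        if target_sys.startswith("aarch64"):
            return "bootaa64.efi"
        if target_sys.startswith("arm"):
            return "bootarm.efi"
        raise WicError("UEFI stub kernel is incompatible with target %s" % target_sys)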
@@ -489,12 +415,14 @@ class BootimgEFIPlugin(SourcePlugin):
489 415
490 label = part.label if part.label else "ESP" 416 label = part.label if part.label else "ESP"
491 417
492 dosfs_cmd = "mkdosfs -n %s -i %s -C %s %d" % \ 418 dosfs_cmd = "mkdosfs -v -n %s -i %s -C %s %d" % \
493 (label, part.fsuuid, bootimg, blocks) 419 (label, part.fsuuid, bootimg, blocks)
494 exec_native_cmd(dosfs_cmd, native_sysroot) 420 out = exec_native_cmd(dosfs_cmd, native_sysroot)
421 logger.debug("mkdosfs:\n%s" % (str(out)))
495 422
496 mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) 423 mcopy_cmd = "mcopy -v -p -i %s -s %s/* ::/" % (bootimg, hdddir)
497 exec_native_cmd(mcopy_cmd, native_sysroot) 424 out = exec_native_cmd(mcopy_cmd, native_sysroot)
425 logger.debug("mcopy:\n%s" % (str(out)))
498 426
499 chmod_cmd = "chmod 644 %s" % bootimg 427 chmod_cmd = "chmod 644 %s" % bootimg
500 exec_cmd(chmod_cmd) 428 exec_cmd(chmod_cmd)
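The mkdosfs/mcopy pair above is the reason wic needs no root privileges here: mkdosfs -C creates the FAT filesystem inside a regular file of the requested size, and mtools' mcopy then populates it, so no loop device or mount is involved. A condensed sketch of the same pattern (paths and size illustrative):

    import os
    import subprocess

    def make_esp(image, srcdir, blocks):
        # create a FAT image file of <blocks> KiB, then copy the staged
        # boot tree into it with mtools -- nothing is ever mounted
        subprocess.run(["mkdosfs", "-v", "-n", "ESP", "-C", image, str(blocks)],
                       check=True)
        srcs = [os.path.join(srcdir, f) for f in os.listdir(srcdir)]
        subprocess.run(["mcopy", "-v", "-p", "-i", image, "-s"] + srcs + ["::/"],
                       check=True)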
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg_partition.py
index 1071d1af3f..cc121a78f0 100644
--- a/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ b/scripts/lib/wic/plugins/source/bootimg_partition.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6# DESCRIPTION 6# DESCRIPTION
7# This implements the 'bootimg-partition' source plugin class for 7# This implements the 'bootimg_partition' source plugin class for
8# 'wic'. The plugin creates an image of boot partition, copying over 8# 'wic'. The plugin creates an image of boot partition, copying over
9# files listed in IMAGE_BOOT_FILES bitbake variable. 9# files listed in IMAGE_BOOT_FILES bitbake variable.
10# 10#
@@ -16,7 +16,7 @@ import logging
16import os 16import os
17import re 17import re
18 18
19from glob import glob 19from oe.bootfiles import get_boot_files
20 20
21from wic import WicError 21from wic import WicError
22from wic.engine import get_custom_config 22from wic.engine import get_custom_config
@@ -31,7 +31,7 @@ class BootimgPartitionPlugin(SourcePlugin):
31 listed in IMAGE_BOOT_FILES bitbake variable. 31 listed in IMAGE_BOOT_FILES bitbake variable.
32 """ 32 """
33 33
34 name = 'bootimg-partition' 34 name = 'bootimg_partition'
35 image_boot_files_var_name = 'IMAGE_BOOT_FILES' 35 image_boot_files_var_name = 'IMAGE_BOOT_FILES'
36 36
37 @classmethod 37 @classmethod
@@ -66,42 +66,7 @@ class BootimgPartitionPlugin(SourcePlugin):
66 66
67 logger.debug('Boot files: %s', boot_files) 67 logger.debug('Boot files: %s', boot_files)
68 68
69 # list of tuples (src_name, dst_name) 69 cls.install_task = get_boot_files(kernel_dir, boot_files)
70 deploy_files = []
71 for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
72 if ';' in src_entry:
73 dst_entry = tuple(src_entry.split(';'))
74 if not dst_entry[0] or not dst_entry[1]:
75 raise WicError('Malformed boot file entry: %s' % src_entry)
76 else:
77 dst_entry = (src_entry, src_entry)
78
79 logger.debug('Destination entry: %r', dst_entry)
80 deploy_files.append(dst_entry)
81
82 cls.install_task = [];
83 for deploy_entry in deploy_files:
84 src, dst = deploy_entry
85 if '*' in src:
86 # by default install files under their basename
87 entry_name_fn = os.path.basename
88 if dst != src:
89 # unless a target name was given, then treat name
90 # as a directory and append a basename
91 entry_name_fn = lambda name: \
92 os.path.join(dst,
93 os.path.basename(name))
94
95 srcs = glob(os.path.join(kernel_dir, src))
96
97 logger.debug('Globbed sources: %s', ', '.join(srcs))
98 for entry in srcs:
99 src = os.path.relpath(entry, kernel_dir)
100 entry_dst_name = entry_name_fn(entry)
101 cls.install_task.append((src, entry_dst_name))
102 else:
103 cls.install_task.append((src, dst))
104
105 if source_params.get('loader') != "u-boot": 70 if source_params.get('loader') != "u-boot":
106 return 71 return
107 72
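The inline parser deleted above is replaced by a call to oe.bootfiles.get_boot_files. For reference, the semantics the removed block implemented, restated as a compact sketch (simplified; not the actual oe.bootfiles implementation):

    import os
    import re
    from glob import glob

    def boot_file_tasks(kernel_dir, boot_files):
        # returns (src, dst) install pairs for an IMAGE_BOOT_FILES value,
        # e.g. "zImage u-boot.img;boot.img overlays/*;overlays/"
        tasks = []
        for entry in re.findall(r'[\w;\-\./\*]+', boot_files):
            src, sep, dst = entry.partition(';')
            if sep and not (src and dst):
                raise ValueError('Malformed boot file entry: %s' % entry)
            if '*' in src:
                for match in glob(os.path.join(kernel_dir, src)):
                    rel = os.path.relpath(match, kernel_dir)
                    name = os.path.basename(match)
                    # bare globs install under the basename; an explicit
                    # destination is treated as a directory prefix
                    tasks.append((rel, os.path.join(dst, name) if sep else name))
            else:
                tasks.append((src, dst if sep else src))
        return tasks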
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg_pcbios.py
index a207a83530..21f41e00bb 100644
--- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/scripts/lib/wic/plugins/source/bootimg_pcbios.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6# DESCRIPTION 6# DESCRIPTION
7# This implements the 'bootimg-pcbios' source plugin class for 'wic' 7# This implements the 'bootimg_pcbios' source plugin class for 'wic'
8# 8#
9# AUTHORS 9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com> 10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
@@ -27,7 +27,7 @@ class BootimgPcbiosPlugin(SourcePlugin):
27 Create MBR boot partition and install syslinux on it. 27 Create MBR boot partition and install syslinux on it.
28 """ 28 """
29 29
30 name = 'bootimg-pcbios' 30 name = 'bootimg_pcbios'
31 31
32 @classmethod 32 @classmethod
33 def _get_bootimg_dir(cls, bootimg_dir, dirname): 33 def _get_bootimg_dir(cls, bootimg_dir, dirname):
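Because wic resolves source plugins by name, kickstart files need the underscore spelling to match the renamed plugins. A hypothetical .wks fragment using the new names (disk, labels and options illustrative):

    part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
    part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024
    bootloader --timeout=0 --append="rootwait"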
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py
index 607356ad13..5d42eb5d3e 100644
--- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6# DESCRIPTION 6# DESCRIPTION
7# This implements the 'isoimage-isohybrid' source plugin class for 'wic' 7# This implements the 'isoimage_isohybrid' source plugin class for 'wic'
8# 8#
9# AUTHORS 9# AUTHORS
10# Mihaly Varga <mihaly.varga (at] ni.com> 10# Mihaly Varga <mihaly.varga (at] ni.com>
@@ -35,7 +35,7 @@ class IsoImagePlugin(SourcePlugin):
35 bootloader files. 35 bootloader files.
36 36
37 Example kickstart file: 37 Example kickstart file:
38 part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi, \\ 38 part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi, \\
39 image_name= IsoImage" --ondisk cd --label LIVECD 39 image_name= IsoImage" --ondisk cd --label LIVECD
40 bootloader --timeout=10 --append=" " 40 bootloader --timeout=10 --append=" "
41 41
@@ -45,7 +45,7 @@ class IsoImagePlugin(SourcePlugin):
45 extension added by direct imager plugin) and a file named IsoImage-cd.iso 45 extension added by direct imager plugin) and a file named IsoImage-cd.iso
46 """ 46 """
47 47
48 name = 'isoimage-isohybrid' 48 name = 'isoimage_isohybrid'
49 49
50 @classmethod 50 @classmethod
51 def do_configure_syslinux(cls, creator, cr_workdir): 51 def do_configure_syslinux(cls, creator, cr_workdir):
@@ -340,10 +340,10 @@ class IsoImagePlugin(SourcePlugin):
340 cls.do_configure_grubefi(part, creator, target_dir) 340 cls.do_configure_grubefi(part, creator, target_dir)
341 341
342 else: 342 else:
343 raise WicError("unrecognized bootimg-efi loader: %s" % 343 raise WicError("unrecognized bootimg_efi loader: %s" %
344 source_params['loader']) 344 source_params['loader'])
345 except KeyError: 345 except KeyError:
346 raise WicError("bootimg-efi requires a loader, none specified") 346 raise WicError("bootimg_efi requires a loader, none specified")
347 347
348 # Create efi.img that contains bootloader files for EFI booting 348 # Create efi.img that contains bootloader files for EFI booting
349 # if ISODIR didn't exist or didn't contain it 349 # if ISODIR didn't exist or didn't contain it
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py
index e29f3a4c2f..06fce06bb1 100644
--- a/scripts/lib/wic/plugins/source/rootfs.py
+++ b/scripts/lib/wic/plugins/source/rootfs.py
@@ -41,9 +41,9 @@ class RootfsPlugin(SourcePlugin):
41 # Disallow climbing outside of parent directory using '..', 41 # Disallow climbing outside of parent directory using '..',
42 # because doing so could be quite disastrous (we will delete the 42 # because doing so could be quite disastrous (we will delete the
43 # directory, or modify a directory outside OpenEmbedded). 43 # directory, or modify a directory outside OpenEmbedded).
44 full_path = os.path.realpath(os.path.join(rootfs_dir, path)) 44 full_path = os.path.abspath(os.path.join(rootfs_dir, path))
45 if not full_path.startswith(os.path.realpath(rootfs_dir)): 45 if not full_path.startswith(os.path.realpath(rootfs_dir)):
46 logger.error("%s: Must point inside the rootfs:" % (cmd, path)) 46 logger.error("%s: Must point inside the rootfs: %s" % (cmd, path))
47 sys.exit(1) 47 sys.exit(1)
48 48
49 return full_path 49 return full_path
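The realpath-to-abspath switch on the joined path is worth noting: abspath collapses '.' and '..' purely lexically, which is exactly what this climbing check targets, while realpath would also resolve symlinks inside the rootfs (say, an absolute var/run -> /run link) and could bounce a legitimately-spelled path outside the prefix. A minimal sketch (paths hypothetical):

    import os

    rootfs_dir = "/build/tmp/rootfs"
    ok = os.path.abspath(os.path.join(rootfs_dir, "var/tmp/../run"))
    bad = os.path.abspath(os.path.join(rootfs_dir, "../../etc"))

    print(ok)    # /build/tmp/rootfs/var/run -> passes the startswith() check
    print(bad)   # /build/etc                -> rejected, climbs out of the rootfs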