Diffstat (limited to 'scripts/lib')
65 files changed, 5208 insertions, 1757 deletions
diff --git a/scripts/lib/argparse_oe.py b/scripts/lib/argparse_oe.py
index 94a4ac5011..176b732bbc 100644
--- a/scripts/lib/argparse_oe.py
+++ b/scripts/lib/argparse_oe.py
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html
index 65f1a227ad..86435273cf 100644
--- a/scripts/lib/build_perf/html/measurement_chart.html
+++ b/scripts/lib/build_perf/html/measurement_chart.html
@@ -1,50 +1,168 @@ | |||
1 | <script type="text/javascript"> | 1 | <script type="module"> |
2 | chartsDrawing += 1; | 2 | // Get raw data |
3 | google.charts.setOnLoadCallback(drawChart_{{ chart_elem_id }}); | 3 | const rawData = [ |
4 | function drawChart_{{ chart_elem_id }}() { | 4 | {% for sample in measurement.samples %} |
5 | var data = new google.visualization.DataTable(); | 5 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'], |
6 | 6 | {% endfor %} | |
7 | // Chart options | 7 | ]; |
8 | var options = { | 8 | |
9 | theme : 'material', | 9 | const convertToMinute = (time) => { |
10 | legend: 'none', | 10 | return time[0]*60 + time[1] + time[2]/60 + time[3]/3600; |
11 | hAxis: { format: '', title: 'Commit number', | 11 | } |
12 | minValue: {{ chart_opts.haxis.min }}, | 12 | |
13 | maxValue: {{ chart_opts.haxis.max }} }, | 13 | // Update value format to either minutes or leave as size value |
14 | {% if measurement.type == 'time' %} | 14 | const updateValue = (value) => { |
15 | vAxis: { format: 'h:mm:ss' }, | 15 | // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds] |
16 | {% else %} | 16 | return Array.isArray(value) ? convertToMinute(value) : value |
17 | vAxis: { format: '' }, | 17 | } |
18 | {% endif %} | 18 | |
19 | pointSize: 5, | 19 | // Convert raw data to the format: [time, value] |
20 | chartArea: { left: 80, right: 15 }, | 20 | const data = rawData.map(([commit, value, time]) => { |
21 | }; | 21 | return [ |
22 | 22 | // The Date object takes values in milliseconds rather than seconds. So to use a Unix timestamp we have to multiply it by 1000. | |
23 | // Define data columns | 23 | new Date(time * 1000).getTime(), |
24 | data.addColumn('number', 'Commit'); | 24 | // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds] |
25 | data.addColumn('{{ measurement.value_type.gv_data_type }}', | 25 | updateValue(value) |
26 | '{{ measurement.value_type.quantity }}'); | 26 | ] |
27 | // Add data rows | 27 | }); |
28 | data.addRows([ | 28 | |
29 | {% for sample in measurement.samples %} | 29 | const commitCountList = rawData.map(([commit, value, time]) => { |
30 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}], | 30 | return commit |
31 | {% endfor %} | 31 | }); |
32 | ]); | 32 | |
33 | 33 | const commitCountData = rawData.map(([commit, value, time]) => { | |
34 | // Finally, draw the chart | 34 | return updateValue(value) |
35 | chart_div = document.getElementById('{{ chart_elem_id }}'); | 35 | }); |
36 | var chart = new google.visualization.LineChart(chart_div); | 36 | |
37 | google.visualization.events.addListener(chart, 'ready', function () { | 37 | // Set chart options |
38 | //chart_div = document.getElementById('{{ chart_elem_id }}'); | 38 | const option_start_time = { |
39 | //chart_div.innerHTML = '<img src="' + chart.getImageURI() + '">'; | 39 | tooltip: { |
40 | png_div = document.getElementById('{{ chart_elem_id }}_png'); | 40 | trigger: 'axis', |
41 | png_div.outerHTML = '<a id="{{ chart_elem_id }}_png" href="' + chart.getImageURI() + '">PNG</a>'; | 41 | enterable: true, |
42 | console.log("CHART READY: {{ chart_elem_id }}"); | 42 | position: function (point, params, dom, rect, size) { |
43 | chartsDrawing -= 1; | 43 | return [point[0], '0%']; |
44 | if (chartsDrawing == 0) | 44 | }, |
45 | console.log("ALL CHARTS READY"); | 45 | formatter: function (param) { |
46 | const value = param[0].value[1] | ||
47 | const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value) | ||
48 | const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)'); | ||
49 | |||
50 | // Add commit hash to the tooltip as a link | ||
51 | const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}` | ||
52 | if ('{{ measurement.value_type.quantity }}' == 'time') { | ||
53 | const hours = Math.floor(value/60) | ||
54 | const minutes = Math.floor(value % 60) | ||
55 | const seconds = Math.floor((value * 60) % 60) | ||
56 | return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
57 | } | ||
58 | return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
59 | ;} | ||
60 | }, | ||
61 | xAxis: { | ||
62 | type: 'time', | ||
63 | }, | ||
64 | yAxis: { | ||
65 | name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB', | ||
66 | type: 'value', | ||
67 | min: function(value) { | ||
68 | return Math.round(value.min - 0.5); | ||
69 | }, | ||
70 | max: function(value) { | ||
71 | return Math.round(value.max + 0.5); | ||
72 | } | ||
73 | }, | ||
74 | dataZoom: [ | ||
75 | { | ||
76 | type: 'slider', | ||
77 | xAxisIndex: 0, | ||
78 | filterMode: 'none' | ||
79 | }, | ||
80 | ], | ||
81 | series: [ | ||
82 | { | ||
83 | name: '{{ measurement.value_type.quantity }}', | ||
84 | type: 'line', | ||
85 | symbol: 'none', | ||
86 | data: data | ||
87 | } | ||
88 | ] | ||
89 | }; | ||
90 | |||
91 | const option_commit_count = { | ||
92 | tooltip: { | ||
93 | trigger: 'axis', | ||
94 | enterable: true, | ||
95 | position: function (point, params, dom, rect, size) { | ||
96 | return [point[0], '0%']; | ||
97 | }, | ||
98 | formatter: function (param) { | ||
99 | const value = param[0].value | ||
100 | const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value) | ||
101 | const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)'); | ||
102 | // Add commit hash to the tooltip as a link | ||
103 | const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}` | ||
104 | if ('{{ measurement.value_type.quantity }}' == 'time') { | ||
105 | const hours = Math.floor(value/60) | ||
106 | const minutes = Math.floor(value % 60) | ||
107 | const seconds = Math.floor((value * 60) % 60) | ||
108 | return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
109 | } | ||
110 | return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
111 | ;} | ||
112 | }, | ||
113 | xAxis: { | ||
114 | name: 'Commit count', | ||
115 | type: 'category', | ||
116 | data: commitCountList | ||
117 | }, | ||
118 | yAxis: { | ||
119 | name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB', | ||
120 | type: 'value', | ||
121 | min: function(value) { | ||
122 | return Math.round(value.min - 0.5); | ||
123 | }, | ||
124 | max: function(value) { | ||
125 | return Math.round(value.max + 0.5); | ||
126 | } | ||
127 | }, | ||
128 | dataZoom: [ | ||
129 | { | ||
130 | type: 'slider', | ||
131 | xAxisIndex: 0, | ||
132 | filterMode: 'none' | ||
133 | }, | ||
134 | ], | ||
135 | series: [ | ||
136 | { | ||
137 | name: '{{ measurement.value_type.quantity }}', | ||
138 | type: 'line', | ||
139 | symbol: 'none', | ||
140 | data: commitCountData | ||
141 | } | ||
142 | ] | ||
143 | }; | ||
144 | |||
145 | // Draw chart | ||
146 | const draw_chart = (chart_id, option) => { | ||
147 | let chart_name | ||
148 | const chart_div = document.getElementById(chart_id); | ||
149 | // Set dark mode | ||
150 | if (window.matchMedia('(prefers-color-scheme: dark)').matches) { | ||
151 | chart_name= echarts.init(chart_div, 'dark', { | ||
152 | height: 320 | ||
153 | }); | ||
154 | } else { | ||
155 | chart_name= echarts.init(chart_div, null, { | ||
156 | height: 320 | ||
157 | }); | ||
158 | } | ||
159 | // Change chart size with browser resize | ||
160 | window.addEventListener('resize', function() { | ||
161 | chart_name.resize(); | ||
46 | }); | 162 | }); |
47 | chart.draw(data, options); | 163 | return chart_name.setOption(option); |
48 | } | 164 | } |
49 | </script> | ||
50 | 165 | ||
166 | draw_chart('{{ chart_elem_start_time_id }}', option_start_time) | ||
167 | draw_chart('{{ chart_elem_commit_count_id }}', option_commit_count) | ||
168 | </script> | ||
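
The new template converts each duration sample, given as [hours, minutes, seconds, milliseconds], into a single value in minutes for plotting, and the tooltip formatter splits that value back into h:m:s. A minimal Python sketch of the same round trip (function names are illustrative, not part of the template):

    def to_minutes(hours, minutes, seconds):
        # Same arithmetic as the template's convertToMinute() for the first
        # three components of a duration sample.
        return hours * 60 + minutes + seconds / 60

    def to_hms(value_minutes):
        # Inverse used by the tooltip formatter: minutes back to (h, m, s).
        return (int(value_minutes // 60),
                int(value_minutes % 60),
                int((value_minutes * 60) % 60))

    assert to_hms(to_minutes(1, 30, 45)) == (1, 30, 45)
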
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html
index d1ba6f2578..28cd80e738 100644
--- a/scripts/lib/build_perf/html/report.html
+++ b/scripts/lib/build_perf/html/report.html
@@ -3,17 +3,14 @@ | |||
3 | <head> | 3 | <head> |
4 | {# Scripts, for visualization#} | 4 | {# Scripts, for visualization#} |
5 | <!--START-OF-SCRIPTS--> | 5 | <!--START-OF-SCRIPTS--> |
6 | <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script> | 6 | <script src=" https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js "></script> |
7 | <script type="text/javascript"> | ||
8 | google.charts.load('current', {'packages':['corechart']}); | ||
9 | var chartsDrawing = 0; | ||
10 | </script> | ||
11 | 7 | ||
12 | {# Render measurement result charts #} | 8 | {# Render measurement result charts #} |
13 | {% for test in test_data %} | 9 | {% for test in test_data %} |
14 | {% if test.status == 'SUCCESS' %} | 10 | {% if test.status == 'SUCCESS' %} |
15 | {% for measurement in test.measurements %} | 11 | {% for measurement in test.measurements %} |
16 | {% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %} | 12 | {% set chart_elem_start_time_id = test.name + '_' + measurement.name + '_chart_start_time' %} |
13 | {% set chart_elem_commit_count_id = test.name + '_' + measurement.name + '_chart_commit_count' %} | ||
17 | {% include 'measurement_chart.html' %} | 14 | {% include 'measurement_chart.html' %} |
18 | {% endfor %} | 15 | {% endfor %} |
19 | {% endif %} | 16 | {% endif %} |
@@ -23,28 +20,29 @@ var chartsDrawing = 0; | |||
23 | 20 | ||
24 | {# Styles #} | 21 | {# Styles #} |
25 | <style> | 22 | <style> |
23 | :root { | ||
24 | --text: #000; | ||
25 | --bg: #fff; | ||
26 | --h2heading: #707070; | ||
27 | --link: #0000EE; | ||
28 | --trtopborder: #9ca3af; | ||
29 | --trborder: #e5e7eb; | ||
30 | --chartborder: #f0f0f0; | ||
31 | } | ||
26 | .meta-table { | 32 | .meta-table { |
27 | font-size: 14px; | 33 | font-size: 14px; |
28 | text-align: left; | 34 | text-align: left; |
29 | border-collapse: collapse; | 35 | border-collapse: collapse; |
30 | } | 36 | } |
31 | .meta-table tr:nth-child(even){background-color: #f2f2f2} | ||
32 | meta-table th, .meta-table td { | ||
33 | padding: 4px; | ||
34 | } | ||
35 | .summary { | 37 | .summary { |
36 | margin: 0; | ||
37 | font-size: 14px; | 38 | font-size: 14px; |
38 | text-align: left; | 39 | text-align: left; |
39 | border-collapse: collapse; | 40 | border-collapse: collapse; |
40 | } | 41 | } |
41 | summary th, .meta-table td { | ||
42 | padding: 4px; | ||
43 | } | ||
44 | .measurement { | 42 | .measurement { |
45 | padding: 8px 0px 8px 8px; | 43 | padding: 8px 0px 8px 8px; |
46 | border: 2px solid #f0f0f0; | 44 | border: 2px solid var(--chartborder); |
47 | margin-bottom: 10px; | 45 | margin: 1.5rem 0; |
48 | } | 46 | } |
49 | .details { | 47 | .details { |
50 | margin: 0; | 48 | margin: 0; |
@@ -64,18 +62,97 @@ summary th, .meta-table td { | |||
64 | background-color: #f0f0f0; | 62 | background-color: #f0f0f0; |
65 | margin-left: 10px; | 63 | margin-left: 10px; |
66 | } | 64 | } |
67 | hr { | 65 | .card-container { |
68 | color: #f0f0f0; | 66 | border-bottom-width: 1px; |
67 | padding: 1.25rem 3rem; | ||
68 | box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1); | ||
69 | border-radius: 0.25rem; | ||
70 | } | ||
71 | body { | ||
72 | font-family: 'Helvetica', sans-serif; | ||
73 | margin: 3rem 8rem; | ||
74 | background-color: var(--bg); | ||
75 | color: var(--text); | ||
76 | } | ||
77 | h1 { | ||
78 | text-align: center; | ||
69 | } | 79 | } |
70 | h2 { | 80 | h2 { |
71 | font-size: 20px; | 81 | font-size: 1.5rem; |
72 | margin-bottom: 0px; | 82 | margin-bottom: 0px; |
73 | color: #707070; | 83 | color: var(--h2heading); |
84 | padding-top: 1.5rem; | ||
74 | } | 85 | } |
75 | h3 { | 86 | h3 { |
76 | font-size: 16px; | 87 | font-size: 1.3rem; |
77 | margin: 0px; | 88 | margin: 0px; |
78 | color: #707070; | 89 | color: var(--h2heading); |
90 | padding: 1.5rem 0; | ||
91 | } | ||
92 | h4 { | ||
93 | font-size: 14px; | ||
94 | font-weight: lighter; | ||
95 | line-height: 1.2rem; | ||
96 | margin: auto; | ||
97 | padding-top: 1rem; | ||
98 | } | ||
99 | table { | ||
100 | margin-top: 1.5rem; | ||
101 | line-height: 2rem; | ||
102 | } | ||
103 | tr { | ||
104 | border-bottom: 1px solid var(--trborder); | ||
105 | } | ||
106 | tr:first-child { | ||
107 | border-bottom: 1px solid var(--trtopborder); | ||
108 | } | ||
109 | tr:last-child { | ||
110 | border-bottom: none; | ||
111 | } | ||
112 | a { | ||
113 | text-decoration: none; | ||
114 | font-weight: bold; | ||
115 | color: var(--link); | ||
116 | } | ||
117 | a:hover { | ||
118 | color: #8080ff; | ||
119 | } | ||
120 | button { | ||
121 | background-color: #F3F4F6; | ||
122 | border: none; | ||
123 | outline: none; | ||
124 | cursor: pointer; | ||
125 | padding: 10px 12px; | ||
126 | transition: 0.3s; | ||
127 | border-radius: 8px; | ||
128 | color: #3A4353; | ||
129 | } | ||
130 | button:hover { | ||
131 | background-color: #d6d9e0; | ||
132 | } | ||
133 | .tab button.active { | ||
134 | background-color: #d6d9e0; | ||
135 | } | ||
136 | @media (prefers-color-scheme: dark) { | ||
137 | :root { | ||
138 | --text: #e9e8fa; | ||
139 | --bg: #0F0C28; | ||
140 | --h2heading: #B8B7CB; | ||
141 | --link: #87cefa; | ||
142 | --trtopborder: #394150; | ||
143 | --trborder: #212936; | ||
144 | --chartborder: #b1b0bf; | ||
145 | } | ||
146 | button { | ||
147 | background-color: #28303E; | ||
148 | color: #fff; | ||
149 | } | ||
150 | button:hover { | ||
151 | background-color: #545a69; | ||
152 | } | ||
153 | .tab button.active { | ||
154 | background-color: #545a69; | ||
155 | } | ||
79 | } | 156 | } |
80 | </style> | 157 | </style> |
81 | 158 | ||
@@ -83,13 +160,14 @@ h3 { | |||
83 | </head> | 160 | </head> |
84 | 161 | ||
85 | {% macro poky_link(commit) -%} | 162 | {% macro poky_link(commit) -%} |
86 | <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a> | 163 | <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a> |
87 | {%- endmacro %} | 164 | {%- endmacro %} |
88 | 165 | ||
89 | <body><div style="width: 700px"> | 166 | <body><div> |
167 | <h1 style="text-align: center;">Performance Test Report</h1> | ||
90 | {# Test metadata #} | 168 | {# Test metadata #} |
91 | <h2>General</h2> | 169 | <h2>General</h2> |
92 | <hr> | 170 | <h4>The table provides an overview of the comparison between two selected commits from the same branch.</h4> |
93 | <table class="meta-table" style="width: 100%"> | 171 | <table class="meta-table" style="width: 100%"> |
94 | <tr> | 172 | <tr> |
95 | <th></th> | 173 | <th></th> |
@@ -112,19 +190,21 @@ h3 { | |||
112 | 190 | ||
113 | {# Test result summary #} | 191 | {# Test result summary #} |
114 | <h2>Test result summary</h2> | 192 | <h2>Test result summary</h2> |
115 | <hr> | 193 | <h4>The test summary presents a thorough breakdown of each test conducted on the branch, including details such as build time and disk space consumption. Additionally, it gives insights into the average time taken for test execution, along with absolute and relative values for a better understanding.</h4> |
116 | <table class="summary" style="width: 100%"> | 194 | <table class="summary" style="width: 100%"> |
195 | <tr> | ||
196 | <th>Test name</th> | ||
197 | <th>Measurement description</th> | ||
198 | <th>Mean value</th> | ||
199 | <th>Absolute difference</th> | ||
200 | <th>Relative difference</th> | ||
201 | </tr> | ||
117 | {% for test in test_data %} | 202 | {% for test in test_data %} |
118 | {% if loop.index is even %} | ||
119 | {% set row_style = 'style="background-color: #f2f2f2"' %} | ||
120 | {% else %} | ||
121 | {% set row_style = 'style="background-color: #ffffff"' %} | ||
122 | {% endif %} | ||
123 | {% if test.status == 'SUCCESS' %} | 203 | {% if test.status == 'SUCCESS' %} |
124 | {% for measurement in test.measurements %} | 204 | {% for measurement in test.measurements %} |
125 | <tr {{ row_style }}> | 205 | <tr {{ row_style }}> |
126 | {% if loop.index == 1 %} | 206 | {% if loop.index == 1 %} |
127 | <td>{{ test.name }}: {{ test.description }}</td> | 207 | <td><a href=#{{test.name}}>{{ test.name }}: {{ test.description }}</a></td> |
128 | {% else %} | 208 | {% else %} |
129 | {# add empty cell in place of the test name#} | 209 | {# add empty cell in place of the test name#} |
130 | <td></td> | 210 | <td></td> |
@@ -153,10 +233,12 @@ h3 { | |||
153 | </table> | 233 | </table> |
154 | 234 | ||
155 | {# Detailed test results #} | 235 | {# Detailed test results #} |
236 | <h2>Test details</h2> | ||
237 | <h4>The following section provides details of each test, accompanied by charts representing build time and disk usage over time or by commit number.</h4> | ||
156 | {% for test in test_data %} | 238 | {% for test in test_data %} |
157 | <h2>{{ test.name }}: {{ test.description }}</h2> | 239 | <h3 style="color: #000;" id={{test.name}}>{{ test.name }}: {{ test.description }}</h3> |
158 | <hr> | ||
159 | {% if test.status == 'SUCCESS' %} | 240 | {% if test.status == 'SUCCESS' %} |
241 | <div class="card-container"> | ||
160 | {% for measurement in test.measurements %} | 242 | {% for measurement in test.measurements %} |
161 | <div class="measurement"> | 243 | <div class="measurement"> |
162 | <h3>{{ measurement.description }}</h3> | 244 | <h3>{{ measurement.description }}</h3> |
@@ -178,7 +260,18 @@ h3 { | |||
178 | <tr> | 260 | <tr> |
179 | <td style="width: 75%"> | 261 | <td style="width: 75%"> |
180 | {# Linechart #} | 262 | {# Linechart #} |
181 | <div id="{{ test.name }}_{{ measurement.name }}_chart"></div> | 263 | <div class="tab {{ test.name }}_{{ measurement.name }}_tablinks"> |
264 | <button class="tablinks active" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_start_time', '{{ test.name }}_{{ measurement.name }}')">Chart with start time</button> | ||
265 | <button class="tablinks" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_commit_count', '{{ test.name }}_{{ measurement.name }}')">Chart with commit count</button> | ||
266 | </div> | ||
267 | <div class="{{ test.name }}_{{ measurement.name }}_tabcontent"> | ||
268 | <div id="{{ test.name }}_{{ measurement.name }}_start_time" class="tabcontent" style="display: block;"> | ||
269 | <div id="{{ test.name }}_{{ measurement.name }}_chart_start_time"></div> | ||
270 | </div> | ||
271 | <div id="{{ test.name }}_{{ measurement.name }}_commit_count" class="tabcontent" style="display: none;"> | ||
272 | <div id="{{ test.name }}_{{ measurement.name }}_chart_commit_count"></div> | ||
273 | </div> | ||
274 | </div> | ||
182 | </td> | 275 | </td> |
183 | <td> | 276 | <td> |
184 | {# Measurement statistics #} | 277 | {# Measurement statistics #} |
@@ -275,7 +368,8 @@ h3 { | |||
275 | {% endif %} | 368 | {% endif %} |
276 | {% endif %} | 369 | {% endif %} |
277 | </div> | 370 | </div> |
278 | {% endfor %} | 371 | {% endfor %} |
372 | </div> | ||
279 | {# Unsuccessful test #} | 373 | {# Unsuccessful test #} |
280 | {% else %} | 374 | {% else %} |
281 | <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }} | 375 | <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }} |
@@ -284,6 +378,31 @@ h3 { | |||
284 | <div class="preformatted">{{ test.message }}</div> | 378 | <div class="preformatted">{{ test.message }}</div> |
285 | {% endif %} | 379 | {% endif %} |
286 | {% endfor %} | 380 | {% endfor %} |
287 | </div></body> | 381 | </div> |
288 | </html> | ||
289 | 382 | ||
383 | <script> | ||
384 | function openChart(event, chartType, chartName) { | ||
385 | let i, tabcontents, tablinks | ||
386 | tabcontents = document.querySelectorAll(`.${chartName}_tabcontent > .tabcontent`); | ||
387 | tabcontents.forEach((tabcontent) => { | ||
388 | tabcontent.style.display = "none"; | ||
389 | }); | ||
390 | |||
391 | tablinks = document.querySelectorAll(`.${chartName}_tablinks > .tablinks`); | ||
392 | tablinks.forEach((tabLink) => { | ||
393 | tabLink.classList.remove('active'); | ||
394 | }); | ||
395 | |||
396 | const targetTab = document.getElementById(chartType) | ||
397 | targetTab.style.display = "block"; | ||
398 | |||
399 | // Call resize on the ECharts instance to redraw the chart | ||
400 | const chartContainer = targetTab.querySelector('div') | ||
401 | echarts.init(chartContainer).resize(); | ||
402 | |||
403 | event.currentTarget.classList.add('active'); | ||
404 | } | ||
405 | </script> | ||
406 | |||
407 | </body> | ||
408 | </html> | ||
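
The report template now emits two chart containers per measurement, one per tab, and their element ids are derived from the test and measurement names. A rough sketch of the data shape the template iterates over and the ids it derives (the sample data below is made up):

    test_data = [
        {"name": "test1", "status": "SUCCESS",
         "measurements": [{"name": "sysres_build_time"}, {"name": "du_tmpdir"}]},
    ]
    for test in test_data:
        if test["status"] != "SUCCESS":
            continue
        for measurement in test["measurements"]:
            # Mirrors the Jinja 'set' statements in report.html above
            start_time_id = f"{test['name']}_{measurement['name']}_chart_start_time"
            commit_count_id = f"{test['name']}_{measurement['name']}_chart_commit_count"
            print(start_time_id, commit_count_id)
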
diff --git a/scripts/lib/build_perf/report.py b/scripts/lib/build_perf/report.py
index ab77424cc7..f4e6a92e09 100644
--- a/scripts/lib/build_perf/report.py
+++ b/scripts/lib/build_perf/report.py
@@ -294,7 +294,7 @@ class SizeVal(MeasurementVal): | |||
294 | return "null" | 294 | return "null" |
295 | return self / 1024 | 295 | return self / 1024 |
296 | 296 | ||
297 | def measurement_stats(meas, prefix=''): | 297 | def measurement_stats(meas, prefix='', time=0): |
298 | """Get statistics of a measurement""" | 298 | """Get statistics of a measurement""" |
299 | if not meas: | 299 | if not meas: |
300 | return {prefix + 'sample_cnt': 0, | 300 | return {prefix + 'sample_cnt': 0, |
@@ -319,6 +319,8 @@ def measurement_stats(meas, prefix=''): | |||
319 | stats['quantity'] = val_cls.quantity | 319 | stats['quantity'] = val_cls.quantity |
320 | stats[prefix + 'sample_cnt'] = len(values) | 320 | stats[prefix + 'sample_cnt'] = len(values) |
321 | 321 | ||
322 | # Add start time for both type sysres and disk usage | ||
323 | start_time = time | ||
322 | mean_val = val_cls(mean(values)) | 324 | mean_val = val_cls(mean(values)) |
323 | min_val = val_cls(min(values)) | 325 | min_val = val_cls(min(values)) |
324 | max_val = val_cls(max(values)) | 326 | max_val = val_cls(max(values)) |
@@ -334,6 +336,7 @@ def measurement_stats(meas, prefix=''): | |||
334 | stats[prefix + 'max'] = max_val | 336 | stats[prefix + 'max'] = max_val |
335 | stats[prefix + 'minus'] = val_cls(mean_val - min_val) | 337 | stats[prefix + 'minus'] = val_cls(mean_val - min_val) |
336 | stats[prefix + 'plus'] = val_cls(max_val - mean_val) | 338 | stats[prefix + 'plus'] = val_cls(max_val - mean_val) |
339 | stats[prefix + 'start_time'] = start_time | ||
337 | 340 | ||
338 | return stats | 341 | return stats |
339 | 342 | ||
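
measurement_stats() now also records when the measured build started, passed in through the new optional time argument and stored under the start_time key, so existing callers are unaffected. A hedged, self-contained sketch of that pattern (names and values are illustrative, not the real implementation):

    def measurement_stats_sketch(values, prefix='', time=0):
        # Optional 'time' defaults to 0; a caller that knows the build start
        # forwards it and it comes back in the stats dict for the charts.
        stats = {prefix + 'sample_cnt': len(values)}
        if values:
            stats[prefix + 'mean'] = sum(values) / len(values)
        stats[prefix + 'start_time'] = time
        return stats

    print(measurement_stats_sketch([40.0, 42.0, 45.0], time=1696000000.0)['start_time'])
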
diff --git a/scripts/lib/buildstats.py b/scripts/lib/buildstats.py
index c69b5bf4d7..6db60d5bcf 100644
--- a/scripts/lib/buildstats.py
+++ b/scripts/lib/buildstats.py
@@ -8,7 +8,7 @@ import json | |||
8 | import logging | 8 | import logging |
9 | import os | 9 | import os |
10 | import re | 10 | import re |
11 | from collections import namedtuple,OrderedDict | 11 | from collections import namedtuple |
12 | from statistics import mean | 12 | from statistics import mean |
13 | 13 | ||
14 | 14 | ||
@@ -79,8 +79,8 @@ class BSTask(dict): | |||
79 | return self['rusage']['ru_oublock'] | 79 | return self['rusage']['ru_oublock'] |
80 | 80 | ||
81 | @classmethod | 81 | @classmethod |
82 | def from_file(cls, buildstat_file): | 82 | def from_file(cls, buildstat_file, fallback_end=0): |
83 | """Read buildstat text file""" | 83 | """Read buildstat text file. fallback_end is an optional end time for tasks that are not recorded as finishing.""" |
84 | bs_task = cls() | 84 | bs_task = cls() |
85 | log.debug("Reading task buildstats from %s", buildstat_file) | 85 | log.debug("Reading task buildstats from %s", buildstat_file) |
86 | end_time = None | 86 | end_time = None |
@@ -108,7 +108,10 @@ class BSTask(dict): | |||
108 | bs_task[ru_type][ru_key] = val | 108 | bs_task[ru_type][ru_key] = val |
109 | elif key == 'Status': | 109 | elif key == 'Status': |
110 | bs_task['status'] = val | 110 | bs_task['status'] = val |
111 | if end_time is not None and start_time is not None: | 111 | # If the task didn't finish, fill in the fallback end time if specified |
112 | if start_time and not end_time and fallback_end: | ||
113 | end_time = fallback_end | ||
114 | if start_time and end_time: | ||
112 | bs_task['elapsed_time'] = end_time - start_time | 115 | bs_task['elapsed_time'] = end_time - start_time |
113 | else: | 116 | else: |
114 | raise BSError("{} looks like a invalid buildstats file".format(buildstat_file)) | 117 | raise BSError("{} looks like a invalid buildstats file".format(buildstat_file)) |
@@ -226,25 +229,44 @@ class BuildStats(dict): | |||
226 | epoch = match.group('epoch') | 229 | epoch = match.group('epoch') |
227 | return name, epoch, version, revision | 230 | return name, epoch, version, revision |
228 | 231 | ||
232 | @staticmethod | ||
233 | def parse_top_build_stats(path): | ||
234 | """ | ||
235 | Parse the top-level build_stats file for build-wide start and duration. | ||
236 | """ | ||
237 | start = elapsed = 0 | ||
238 | with open(path) as fobj: | ||
239 | for line in fobj.readlines(): | ||
240 | key, val = line.split(':', 1) | ||
241 | val = val.strip() | ||
242 | if key == 'Build Started': | ||
243 | start = float(val) | ||
244 | elif key == "Elapsed time": | ||
245 | elapsed = float(val.split()[0]) | ||
246 | return start, elapsed | ||
247 | |||
229 | @classmethod | 248 | @classmethod |
230 | def from_dir(cls, path): | 249 | def from_dir(cls, path): |
231 | """Load buildstats from a buildstats directory""" | 250 | """Load buildstats from a buildstats directory""" |
232 | if not os.path.isfile(os.path.join(path, 'build_stats')): | 251 | top_stats = os.path.join(path, 'build_stats') |
252 | if not os.path.isfile(top_stats): | ||
233 | raise BSError("{} does not look like a buildstats directory".format(path)) | 253 | raise BSError("{} does not look like a buildstats directory".format(path)) |
234 | 254 | ||
235 | log.debug("Reading buildstats directory %s", path) | 255 | log.debug("Reading buildstats directory %s", path) |
236 | |||
237 | buildstats = cls() | 256 | buildstats = cls() |
257 | build_started, build_elapsed = buildstats.parse_top_build_stats(top_stats) | ||
258 | build_end = build_started + build_elapsed | ||
259 | |||
238 | subdirs = os.listdir(path) | 260 | subdirs = os.listdir(path) |
239 | for dirname in subdirs: | 261 | for dirname in subdirs: |
240 | recipe_dir = os.path.join(path, dirname) | 262 | recipe_dir = os.path.join(path, dirname) |
241 | if not os.path.isdir(recipe_dir): | 263 | if dirname == "reduced_proc_pressure" or not os.path.isdir(recipe_dir): |
242 | continue | 264 | continue |
243 | name, epoch, version, revision = cls.split_nevr(dirname) | 265 | name, epoch, version, revision = cls.split_nevr(dirname) |
244 | bsrecipe = BSRecipe(name, epoch, version, revision) | 266 | bsrecipe = BSRecipe(name, epoch, version, revision) |
245 | for task in os.listdir(recipe_dir): | 267 | for task in os.listdir(recipe_dir): |
246 | bsrecipe.tasks[task] = BSTask.from_file( | 268 | bsrecipe.tasks[task] = BSTask.from_file( |
247 | os.path.join(recipe_dir, task)) | 269 | os.path.join(recipe_dir, task), build_end) |
248 | if name in buildstats: | 270 | if name in buildstats: |
249 | raise BSError("Cannot handle multiple versions of the same " | 271 | raise BSError("Cannot handle multiple versions of the same " |
250 | "package ({})".format(name)) | 272 | "package ({})".format(name)) |
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
index e69a10f452..86aadf39a6 100644
--- a/scripts/lib/checklayer/__init__.py
+++ b/scripts/lib/checklayer/__init__.py
@@ -16,6 +16,7 @@ class LayerType(Enum): | |||
16 | BSP = 0 | 16 | BSP = 0 |
17 | DISTRO = 1 | 17 | DISTRO = 1 |
18 | SOFTWARE = 2 | 18 | SOFTWARE = 2 |
19 | CORE = 3 | ||
19 | ERROR_NO_LAYER_CONF = 98 | 20 | ERROR_NO_LAYER_CONF = 98 |
20 | ERROR_BSP_DISTRO = 99 | 21 | ERROR_BSP_DISTRO = 99 |
21 | 22 | ||
@@ -43,7 +44,7 @@ def _get_layer_collections(layer_path, lconf=None, data=None): | |||
43 | 44 | ||
44 | ldata.setVar('LAYERDIR', layer_path) | 45 | ldata.setVar('LAYERDIR', layer_path) |
45 | try: | 46 | try: |
46 | ldata = bb.parse.handle(lconf, ldata, include=True) | 47 | ldata = bb.parse.handle(lconf, ldata, include=True, baseconfig=True) |
47 | except: | 48 | except: |
48 | raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path) | 49 | raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path) |
49 | ldata.expandVarref('LAYERDIR') | 50 | ldata.expandVarref('LAYERDIR') |
@@ -106,7 +107,13 @@ def _detect_layer(layer_path): | |||
106 | if distros: | 107 | if distros: |
107 | is_distro = True | 108 | is_distro = True |
108 | 109 | ||
109 | if is_bsp and is_distro: | 110 | layer['collections'] = _get_layer_collections(layer['path']) |
111 | |||
112 | if layer_name == "meta" and "core" in layer['collections']: | ||
113 | layer['type'] = LayerType.CORE | ||
114 | layer['conf']['machines'] = machines | ||
115 | layer['conf']['distros'] = distros | ||
116 | elif is_bsp and is_distro: | ||
110 | layer['type'] = LayerType.ERROR_BSP_DISTRO | 117 | layer['type'] = LayerType.ERROR_BSP_DISTRO |
111 | elif is_bsp: | 118 | elif is_bsp: |
112 | layer['type'] = LayerType.BSP | 119 | layer['type'] = LayerType.BSP |
@@ -117,8 +124,6 @@ def _detect_layer(layer_path): | |||
117 | else: | 124 | else: |
118 | layer['type'] = LayerType.SOFTWARE | 125 | layer['type'] = LayerType.SOFTWARE |
119 | 126 | ||
120 | layer['collections'] = _get_layer_collections(layer['path']) | ||
121 | |||
122 | return layer | 127 | return layer |
123 | 128 | ||
124 | def detect_layers(layer_directories, no_auto): | 129 | def detect_layers(layer_directories, no_auto): |
@@ -156,6 +161,27 @@ def _find_layer(depend, layers): | |||
156 | return layer | 161 | return layer |
157 | return None | 162 | return None |
158 | 163 | ||
164 | def sanity_check_layers(layers, logger): | ||
165 | """ | ||
166 | Check that we didn't find duplicate collection names, as the layer that will | ||
167 | be used is non-deterministic. The precise check is duplicate collections | ||
168 | with different patterns, as the same pattern being repeated won't cause | ||
169 | problems. | ||
170 | """ | ||
171 | import collections | ||
172 | |||
173 | passed = True | ||
174 | seen = collections.defaultdict(set) | ||
175 | for layer in layers: | ||
176 | for name, data in layer.get("collections", {}).items(): | ||
177 | seen[name].add(data["pattern"]) | ||
178 | |||
179 | for name, patterns in seen.items(): | ||
180 | if len(patterns) > 1: | ||
181 | passed = False | ||
182 | logger.error("Collection %s found multiple times: %s" % (name, ", ".join(patterns))) | ||
183 | return passed | ||
184 | |||
159 | def get_layer_dependencies(layer, layers, logger): | 185 | def get_layer_dependencies(layer, layers, logger): |
160 | def recurse_dependencies(depends, layer, layers, logger, ret = []): | 186 | def recurse_dependencies(depends, layer, layers, logger, ret = []): |
161 | logger.debug('Processing dependencies %s for layer %s.' % \ | 187 | logger.debug('Processing dependencies %s for layer %s.' % \ |
@@ -261,7 +287,7 @@ def check_command(error_msg, cmd, cwd=None): | |||
261 | raise RuntimeError(msg) | 287 | raise RuntimeError(msg) |
262 | return output | 288 | return output |
263 | 289 | ||
264 | def get_signatures(builddir, failsafe=False, machine=None): | 290 | def get_signatures(builddir, failsafe=False, machine=None, extravars=None): |
265 | import re | 291 | import re |
266 | 292 | ||
267 | # some recipes needs to be excluded like meta-world-pkgdata | 293 | # some recipes needs to be excluded like meta-world-pkgdata |
@@ -272,13 +298,16 @@ def get_signatures(builddir, failsafe=False, machine=None): | |||
272 | sigs = {} | 298 | sigs = {} |
273 | tune2tasks = {} | 299 | tune2tasks = {} |
274 | 300 | ||
275 | cmd = 'BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" ' | 301 | cmd = 'BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" ' |
302 | if extravars: | ||
303 | cmd += extravars | ||
304 | cmd += ' ' | ||
276 | if machine: | 305 | if machine: |
277 | cmd += 'MACHINE=%s ' % machine | 306 | cmd += 'MACHINE=%s ' % machine |
278 | cmd += 'bitbake ' | 307 | cmd += 'bitbake ' |
279 | if failsafe: | 308 | if failsafe: |
280 | cmd += '-k ' | 309 | cmd += '-k ' |
281 | cmd += '-S none world' | 310 | cmd += '-S lockedsigs world' |
282 | sigs_file = os.path.join(builddir, 'locked-sigs.inc') | 311 | sigs_file = os.path.join(builddir, 'locked-sigs.inc') |
283 | if os.path.exists(sigs_file): | 312 | if os.path.exists(sigs_file): |
284 | os.unlink(sigs_file) | 313 | os.unlink(sigs_file) |
@@ -295,8 +324,8 @@ def get_signatures(builddir, failsafe=False, machine=None): | |||
295 | else: | 324 | else: |
296 | raise | 325 | raise |
297 | 326 | ||
298 | sig_regex = re.compile("^(?P<task>.*:.*):(?P<hash>.*) .$") | 327 | sig_regex = re.compile(r"^(?P<task>.*:.*):(?P<hash>.*) .$") |
299 | tune_regex = re.compile("(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*") | 328 | tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*") |
300 | current_tune = None | 329 | current_tune = None |
301 | with open(sigs_file, 'r') as f: | 330 | with open(sigs_file, 'r') as f: |
302 | for line in f.readlines(): | 331 | for line in f.readlines(): |
@@ -423,3 +452,15 @@ def compare_signatures(old_sigs, curr_sigs): | |||
423 | msg.extend([' ' + line for line in output.splitlines()]) | 452 | msg.extend([' ' + line for line in output.splitlines()]) |
424 | msg.append('') | 453 | msg.append('') |
425 | return '\n'.join(msg) | 454 | return '\n'.join(msg) |
455 | |||
456 | |||
457 | def get_git_toplevel(directory): | ||
458 | """ | ||
459 | Try and find the top of the git repository that directory might be in. | ||
460 | Returns the top-level directory, or None. | ||
461 | """ | ||
462 | cmd = ["git", "-C", directory, "rev-parse", "--show-toplevel"] | ||
463 | try: | ||
464 | return subprocess.check_output(cmd, text=True).strip() | ||
465 | except: | ||
466 | return None | ||
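
The new sanity_check_layers() only fails when the same collection name appears with different patterns; repeating an identical pattern is harmless. A toy illustration of that check with made-up layer data:

    import collections

    layers = [
        {"name": "meta-a", "collections": {"foo": {"pattern": "^meta-a/"}}},
        {"name": "meta-b", "collections": {"foo": {"pattern": "^meta-b/"}}},  # clash
    ]

    seen = collections.defaultdict(set)
    for layer in layers:
        for name, data in layer.get("collections", {}).items():
            seen[name].add(data["pattern"])

    duplicates = {name: pats for name, pats in seen.items() if len(pats) > 1}
    print(duplicates)   # e.g. {'foo': {'^meta-a/', '^meta-b/'}}
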
diff --git a/scripts/lib/checklayer/cases/bsp.py b/scripts/lib/checklayer/cases/bsp.py
index 7fd56f5d36..b76163fb56 100644
--- a/scripts/lib/checklayer/cases/bsp.py
+++ b/scripts/lib/checklayer/cases/bsp.py
@@ -11,7 +11,7 @@ from checklayer.case import OECheckLayerTestCase | |||
11 | class BSPCheckLayer(OECheckLayerTestCase): | 11 | class BSPCheckLayer(OECheckLayerTestCase): |
12 | @classmethod | 12 | @classmethod |
13 | def setUpClass(self): | 13 | def setUpClass(self): |
14 | if self.tc.layer['type'] != LayerType.BSP: | 14 | if self.tc.layer['type'] not in (LayerType.BSP, LayerType.CORE): |
15 | raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't BSP one." %\ | 15 | raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't BSP one." %\ |
16 | self.tc.layer['name']) | 16 | self.tc.layer['name']) |
17 | 17 | ||
@@ -153,7 +153,7 @@ class BSPCheckLayer(OECheckLayerTestCase): | |||
153 | # do_build can be ignored: it is know to have | 153 | # do_build can be ignored: it is know to have |
154 | # different signatures in some cases, for example in | 154 | # different signatures in some cases, for example in |
155 | # the allarch ca-certificates due to RDEPENDS=openssl. | 155 | # the allarch ca-certificates due to RDEPENDS=openssl. |
156 | # That particular dependency is whitelisted via | 156 | # That particular dependency is marked via |
157 | # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up | 157 | # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up |
158 | # in the sstate signature hash because filtering it | 158 | # in the sstate signature hash because filtering it |
159 | # out would be hard and running do_build multiple | 159 | # out would be hard and running do_build multiple |
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py
index fdfb5d18cd..ddead69a7b 100644
--- a/scripts/lib/checklayer/cases/common.py
+++ b/scripts/lib/checklayer/cases/common.py
@@ -7,11 +7,14 @@ import glob | |||
7 | import os | 7 | import os |
8 | import unittest | 8 | import unittest |
9 | import re | 9 | import re |
10 | from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures | 10 | from checklayer import get_signatures, LayerType, check_command, compare_signatures, get_git_toplevel |
11 | from checklayer.case import OECheckLayerTestCase | 11 | from checklayer.case import OECheckLayerTestCase |
12 | 12 | ||
13 | class CommonCheckLayer(OECheckLayerTestCase): | 13 | class CommonCheckLayer(OECheckLayerTestCase): |
14 | def test_readme(self): | 14 | def test_readme(self): |
15 | if self.tc.layer['type'] == LayerType.CORE: | ||
16 | raise unittest.SkipTest("Core layer's README is top level") | ||
17 | |||
15 | # The top-level README file may have a suffix (like README.rst or README.txt). | 18 | # The top-level README file may have a suffix (like README.rst or README.txt). |
16 | readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*')) | 19 | readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*')) |
17 | self.assertTrue(len(readme_files) > 0, | 20 | self.assertTrue(len(readme_files) > 0, |
@@ -31,12 +34,44 @@ class CommonCheckLayer(OECheckLayerTestCase): | |||
31 | if re.search('README', data, re.IGNORECASE): | 34 | if re.search('README', data, re.IGNORECASE): |
32 | return | 35 | return |
33 | 36 | ||
34 | self.assertIn('maintainer', data) | 37 | self.assertIn('maintainer', data.lower()) |
35 | self.assertIn('patch',data) | 38 | self.assertIn('patch', data.lower()) |
36 | # Check that there is an email address in the README | 39 | # Check that there is an email address in the README |
37 | email_regex = re.compile(r"[^@]+@[^@]+") | 40 | email_regex = re.compile(r"[^@]+@[^@]+") |
38 | self.assertTrue(email_regex.match(data)) | 41 | self.assertTrue(email_regex.match(data)) |
39 | 42 | ||
43 | def find_file_by_name(self, globs): | ||
44 | """ | ||
45 | Utility function to find a file that matches the specified list of | ||
46 | globs, in either the layer directory itself or the repository top-level | ||
47 | directory. | ||
48 | """ | ||
49 | directories = [self.tc.layer["path"]] | ||
50 | toplevel = get_git_toplevel(directories[0]) | ||
51 | if toplevel: | ||
52 | directories.append(toplevel) | ||
53 | |||
54 | for path in directories: | ||
55 | for name in globs: | ||
56 | files = glob.glob(os.path.join(path, name)) | ||
57 | if files: | ||
58 | return sorted(files)[0] | ||
59 | return None | ||
60 | |||
61 | def test_security(self): | ||
62 | """ | ||
63 | Test that the layer has a SECURITY.md (or similar) file, either in the | ||
64 | layer itself or at the top of the containing git repository. | ||
65 | """ | ||
66 | if self.tc.layer["type"] == LayerType.CORE: | ||
67 | raise unittest.SkipTest("Core layer's SECURITY is top level") | ||
68 | |||
69 | filename = self.find_file_by_name(("SECURITY", "SECURITY.*")) | ||
70 | self.assertTrue(filename, msg="Layer doesn't contain a SECURITY.md file.") | ||
71 | |||
72 | size = os.path.getsize(filename) | ||
73 | self.assertGreater(size, 0, msg=f"{filename} has no content.") | ||
74 | |||
40 | def test_parse(self): | 75 | def test_parse(self): |
41 | check_command('Layer %s failed to parse.' % self.tc.layer['name'], | 76 | check_command('Layer %s failed to parse.' % self.tc.layer['name'], |
42 | 'bitbake -p') | 77 | 'bitbake -p') |
@@ -54,6 +89,35 @@ class CommonCheckLayer(OECheckLayerTestCase): | |||
54 | ''' | 89 | ''' |
55 | get_signatures(self.td['builddir'], failsafe=False) | 90 | get_signatures(self.td['builddir'], failsafe=False) |
56 | 91 | ||
92 | def test_world_inherit_class(self): | ||
93 | ''' | ||
94 | This also does "bitbake -S none world" along with inheriting "yocto-check-layer" | ||
95 | class, which can do additional per-recipe test cases. | ||
96 | ''' | ||
97 | msg = [] | ||
98 | try: | ||
99 | get_signatures(self.td['builddir'], failsafe=False, machine=None, extravars='BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS INHERIT" INHERIT="yocto-check-layer"') | ||
100 | except RuntimeError as ex: | ||
101 | msg.append(str(ex)) | ||
102 | if msg: | ||
103 | msg.insert(0, 'Layer %s failed additional checks from yocto-check-layer.bbclass\nSee below log for specific recipe parsing errors:\n' % \ | ||
104 | self.tc.layer['name']) | ||
105 | self.fail('\n'.join(msg)) | ||
106 | |||
107 | def test_patches_upstream_status(self): | ||
108 | import sys | ||
109 | sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/')) | ||
110 | import oe.qa | ||
111 | patches = [] | ||
112 | for dirpath, dirs, files in os.walk(self.tc.layer['path']): | ||
113 | for filename in files: | ||
114 | if filename.endswith(".patch"): | ||
115 | ppath = os.path.join(dirpath, filename) | ||
116 | if oe.qa.check_upstream_status(ppath): | ||
117 | patches.append(ppath) | ||
118 | self.assertEqual(len(patches), 0 , \ | ||
119 | msg="Found following patches with malformed or missing upstream status:\n%s" % '\n'.join([str(patch) for patch in patches])) | ||
120 | |||
57 | def test_signatures(self): | 121 | def test_signatures(self): |
58 | if self.tc.layer['type'] == LayerType.SOFTWARE and \ | 122 | if self.tc.layer['type'] == LayerType.SOFTWARE and \ |
59 | not self.tc.test_software_layer_signatures: | 123 | not self.tc.test_software_layer_signatures: |
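
test_patches_upstream_status() walks the whole layer and flags any .patch file that oe.qa.check_upstream_status() reports as having a missing or malformed Upstream-Status tag (the test treats a truthy return value as a problem). A standalone sketch of that scan, with the checker passed in so the snippet does not depend on the OE module path setup:

    import os

    def find_bad_patches(layer_path, check_upstream_status):
        """Return .patch files that the supplied checker flags as problematic."""
        bad = []
        for dirpath, _, files in os.walk(layer_path):
            for filename in files:
                if filename.endswith(".patch"):
                    ppath = os.path.join(dirpath, filename)
                    if check_upstream_status(ppath):
                        bad.append(ppath)
        return bad
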
diff --git a/scripts/lib/checklayer/cases/distro.py b/scripts/lib/checklayer/cases/distro.py
index f0bee5493c..a35332451c 100644
--- a/scripts/lib/checklayer/cases/distro.py
+++ b/scripts/lib/checklayer/cases/distro.py
@@ -11,7 +11,7 @@ from checklayer.case import OECheckLayerTestCase | |||
11 | class DistroCheckLayer(OECheckLayerTestCase): | 11 | class DistroCheckLayer(OECheckLayerTestCase): |
12 | @classmethod | 12 | @classmethod |
13 | def setUpClass(self): | 13 | def setUpClass(self): |
14 | if self.tc.layer['type'] != LayerType.DISTRO: | 14 | if self.tc.layer['type'] not in (LayerType.DISTRO, LayerType.CORE): |
15 | raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't Distro one." %\ | 15 | raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't Distro one." %\ |
16 | self.tc.layer['name']) | 16 | self.tc.layer['name']) |
17 | 17 | ||
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 702db669de..fa6e1a34fd 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -78,12 +78,15 @@ def exec_fakeroot(d, cmd, **kwargs): | |||
78 | """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions""" | 78 | """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions""" |
79 | # Grab the command and check it actually exists | 79 | # Grab the command and check it actually exists |
80 | fakerootcmd = d.getVar('FAKEROOTCMD') | 80 | fakerootcmd = d.getVar('FAKEROOTCMD') |
81 | fakerootenv = d.getVar('FAKEROOTENV') | ||
82 | exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, kwargs) | ||
83 | |||
84 | def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs): | ||
81 | if not os.path.exists(fakerootcmd): | 85 | if not os.path.exists(fakerootcmd): |
82 | logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built') | 86 | logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built') |
83 | return 2 | 87 | return 2 |
84 | # Set up the appropriate environment | 88 | # Set up the appropriate environment |
85 | newenv = dict(os.environ) | 89 | newenv = dict(os.environ) |
86 | fakerootenv = d.getVar('FAKEROOTENV') | ||
87 | for varvalue in fakerootenv.split(): | 90 | for varvalue in fakerootenv.split(): |
88 | if '=' in varvalue: | 91 | if '=' in varvalue: |
89 | splitval = varvalue.split('=', 1) | 92 | splitval = varvalue.split('=', 1) |
@@ -231,7 +234,29 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None): | |||
231 | f.write(line) | 234 | f.write(line) |
232 | 235 | ||
233 | bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) | 236 | bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) |
234 | bb.process.run('git tag -f %s' % basetag, cwd=repodir) | 237 | bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir) |
238 | |||
239 | # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now, | ||
240 | # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe | ||
241 | stdout, _ = bb.process.run("git status --porcelain", cwd=repodir) | ||
242 | found = False | ||
243 | for line in stdout.splitlines(): | ||
244 | if line.endswith("/"): | ||
245 | new_dir = line.split()[1] | ||
246 | for root, dirs, files in os.walk(os.path.join(repodir, new_dir)): | ||
247 | if ".git" in dirs + files: | ||
248 | (stdout, _) = bb.process.run('git remote', cwd=root) | ||
249 | remote = stdout.splitlines()[0] | ||
250 | (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root) | ||
251 | remote_url = stdout.splitlines()[0] | ||
252 | logger.error(os.path.relpath(os.path.join(root, ".."), root)) | ||
253 | bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, "..")) | ||
254 | found = True | ||
255 | if found: | ||
256 | oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d) | ||
257 | found = False | ||
258 | if os.path.exists(os.path.join(repodir, '.gitmodules')): | ||
259 | bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir) | ||
235 | 260 | ||
236 | def recipe_to_append(recipefile, config, wildcard=False): | 261 | def recipe_to_append(recipefile, config, wildcard=False): |
237 | """ | 262 | """ |
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
index 935ffab46c..0b2c3d33dc 100644
--- a/scripts/lib/devtool/build.py
+++ b/scripts/lib/devtool/build.py
@@ -49,7 +49,7 @@ def build(args, config, basepath, workspace): | |||
49 | rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) | 49 | rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) |
50 | if not rd: | 50 | if not rd: |
51 | return 1 | 51 | return 1 |
52 | deploytask = 'do_deploy' in rd.getVar('__BBTASKS') | 52 | deploytask = 'do_deploy' in bb.build.listtasks(rd) |
53 | finally: | 53 | finally: |
54 | tinfoil.shutdown() | 54 | tinfoil.shutdown() |
55 | 55 | ||
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
index 6fe02fff2a..990303982c 100644
--- a/scripts/lib/devtool/build_sdk.py
+++ b/scripts/lib/devtool/build_sdk.py
@@ -5,15 +5,8 @@ | |||
5 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
6 | # | 6 | # |
7 | 7 | ||
8 | import os | ||
9 | import subprocess | ||
10 | import logging | 8 | import logging |
11 | import glob | 9 | from devtool import DevtoolError |
12 | import shutil | ||
13 | import errno | ||
14 | import sys | ||
15 | import tempfile | ||
16 | from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError | ||
17 | from devtool import build_image | 10 | from devtool import build_image |
18 | 11 | ||
19 | logger = logging.getLogger('devtool') | 12 | logger = logging.getLogger('devtool') |
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
index 833322571f..b5ca8f2c2f 100644
--- a/scripts/lib/devtool/deploy.py
+++ b/scripts/lib/devtool/deploy.py
@@ -16,7 +16,7 @@ import bb.utils | |||
16 | import argparse_oe | 16 | import argparse_oe |
17 | import oe.types | 17 | import oe.types |
18 | 18 | ||
19 | from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError | 19 | from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError |
20 | 20 | ||
21 | logger = logging.getLogger('devtool') | 21 | logger = logging.getLogger('devtool') |
22 | 22 | ||
@@ -133,16 +133,38 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals | |||
133 | 133 | ||
134 | return '\n'.join(lines) | 134 | return '\n'.join(lines) |
135 | 135 | ||
136 | |||
137 | |||
138 | def deploy(args, config, basepath, workspace): | 136 | def deploy(args, config, basepath, workspace): |
139 | """Entry point for the devtool 'deploy' subcommand""" | 137 | """Entry point for the devtool 'deploy' subcommand""" |
140 | import math | 138 | import oe.utils |
141 | import oe.recipeutils | ||
142 | import oe.package | ||
143 | 139 | ||
144 | check_workspace_recipe(workspace, args.recipename, checksrc=False) | 140 | check_workspace_recipe(workspace, args.recipename, checksrc=False) |
145 | 141 | ||
142 | tinfoil = setup_tinfoil(basepath=basepath) | ||
143 | try: | ||
144 | try: | ||
145 | rd = tinfoil.parse_recipe(args.recipename) | ||
146 | except Exception as e: | ||
147 | raise DevtoolError('Exception parsing recipe %s: %s' % | ||
148 | (args.recipename, e)) | ||
149 | |||
150 | srcdir = rd.getVar('D') | ||
151 | workdir = rd.getVar('WORKDIR') | ||
152 | path = rd.getVar('PATH') | ||
153 | strip_cmd = rd.getVar('STRIP') | ||
154 | libdir = rd.getVar('libdir') | ||
155 | base_libdir = rd.getVar('base_libdir') | ||
156 | max_process = oe.utils.get_bb_number_threads(rd) | ||
157 | fakerootcmd = rd.getVar('FAKEROOTCMD') | ||
158 | fakerootenv = rd.getVar('FAKEROOTENV') | ||
159 | finally: | ||
160 | tinfoil.shutdown() | ||
161 | |||
162 | return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args) | ||
163 | |||
164 | def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args): | ||
165 | import math | ||
166 | import oe.package | ||
167 | |||
146 | try: | 168 | try: |
147 | host, destdir = args.target.split(':') | 169 | host, destdir = args.target.split(':') |
148 | except ValueError: | 170 | except ValueError: |
@@ -152,118 +174,108 @@ def deploy(args, config, basepath, workspace): | |||
152 | if not destdir.endswith('/'): | 174 | if not destdir.endswith('/'): |
153 | destdir += '/' | 175 | destdir += '/' |
154 | 176 | ||
155 | tinfoil = setup_tinfoil(basepath=basepath) | 177 | recipe_outdir = srcdir |
156 | try: | 178 | if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir): |
157 | try: | 179 | raise DevtoolError('No files to deploy - have you built the %s ' |
158 | rd = tinfoil.parse_recipe(args.recipename) | 180 | 'recipe? If so, the install step has not installed ' |
159 | except Exception as e: | 181 | 'any files.' % args.recipename) |
160 | raise DevtoolError('Exception parsing recipe %s: %s' % | 182 | |
161 | (args.recipename, e)) | 183 | if args.strip and not args.dry_run: |
162 | recipe_outdir = rd.getVar('D') | 184 | # Fakeroot copy to new destination |
163 | if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir): | 185 | srcdir = recipe_outdir |
164 | raise DevtoolError('No files to deploy - have you built the %s ' | 186 | recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped') |
165 | 'recipe? If so, the install step has not installed ' | 187 | if os.path.isdir(recipe_outdir): |
166 | 'any files.' % args.recipename) | 188 | exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True) |
167 | 189 | exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) | |
168 | if args.strip and not args.dry_run: | 190 | os.environ['PATH'] = ':'.join([os.environ['PATH'], path or '']) |
169 | # Fakeroot copy to new destination | 191 | oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process) |
170 | srcdir = recipe_outdir | 192 | |
171 | recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'devtool-deploy-target-stripped') | 193 | filelist = [] |
172 | if os.path.isdir(recipe_outdir): | 194 | inodes = set({}) |
173 | bb.utils.remove(recipe_outdir, True) | 195 | ftotalsize = 0 |
174 | exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) | 196 | for root, _, files in os.walk(recipe_outdir): |
175 | os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or '']) | 197 | for fn in files: |
176 | oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'), | 198 | fstat = os.lstat(os.path.join(root, fn)) |
177 | rd.getVar('base_libdir'), rd) | 199 | # Get the size in kiB (since we'll be comparing it to the output of du -k) |
178 | 200 | # MUST use lstat() here not stat() or getfilesize() since we don't want to | |
179 | filelist = [] | 201 | # dereference symlinks |
180 | inodes = set({}) | 202 | if fstat.st_ino in inodes: |
181 | ftotalsize = 0 | 203 | fsize = 0 |
182 | for root, _, files in os.walk(recipe_outdir): | 204 | else: |
183 | for fn in files: | 205 | fsize = int(math.ceil(float(fstat.st_size)/1024)) |
184 | fstat = os.lstat(os.path.join(root, fn)) | 206 | inodes.add(fstat.st_ino) |
185 | # Get the size in kiB (since we'll be comparing it to the output of du -k) | 207 | ftotalsize += fsize |
186 | # MUST use lstat() here not stat() or getfilesize() since we don't want to | 208 | # The path as it would appear on the target |
187 | # dereference symlinks | 209 | fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn) |
188 | if fstat.st_ino in inodes: | 210 | filelist.append((fpath, fsize)) |
189 | fsize = 0 | 211 | |
190 | else: | 212 | if args.dry_run: |
191 | fsize = int(math.ceil(float(fstat.st_size)/1024)) | 213 | print('Files to be deployed for %s on target %s:' % (args.recipename, args.target)) |
192 | inodes.add(fstat.st_ino) | 214 | for item, _ in filelist: |
193 | ftotalsize += fsize | 215 | print(' %s' % item) |
194 | # The path as it would appear on the target | 216 | return 0 |
195 | fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn) | ||
196 | filelist.append((fpath, fsize)) | ||
197 | |||
198 | if args.dry_run: | ||
199 | print('Files to be deployed for %s on target %s:' % (args.recipename, args.target)) | ||
200 | for item, _ in filelist: | ||
201 | print(' %s' % item) | ||
202 | return 0 | ||
203 | |||
204 | extraoptions = '' | ||
205 | if args.no_host_check: | ||
206 | extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' | ||
207 | if not args.show_status: | ||
208 | extraoptions += ' -q' | ||
209 | |||
210 | scp_sshexec = '' | ||
211 | ssh_sshexec = 'ssh' | ||
212 | if args.ssh_exec: | ||
213 | scp_sshexec = "-S %s" % args.ssh_exec | ||
214 | ssh_sshexec = args.ssh_exec | ||
215 | scp_port = '' | ||
216 | ssh_port = '' | ||
217 | if args.port: | ||
218 | scp_port = "-P %s" % args.port | ||
219 | ssh_port = "-p %s" % args.port | ||
220 | |||
221 | if args.key: | ||
222 | extraoptions += ' -i %s' % args.key | ||
223 | |||
224 | # In order to delete previously deployed files and have the manifest file on | ||
225 | # the target, we write out a shell script and then copy it to the target | ||
226 | # so we can then run it (piping tar output to it). | ||
227 | # (We cannot use scp here, because it doesn't preserve symlinks.) | ||
228 | tmpdir = tempfile.mkdtemp(prefix='devtool') | ||
229 | try: | ||
230 | tmpscript = '/tmp/devtool_deploy.sh' | ||
231 | tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list') | ||
232 | shellscript = _prepare_remote_script(deploy=True, | ||
233 | verbose=args.show_status, | ||
234 | nopreserve=args.no_preserve, | ||
235 | nocheckspace=args.no_check_space) | ||
236 | # Write out the script to a file | ||
237 | with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f: | ||
238 | f.write(shellscript) | ||
239 | # Write out the file list | ||
240 | with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f: | ||
241 | f.write('%d\n' % ftotalsize) | ||
242 | for fpath, fsize in filelist: | ||
243 | f.write('%s %d\n' % (fpath, fsize)) | ||
244 | # Copy them to the target | ||
245 | ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) | ||
246 | if ret != 0: | ||
247 | raise DevtoolError('Failed to copy script to %s - rerun with -s to ' | ||
248 | 'get a complete error message' % args.target) | ||
249 | finally: | ||
250 | shutil.rmtree(tmpdir) | ||
251 | 217 | ||
252 | # Now run the script | 218 | extraoptions = '' |
253 | ret = exec_fakeroot(rd, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True) | 219 | if args.no_host_check: |
254 | if ret != 0: | 220 | extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' |
255 | raise DevtoolError('Deploy failed - rerun with -s to get a complete ' | 221 | if not args.show_status: |
256 | 'error message') | 222 | extraoptions += ' -q' |
257 | 223 | ||
258 | logger.info('Successfully deployed %s' % recipe_outdir) | 224 | scp_sshexec = '' |
225 | ssh_sshexec = 'ssh' | ||
226 | if args.ssh_exec: | ||
227 | scp_sshexec = "-S %s" % args.ssh_exec | ||
228 | ssh_sshexec = args.ssh_exec | ||
229 | scp_port = '' | ||
230 | ssh_port = '' | ||
231 | if args.port: | ||
232 | scp_port = "-P %s" % args.port | ||
233 | ssh_port = "-p %s" % args.port | ||
234 | |||
235 | if args.key: | ||
236 | extraoptions += ' -i %s' % args.key | ||
259 | 237 | ||
260 | files_list = [] | 238 | # In order to delete previously deployed files and have the manifest file on |
261 | for root, _, files in os.walk(recipe_outdir): | 239 | # the target, we write out a shell script and then copy it to the target |
262 | for filename in files: | 240 | # so we can then run it (piping tar output to it). |
263 | filename = os.path.relpath(os.path.join(root, filename), recipe_outdir) | 241 | # (We cannot use scp here, because it doesn't preserve symlinks.) |
264 | files_list.append(os.path.join(destdir, filename)) | 242 | tmpdir = tempfile.mkdtemp(prefix='devtool') |
243 | try: | ||
244 | tmpscript = '/tmp/devtool_deploy.sh' | ||
245 | tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list') | ||
246 | shellscript = _prepare_remote_script(deploy=True, | ||
247 | verbose=args.show_status, | ||
248 | nopreserve=args.no_preserve, | ||
249 | nocheckspace=args.no_check_space) | ||
250 | # Write out the script to a file | ||
251 | with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f: | ||
252 | f.write(shellscript) | ||
253 | # Write out the file list | ||
254 | with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f: | ||
255 | f.write('%d\n' % ftotalsize) | ||
256 | for fpath, fsize in filelist: | ||
257 | f.write('%s %d\n' % (fpath, fsize)) | ||
258 | # Copy them to the target | ||
259 | ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) | ||
260 | if ret != 0: | ||
261 | raise DevtoolError('Failed to copy script to %s - rerun with -s to ' | ||
262 | 'get a complete error message' % args.target) | ||
265 | finally: | 263 | finally: |
266 | tinfoil.shutdown() | 264 | shutil.rmtree(tmpdir) |
265 | |||
266 | # Now run the script | ||
267 | ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True) | ||
268 | if ret != 0: | ||
269 | raise DevtoolError('Deploy failed - rerun with -s to get a complete ' | ||
270 | 'error message') | ||
271 | |||
272 | logger.info('Successfully deployed %s' % recipe_outdir) | ||
273 | |||
274 | files_list = [] | ||
275 | for root, _, files in os.walk(recipe_outdir): | ||
276 | for filename in files: | ||
277 | filename = os.path.relpath(os.path.join(root, filename), recipe_outdir) | ||
278 | files_list.append(os.path.join(destdir, filename)) | ||
267 | 279 | ||
268 | return 0 | 280 | return 0 |
269 | 281 | ||
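The hunk above switches the deploy path from exec_fakeroot(rd, ...) to exec_fakeroot_no_d(fakerootcmd, fakerootenv, ...), so the copy/strip/tar steps can run under pseudo from pre-collected FAKEROOTCMD and FAKEROOTENV values without a parsed recipe datastore (which is what devtool ide-sdk below relies on). A minimal sketch of what such a helper could look like, assuming FAKEROOTENV is a space-separated list of VAR=value assignments; the real helper in devtool may differ:

import os
import subprocess

def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
    # Run cmd under the pseudo wrapper without a bitbake datastore.
    # Callers in the hunk above pass shell=True (and optionally cwd) via kwargs.
    if not os.path.exists(fakerootcmd):
        print('Error: fakeroot command %s not found' % fakerootcmd)
        return 1
    env = dict(os.environ)
    for assignment in fakerootenv.split():
        key, _, value = assignment.partition('=')
        env[key] = value
    return subprocess.call('%s %s' % (fakerootcmd, cmd), env=env, **kwargs)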
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py new file mode 100644 index 0000000000..19c2f61c5f --- /dev/null +++ b/scripts/lib/devtool/ide_plugins/__init__.py | |||
@@ -0,0 +1,282 @@ | |||
1 | # | ||
2 | # Copyright (C) 2023-2024 Siemens AG | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | """Devtool ide-sdk IDE plugin interface definition and helper functions""" | ||
7 | |||
8 | import errno | ||
9 | import json | ||
10 | import logging | ||
11 | import os | ||
12 | import stat | ||
13 | from enum import Enum, auto | ||
14 | from devtool import DevtoolError | ||
15 | from bb.utils import mkdirhier | ||
16 | |||
17 | logger = logging.getLogger('devtool') | ||
18 | |||
19 | |||
20 | class BuildTool(Enum): | ||
21 | UNDEFINED = auto() | ||
22 | CMAKE = auto() | ||
23 | MESON = auto() | ||
24 | |||
25 | @property | ||
26 | def is_c_ccp(self): | ||
27 | if self is BuildTool.CMAKE: | ||
28 | return True | ||
29 | if self is BuildTool.MESON: | ||
30 | return True | ||
31 | return False | ||
32 | |||
33 | |||
34 | class GdbCrossConfig: | ||
35 | """Base class defining the GDB configuration generator interface | ||
36 | |||
37 | Generate a GDB configuration for a binary on the target device. | ||
38 | Only one instance per binary is allowed. This allows assigning unique port | ||
39 | numbers for all gdbserver instances. | ||
40 | """ | ||
41 | _gdbserver_port_next = 1234 | ||
42 | _binaries = [] | ||
43 | |||
44 | def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True): | ||
45 | self.image_recipe = image_recipe | ||
46 | self.modified_recipe = modified_recipe | ||
47 | self.gdb_cross = modified_recipe.gdb_cross | ||
48 | self.binary = binary | ||
49 | if binary in GdbCrossConfig._binaries: | ||
50 | raise DevtoolError( | ||
51 | "gdbserver config for binary %s is already generated" % binary) | ||
52 | GdbCrossConfig._binaries.append(binary) | ||
53 | self.script_dir = modified_recipe.ide_sdk_scripts_dir | ||
54 | self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit') | ||
55 | self.gdbserver_multi = gdbserver_multi | ||
56 | self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-') | ||
57 | self.gdbserver_port = GdbCrossConfig._gdbserver_port_next | ||
58 | GdbCrossConfig._gdbserver_port_next += 1 | ||
59 | self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty) | ||
60 | # gdbserver start script | ||
61 | gdbserver_script_file = 'gdbserver_' + self.id_pretty | ||
62 | if self.gdbserver_multi: | ||
63 | gdbserver_script_file += "_m" | ||
64 | self.gdbserver_script = os.path.join( | ||
65 | self.script_dir, gdbserver_script_file) | ||
66 | # gdbinit file | ||
67 | self.gdbinit = os.path.join( | ||
68 | self.gdbinit_dir, 'gdbinit_' + self.id_pretty) | ||
69 | # gdb start script | ||
70 | self.gdb_script = os.path.join( | ||
71 | self.script_dir, 'gdb_' + self.id_pretty) | ||
72 | |||
73 | def _gen_gdbserver_start_script(self): | ||
74 | """Generate a shell command starting the gdbserver on the remote device via ssh | ||
75 | |||
76 | GDB supports two modes: | ||
77 | multi: gdbserver remains running over several debug sessions | ||
78 | once: gdbserver terminates after the debugged process terminates | ||
79 | """ | ||
80 | cmd_lines = ['#!/bin/sh'] | ||
81 | if self.gdbserver_multi: | ||
82 | temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty | ||
83 | gdbserver_cmd_start = temp_dir | ||
84 | gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; " | ||
85 | gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; " | ||
86 | gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % ( | ||
87 | self.gdb_cross.gdbserver_path, self.gdbserver_port) | ||
88 | gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;" | ||
89 | |||
90 | gdbserver_cmd_stop = temp_dir | ||
91 | gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); " | ||
92 | gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; " | ||
93 | |||
94 | gdbserver_cmd_l = [] | ||
95 | gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then') | ||
96 | gdbserver_cmd_l.append(' shift') | ||
97 | gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % ( | ||
98 | self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop)) | ||
99 | gdbserver_cmd_l.append('else') | ||
100 | gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % ( | ||
101 | self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)) | ||
102 | gdbserver_cmd_l.append('fi') | ||
103 | gdbserver_cmd = os.linesep.join(gdbserver_cmd_l) | ||
104 | else: | ||
105 | gdbserver_cmd_start = "%s --once :%s %s" % ( | ||
106 | self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary) | ||
107 | gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % ( | ||
108 | self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start) | ||
109 | cmd_lines.append(gdbserver_cmd) | ||
110 | GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True) | ||
111 | |||
112 | def _gen_gdbinit_config(self): | ||
113 | """Generate a gdbinit file for this binary and the corresponding gdbserver configuration""" | ||
114 | gdbinit_lines = ['# This file is generated by devtool ide-sdk'] | ||
115 | if self.gdbserver_multi: | ||
116 | target_help = '# gdbserver --multi :%d' % self.gdbserver_port | ||
117 | remote_cmd = 'target extended-remote' | ||
118 | else: | ||
119 | target_help = '# gdbserver :%d %s' % ( | ||
120 | self.gdbserver_port, self.binary) | ||
121 | remote_cmd = 'target remote' | ||
122 | gdbinit_lines.append('# On the remote target:') | ||
123 | gdbinit_lines.append(target_help) | ||
124 | gdbinit_lines.append('# On the build machine:') | ||
125 | gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree) | ||
126 | gdbinit_lines.append( | ||
127 | '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit) | ||
128 | |||
129 | gdbinit_lines.append('set sysroot ' + self.modified_recipe.d) | ||
130 | gdbinit_lines.append('set substitute-path "/usr/include" "' + | ||
131 | os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"') | ||
132 | # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir. | ||
133 | gdbinit_lines.append('set debuginfod enabled off') | ||
134 | if self.image_recipe.rootfs_dbg: | ||
135 | gdbinit_lines.append( | ||
136 | 'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"') | ||
137 | # First: Search for sources of this recipe in the workspace folder | ||
138 | if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir: | ||
139 | gdbinit_lines.append('set substitute-path "%s" "%s"' % | ||
140 | (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree)) | ||
141 | else: | ||
142 | logger.error( | ||
143 | "TARGET_DBGSRC_DIR must contain the recipe name PN.") | ||
144 | # Second: Search for sources of other recipes in the rootfs-dbg | ||
145 | if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"): | ||
146 | gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join( | ||
147 | self.image_recipe.rootfs_dbg, "usr", "src", "debug")) | ||
148 | else: | ||
149 | logger.error( | ||
150 | "TARGET_DBGSRC_DIR must start with /usr/src/debug.") | ||
151 | else: | ||
152 | logger.warning( | ||
153 | "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.") | ||
154 | gdbinit_lines.append( | ||
155 | '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port)) | ||
156 | gdbinit_lines.append('set remote exec-file ' + self.binary) | ||
157 | gdbinit_lines.append( | ||
158 | 'run ' + os.path.join(self.modified_recipe.d, self.binary)) | ||
159 | |||
160 | GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines) | ||
161 | |||
162 | def _gen_gdb_start_script(self): | ||
163 | """Generate a script starting GDB with the corresponding gdbinit configuration.""" | ||
164 | cmd_lines = ['#!/bin/sh'] | ||
165 | cmd_lines.append('cd ' + self.modified_recipe.real_srctree) | ||
166 | cmd_lines.append(self.gdb_cross.gdb + ' -ix ' + | ||
167 | self.gdbinit + ' "$@"') | ||
168 | GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True) | ||
169 | |||
170 | def initialize(self): | ||
171 | self._gen_gdbserver_start_script() | ||
172 | self._gen_gdbinit_config() | ||
173 | self._gen_gdb_start_script() | ||
174 | |||
175 | @staticmethod | ||
176 | def write_file(script_file, cmd_lines, executable=False): | ||
177 | script_dir = os.path.dirname(script_file) | ||
178 | mkdirhier(script_dir) | ||
179 | with open(script_file, 'w') as script_f: | ||
180 | script_f.write(os.linesep.join(cmd_lines)) | ||
181 | script_f.write(os.linesep) | ||
182 | if executable: | ||
183 | st = os.stat(script_file) | ||
184 | os.chmod(script_file, st.st_mode | stat.S_IEXEC) | ||
185 | logger.info("Created: %s" % script_file) | ||
186 | |||
187 | |||
188 | class IdeBase: | ||
189 | """Base class defining the interface for IDE plugins""" | ||
190 | |||
191 | def __init__(self): | ||
192 | self.ide_name = 'undefined' | ||
193 | self.gdb_cross_configs = [] | ||
194 | |||
195 | @classmethod | ||
196 | def ide_plugin_priority(cls): | ||
197 | """Used to find the default ide handler if --ide is not passed""" | ||
198 | return 10 | ||
199 | |||
200 | def setup_shared_sysroots(self, shared_env): | ||
201 | logger.warn("Shared sysroot mode is not supported for IDE %s" % | ||
202 | self.ide_name) | ||
203 | |||
204 | def setup_modified_recipe(self, args, image_recipe, modified_recipe): | ||
205 | logger.warn("Modified recipe mode is not supported for IDE %s" % | ||
206 | self.ide_name) | ||
207 | |||
208 | def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig): | ||
209 | binaries = modified_recipe.find_installed_binaries() | ||
210 | for binary in binaries: | ||
211 | gdb_cross_config = gdb_cross_config_class( | ||
212 | image_recipe, modified_recipe, binary) | ||
213 | gdb_cross_config.initialize() | ||
214 | self.gdb_cross_configs.append(gdb_cross_config) | ||
215 | |||
216 | @staticmethod | ||
217 | def gen_oe_scrtips_sym_link(modified_recipe): | ||
218 | # create a sym-link from sources to the scripts directory | ||
219 | if os.path.isdir(modified_recipe.ide_sdk_scripts_dir): | ||
220 | IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir, | ||
221 | os.path.join(modified_recipe.real_srctree, 'oe-scripts')) | ||
222 | |||
223 | @staticmethod | ||
224 | def update_json_file(json_dir, json_file, update_dict): | ||
225 | """Update a json file | ||
226 | |||
227 | The content of update_dict is merged into the existing file with | ||
228 | dict.update, so top-level keys in update_dict replace existing ones. | ||
229 | """ | ||
230 | json_path = os.path.join(json_dir, json_file) | ||
231 | logger.info("Updating IDE config file: %s (%s)" % | ||
232 | (json_file, json_path)) | ||
233 | if not os.path.exists(json_dir): | ||
234 | os.makedirs(json_dir) | ||
235 | try: | ||
236 | with open(json_path) as f: | ||
237 | orig_dict = json.load(f) | ||
238 | except json.decoder.JSONDecodeError: | ||
239 | logger.info( | ||
240 | "Decoding %s failed. Probably because of comments in the json file" % json_path) | ||
241 | orig_dict = {} | ||
242 | except FileNotFoundError: | ||
243 | orig_dict = {} | ||
244 | orig_dict.update(update_dict) | ||
245 | with open(json_path, 'w') as f: | ||
246 | json.dump(orig_dict, f, indent=4) | ||
247 | |||
248 | @staticmethod | ||
249 | def symlink_force(tgt, dst): | ||
250 | try: | ||
251 | os.symlink(tgt, dst) | ||
252 | except OSError as err: | ||
253 | if err.errno == errno.EEXIST: | ||
254 | if os.readlink(dst) != tgt: | ||
255 | os.remove(dst) | ||
256 | os.symlink(tgt, dst) | ||
257 | else: | ||
258 | raise err | ||
259 | |||
260 | |||
261 | def get_devtool_deploy_opts(args): | ||
262 | """Filter args for devtool deploy-target args""" | ||
263 | if not args.target: | ||
264 | return None | ||
265 | devtool_deploy_opts = [args.target] | ||
266 | if args.no_host_check: | ||
267 | devtool_deploy_opts += ["-c"] | ||
268 | if args.show_status: | ||
269 | devtool_deploy_opts += ["-s"] | ||
270 | if args.no_preserve: | ||
271 | devtool_deploy_opts += ["-p"] | ||
272 | if args.no_check_space: | ||
273 | devtool_deploy_opts += ["--no-check-space"] | ||
274 | if args.ssh_exec: | ||
275 | devtool_deploy_opts += ["-e", args.ssh_exec] | ||
276 | if args.port: | ||
277 | devtool_deploy_opts += ["-P", args.port] | ||
278 | if args.key: | ||
279 | devtool_deploy_opts += ["-I", args.key] | ||
280 | if args.strip is False: | ||
281 | devtool_deploy_opts += ["--no-strip"] | ||
282 | return devtool_deploy_opts | ||
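The module above only defines the plugin interface (IdeBase, GdbCrossConfig) and shared helpers; concrete plugins live next to it and expose a register_ide_plugin() hook that adds their class to the ide_plugins dict, as ide_code.py and ide_none.py do below. A sketch of a hypothetical extra plugin, with the module name and the 'example' key invented for illustration:

# hypothetical scripts/lib/devtool/ide_plugins/ide_example.py
import logging
from devtool.ide_plugins import IdeBase

logger = logging.getLogger('devtool')

class IdeExample(IdeBase):
    """Illustrative plugin for some other editor"""

    @classmethod
    def ide_plugin_priority(cls):
        # Lower than IdeVSCode (100), so it is never chosen as the default
        return 5

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        # Reuse the generic gdbserver/gdbinit script generation from IdeBase
        if args.target:
            self.initialize_gdb_cross_configs(image_recipe, modified_recipe)
        logger.info("Helper scripts are in %s" % modified_recipe.ide_sdk_scripts_dir)

def register_ide_plugin(ide_plugins):
    ide_plugins['example'] = IdeExample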
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py new file mode 100644 index 0000000000..ee5bb57265 --- /dev/null +++ b/scripts/lib/devtool/ide_plugins/ide_code.py | |||
@@ -0,0 +1,462 @@ | |||
1 | # | ||
2 | # Copyright (C) 2023-2024 Siemens AG | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | """Devtool ide-sdk IDE plugin for VSCode and VSCodium""" | ||
7 | |||
8 | import json | ||
9 | import logging | ||
10 | import os | ||
11 | import shutil | ||
12 | from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts | ||
13 | |||
14 | logger = logging.getLogger('devtool') | ||
15 | |||
16 | |||
17 | class GdbCrossConfigVSCode(GdbCrossConfig): | ||
18 | def __init__(self, image_recipe, modified_recipe, binary): | ||
19 | super().__init__(image_recipe, modified_recipe, binary, False) | ||
20 | |||
21 | def initialize(self): | ||
22 | self._gen_gdbserver_start_script() | ||
23 | |||
24 | |||
25 | class IdeVSCode(IdeBase): | ||
26 | """Manage IDE configurations for VSCode | ||
27 | |||
28 | Modified recipe mode: | ||
29 | - cmake: use the cmake-preset generated by devtool ide-sdk | ||
30 | - meson: meson is called via a wrapper script generated by devtool ide-sdk | ||
31 | |||
32 | Shared sysroot mode: | ||
33 | In shared sysroot mode, the cross tool-chain is exported to the user's global configuration. | ||
34 | A workspace cannot be created because there is no recipe that defines how a workspace could | ||
35 | be set up. | ||
36 | - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json | ||
37 | The cmake-kit uses the environment script and the tool-chain file | ||
38 | generated by meta-ide-support. | ||
39 | - meson: Meson needs manual workspace configuration. | ||
40 | """ | ||
41 | |||
42 | @classmethod | ||
43 | def ide_plugin_priority(cls): | ||
44 | """If --ide is not passed this is the default plugin""" | ||
45 | if shutil.which('code'): | ||
46 | return 100 | ||
47 | return 0 | ||
48 | |||
49 | def setup_shared_sysroots(self, shared_env): | ||
50 | """Expose the toolchain of the shared sysroots SDK""" | ||
51 | datadir = shared_env.ide_support.datadir | ||
52 | deploy_dir_image = shared_env.ide_support.deploy_dir_image | ||
53 | real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys | ||
54 | standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native | ||
55 | vscode_ws_path = os.path.join( | ||
56 | os.environ['HOME'], '.local', 'share', 'CMakeTools') | ||
57 | cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json') | ||
58 | oecmake_generator = "Ninja" | ||
59 | env_script = os.path.join( | ||
60 | deploy_dir_image, 'environment-setup-' + real_multimach_target_sys) | ||
61 | |||
62 | if not os.path.isdir(vscode_ws_path): | ||
63 | os.makedirs(vscode_ws_path) | ||
64 | cmake_kits_old = [] | ||
65 | if os.path.exists(cmake_kits_path): | ||
66 | with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file: | ||
67 | cmake_kits_old = json.load(cmake_kits_file) | ||
68 | cmake_kits = cmake_kits_old.copy() | ||
69 | |||
70 | cmake_kit_new = { | ||
71 | "name": "OE " + real_multimach_target_sys, | ||
72 | "environmentSetupScript": env_script, | ||
73 | "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake", | ||
74 | "preferredGenerator": { | ||
75 | "name": oecmake_generator | ||
76 | } | ||
77 | } | ||
78 | |||
79 | def merge_kit(cmake_kits, cmake_kit_new): | ||
80 | i = 0 | ||
81 | while i < len(cmake_kits): | ||
82 | if 'environmentSetupScript' in cmake_kits[i] and \ | ||
83 | cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']: | ||
84 | cmake_kits[i] = cmake_kit_new | ||
85 | return | ||
86 | i += 1 | ||
87 | cmake_kits.append(cmake_kit_new) | ||
88 | merge_kit(cmake_kits, cmake_kit_new) | ||
89 | |||
90 | if cmake_kits != cmake_kits_old: | ||
91 | logger.info("Updating: %s" % cmake_kits_path) | ||
92 | with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file: | ||
93 | json.dump(cmake_kits, cmake_kits_file, indent=4) | ||
94 | else: | ||
95 | logger.info("Already up to date: %s" % cmake_kits_path) | ||
96 | |||
97 | cmake_native = os.path.join( | ||
98 | shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake') | ||
99 | if os.path.isfile(cmake_native): | ||
100 | logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to the ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native) | ||
101 | else: | ||
102 | logger.error("Cannot find cmake native at: %s" % cmake_native) | ||
103 | |||
104 | def dot_code_dir(self, modified_recipe): | ||
105 | return os.path.join(modified_recipe.srctree, '.vscode') | ||
106 | |||
107 | def __vscode_settings_meson(self, settings_dict, modified_recipe): | ||
108 | if modified_recipe.build_tool is not BuildTool.MESON: | ||
109 | return | ||
110 | settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper | ||
111 | |||
112 | confopts = modified_recipe.mesonopts.split() | ||
113 | confopts += modified_recipe.meson_cross_file.split() | ||
114 | confopts += modified_recipe.extra_oemeson.split() | ||
115 | settings_dict["mesonbuild.configureOptions"] = confopts | ||
116 | settings_dict["mesonbuild.buildFolder"] = modified_recipe.b | ||
117 | |||
118 | def __vscode_settings_cmake(self, settings_dict, modified_recipe): | ||
119 | """Add cmake specific settings to settings.json. | ||
120 | |||
121 | Note: most settings are passed to the cmake preset. | ||
122 | """ | ||
123 | if modified_recipe.build_tool is not BuildTool.CMAKE: | ||
124 | return | ||
125 | settings_dict["cmake.configureOnOpen"] = True | ||
126 | settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree | ||
127 | |||
128 | def vscode_settings(self, modified_recipe, image_recipe): | ||
129 | files_excludes = { | ||
130 | "**/.git/**": True, | ||
131 | "**/oe-logs/**": True, | ||
132 | "**/oe-workdir/**": True, | ||
133 | "**/source-date-epoch/**": True | ||
134 | } | ||
135 | python_exclude = [ | ||
136 | "**/.git/**", | ||
137 | "**/oe-logs/**", | ||
138 | "**/oe-workdir/**", | ||
139 | "**/source-date-epoch/**" | ||
140 | ] | ||
141 | files_readonly = { | ||
142 | modified_recipe.recipe_sysroot + '/**': True, | ||
143 | modified_recipe.recipe_sysroot_native + '/**': True, | ||
144 | } | ||
145 | if image_recipe.rootfs_dbg is not None: | ||
146 | files_readonly[image_recipe.rootfs_dbg + '/**'] = True | ||
147 | settings_dict = { | ||
148 | "files.watcherExclude": files_excludes, | ||
149 | "files.exclude": files_excludes, | ||
150 | "files.readonlyInclude": files_readonly, | ||
151 | "python.analysis.exclude": python_exclude | ||
152 | } | ||
153 | self.__vscode_settings_cmake(settings_dict, modified_recipe) | ||
154 | self.__vscode_settings_meson(settings_dict, modified_recipe) | ||
155 | |||
156 | settings_file = 'settings.json' | ||
157 | IdeBase.update_json_file( | ||
158 | self.dot_code_dir(modified_recipe), settings_file, settings_dict) | ||
159 | |||
160 | def __vscode_extensions_cmake(self, modified_recipe, recommendations): | ||
161 | if modified_recipe.build_tool is not BuildTool.CMAKE: | ||
162 | return | ||
163 | recommendations += [ | ||
164 | "ms-vscode.cmake-tools", | ||
165 | "ms-vscode.cpptools", | ||
166 | "ms-vscode.cpptools-extension-pack", | ||
167 | "ms-vscode.cpptools-themes" | ||
168 | ] | ||
169 | |||
170 | def __vscode_extensions_meson(self, modified_recipe, recommendations): | ||
171 | if modified_recipe.build_tool is not BuildTool.MESON: | ||
172 | return | ||
173 | recommendations += [ | ||
174 | 'mesonbuild.mesonbuild', | ||
175 | "ms-vscode.cpptools", | ||
176 | "ms-vscode.cpptools-extension-pack", | ||
177 | "ms-vscode.cpptools-themes" | ||
178 | ] | ||
179 | |||
180 | def vscode_extensions(self, modified_recipe): | ||
181 | recommendations = [] | ||
182 | self.__vscode_extensions_cmake(modified_recipe, recommendations) | ||
183 | self.__vscode_extensions_meson(modified_recipe, recommendations) | ||
184 | extensions_file = 'extensions.json' | ||
185 | IdeBase.update_json_file( | ||
186 | self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations}) | ||
187 | |||
188 | def vscode_c_cpp_properties(self, modified_recipe): | ||
189 | properties_dict = { | ||
190 | "name": modified_recipe.recipe_id_pretty, | ||
191 | } | ||
192 | if modified_recipe.build_tool is BuildTool.CMAKE: | ||
193 | properties_dict["configurationProvider"] = "ms-vscode.cmake-tools" | ||
194 | elif modified_recipe.build_tool is BuildTool.MESON: | ||
195 | properties_dict["configurationProvider"] = "mesonbuild.mesonbuild" | ||
196 | properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0]) | ||
197 | else: # no C/C++ build | ||
198 | return | ||
199 | |||
200 | properties_dicts = { | ||
201 | "configurations": [ | ||
202 | properties_dict | ||
203 | ], | ||
204 | "version": 4 | ||
205 | } | ||
206 | prop_file = 'c_cpp_properties.json' | ||
207 | IdeBase.update_json_file( | ||
208 | self.dot_code_dir(modified_recipe), prop_file, properties_dicts) | ||
209 | |||
210 | def vscode_launch_bin_dbg(self, gdb_cross_config): | ||
211 | modified_recipe = gdb_cross_config.modified_recipe | ||
212 | |||
213 | launch_config = { | ||
214 | "name": gdb_cross_config.id_pretty, | ||
215 | "type": "cppdbg", | ||
216 | "request": "launch", | ||
217 | "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')), | ||
218 | "stopAtEntry": True, | ||
219 | "cwd": "${workspaceFolder}", | ||
220 | "environment": [], | ||
221 | "externalConsole": False, | ||
222 | "MIMode": "gdb", | ||
223 | "preLaunchTask": gdb_cross_config.id_pretty, | ||
224 | "miDebuggerPath": modified_recipe.gdb_cross.gdb, | ||
225 | "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port) | ||
226 | } | ||
227 | |||
228 | # Search for header files in recipe-sysroot. | ||
229 | src_file_map = { | ||
230 | "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include") | ||
231 | } | ||
232 | # First of all search for not stripped binaries in the image folder. | ||
233 | # These binaries are copied (and optionally stripped) by deploy-target | ||
234 | setup_commands = [ | ||
235 | { | ||
236 | "description": "sysroot", | ||
237 | "text": "set sysroot " + modified_recipe.d | ||
238 | } | ||
239 | ] | ||
240 | |||
241 | if gdb_cross_config.image_recipe.rootfs_dbg: | ||
242 | launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str( | ||
243 | gdb_cross_config.image_recipe) | ||
244 | # First: Search for sources of this recipe in the workspace folder | ||
245 | if modified_recipe.pn in modified_recipe.target_dbgsrc_dir: | ||
246 | src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}" | ||
247 | else: | ||
248 | logger.error( | ||
249 | "TARGET_DBGSRC_DIR must contain the recipe name PN.") | ||
250 | # Second: Search for sources of other recipes in the rootfs-dbg | ||
251 | if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"): | ||
252 | src_file_map["/usr/src/debug"] = os.path.join( | ||
253 | gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug") | ||
254 | else: | ||
255 | logger.error( | ||
256 | "TARGET_DBGSRC_DIR must start with /usr/src/debug.") | ||
257 | else: | ||
258 | logger.warning( | ||
259 | "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.") | ||
260 | |||
261 | launch_config['sourceFileMap'] = src_file_map | ||
262 | launch_config['setupCommands'] = setup_commands | ||
263 | return launch_config | ||
264 | |||
265 | def vscode_launch(self, modified_recipe): | ||
266 | """GDB Launch configuration for binaries (elf files)""" | ||
267 | |||
268 | configurations = [] | ||
269 | for gdb_cross_config in self.gdb_cross_configs: | ||
270 | if gdb_cross_config.modified_recipe is modified_recipe: | ||
271 | configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config)) | ||
272 | launch_dict = { | ||
273 | "version": "0.2.0", | ||
274 | "configurations": configurations | ||
275 | } | ||
276 | launch_file = 'launch.json' | ||
277 | IdeBase.update_json_file( | ||
278 | self.dot_code_dir(modified_recipe), launch_file, launch_dict) | ||
279 | |||
280 | def vscode_tasks_cpp(self, args, modified_recipe): | ||
281 | run_install_deploy = modified_recipe.gen_install_deploy_script(args) | ||
282 | install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty | ||
283 | tasks_dict = { | ||
284 | "version": "2.0.0", | ||
285 | "tasks": [ | ||
286 | { | ||
287 | "label": install_task_name, | ||
288 | "type": "shell", | ||
289 | "command": run_install_deploy, | ||
290 | "problemMatcher": [] | ||
291 | } | ||
292 | ] | ||
293 | } | ||
294 | for gdb_cross_config in self.gdb_cross_configs: | ||
295 | if gdb_cross_config.modified_recipe is not modified_recipe: | ||
296 | continue | ||
297 | tasks_dict['tasks'].append( | ||
298 | { | ||
299 | "label": gdb_cross_config.id_pretty, | ||
300 | "type": "shell", | ||
301 | "isBackground": True, | ||
302 | "dependsOn": [ | ||
303 | install_task_name | ||
304 | ], | ||
305 | "command": gdb_cross_config.gdbserver_script, | ||
306 | "problemMatcher": [ | ||
307 | { | ||
308 | "pattern": [ | ||
309 | { | ||
310 | "regexp": ".", | ||
311 | "file": 1, | ||
312 | "location": 2, | ||
313 | "message": 3 | ||
314 | } | ||
315 | ], | ||
316 | "background": { | ||
317 | "activeOnStart": True, | ||
318 | "beginsPattern": ".", | ||
319 | "endsPattern": ".", | ||
320 | } | ||
321 | } | ||
322 | ] | ||
323 | }) | ||
324 | tasks_file = 'tasks.json' | ||
325 | IdeBase.update_json_file( | ||
326 | self.dot_code_dir(modified_recipe), tasks_file, tasks_dict) | ||
327 | |||
328 | def vscode_tasks_fallback(self, args, modified_recipe): | ||
329 | oe_init_dir = modified_recipe.oe_init_dir | ||
330 | oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir) | ||
331 | dt_build = "devtool build " | ||
332 | dt_build_label = dt_build + modified_recipe.recipe_id_pretty | ||
333 | dt_build_cmd = dt_build + modified_recipe.bpn | ||
334 | clean_opt = " --clean" | ||
335 | dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt | ||
336 | dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt | ||
337 | dt_deploy = "devtool deploy-target " | ||
338 | dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty | ||
339 | dt_deploy_cmd = dt_deploy + modified_recipe.bpn | ||
340 | dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty | ||
341 | deploy_opts = ' '.join(get_devtool_deploy_opts(args)) | ||
342 | tasks_dict = { | ||
343 | "version": "2.0.0", | ||
344 | "tasks": [ | ||
345 | { | ||
346 | "label": dt_build_label, | ||
347 | "type": "shell", | ||
348 | "command": "bash", | ||
349 | "linux": { | ||
350 | "options": { | ||
351 | "cwd": oe_init_dir | ||
352 | } | ||
353 | }, | ||
354 | "args": [ | ||
355 | "--login", | ||
356 | "-c", | ||
357 | "%s%s" % (oe_init, dt_build_cmd) | ||
358 | ], | ||
359 | "problemMatcher": [] | ||
360 | }, | ||
361 | { | ||
362 | "label": dt_deploy_label, | ||
363 | "type": "shell", | ||
364 | "command": "bash", | ||
365 | "linux": { | ||
366 | "options": { | ||
367 | "cwd": oe_init_dir | ||
368 | } | ||
369 | }, | ||
370 | "args": [ | ||
371 | "--login", | ||
372 | "-c", | ||
373 | "%s%s %s" % ( | ||
374 | oe_init, dt_deploy_cmd, deploy_opts) | ||
375 | ], | ||
376 | "problemMatcher": [] | ||
377 | }, | ||
378 | { | ||
379 | "label": dt_build_deploy_label, | ||
380 | "dependsOrder": "sequence", | ||
381 | "dependsOn": [ | ||
382 | dt_build_label, | ||
383 | dt_deploy_label | ||
384 | ], | ||
385 | "problemMatcher": [], | ||
386 | "group": { | ||
387 | "kind": "build", | ||
388 | "isDefault": True | ||
389 | } | ||
390 | }, | ||
391 | { | ||
392 | "label": dt_build_clean_label, | ||
393 | "type": "shell", | ||
394 | "command": "bash", | ||
395 | "linux": { | ||
396 | "options": { | ||
397 | "cwd": oe_init_dir | ||
398 | } | ||
399 | }, | ||
400 | "args": [ | ||
401 | "--login", | ||
402 | "-c", | ||
403 | "%s%s" % (oe_init, dt_build_clean_cmd) | ||
404 | ], | ||
405 | "problemMatcher": [] | ||
406 | } | ||
407 | ] | ||
408 | } | ||
409 | if modified_recipe.gdb_cross: | ||
410 | for gdb_cross_config in self.gdb_cross_configs: | ||
411 | if gdb_cross_config.modified_recipe is not modified_recipe: | ||
412 | continue | ||
413 | tasks_dict['tasks'].append( | ||
414 | { | ||
415 | "label": gdb_cross_config.id_pretty, | ||
416 | "type": "shell", | ||
417 | "isBackground": True, | ||
418 | "dependsOn": [ | ||
419 | dt_build_deploy_label | ||
420 | ], | ||
421 | "command": gdb_cross_config.gdbserver_script, | ||
422 | "problemMatcher": [ | ||
423 | { | ||
424 | "pattern": [ | ||
425 | { | ||
426 | "regexp": ".", | ||
427 | "file": 1, | ||
428 | "location": 2, | ||
429 | "message": 3 | ||
430 | } | ||
431 | ], | ||
432 | "background": { | ||
433 | "activeOnStart": True, | ||
434 | "beginsPattern": ".", | ||
435 | "endsPattern": ".", | ||
436 | } | ||
437 | } | ||
438 | ] | ||
439 | }) | ||
440 | tasks_file = 'tasks.json' | ||
441 | IdeBase.update_json_file( | ||
442 | self.dot_code_dir(modified_recipe), tasks_file, tasks_dict) | ||
443 | |||
444 | def vscode_tasks(self, args, modified_recipe): | ||
445 | if modified_recipe.build_tool.is_c_ccp: | ||
446 | self.vscode_tasks_cpp(args, modified_recipe) | ||
447 | else: | ||
448 | self.vscode_tasks_fallback(args, modified_recipe) | ||
449 | |||
450 | def setup_modified_recipe(self, args, image_recipe, modified_recipe): | ||
451 | self.vscode_settings(modified_recipe, image_recipe) | ||
452 | self.vscode_extensions(modified_recipe) | ||
453 | self.vscode_c_cpp_properties(modified_recipe) | ||
454 | if args.target: | ||
455 | self.initialize_gdb_cross_configs( | ||
456 | image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode) | ||
457 | self.vscode_launch(modified_recipe) | ||
458 | self.vscode_tasks(args, modified_recipe) | ||
459 | |||
460 | |||
461 | def register_ide_plugin(ide_plugins): | ||
462 | ide_plugins['code'] = IdeVSCode | ||
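Every .vscode/*.json file written by this plugin goes through IdeBase.update_json_file(), which merges at the top level with dict.update, so keys the user already has are preserved unless the plugin writes the same key. A small, self-contained illustration of that behaviour (it assumes scripts/lib is on sys.path so devtool.ide_plugins is importable; the file name and keys are only examples):

import json
import os
import tempfile
from devtool.ide_plugins import IdeBase

with tempfile.TemporaryDirectory() as d:
    # Pre-existing user settings that should survive the update
    with open(os.path.join(d, 'settings.json'), 'w') as f:
        json.dump({"editor.formatOnSave": True, "cmake.configureOnOpen": False}, f)

    IdeBase.update_json_file(d, 'settings.json', {"cmake.configureOnOpen": True})

    with open(os.path.join(d, 'settings.json')) as f:
        print(json.load(f))
    # {'editor.formatOnSave': True, 'cmake.configureOnOpen': True}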
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py new file mode 100644 index 0000000000..f106c5a026 --- /dev/null +++ b/scripts/lib/devtool/ide_plugins/ide_none.py | |||
@@ -0,0 +1,53 @@ | |||
1 | # | ||
2 | # Copyright (C) 2023-2024 Siemens AG | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | """Devtool ide-sdk generic IDE plugin""" | ||
7 | |||
8 | import os | ||
9 | import logging | ||
10 | from devtool.ide_plugins import IdeBase, GdbCrossConfig | ||
11 | |||
12 | logger = logging.getLogger('devtool') | ||
13 | |||
14 | |||
15 | class IdeNone(IdeBase): | ||
16 | """Generate some generic helpers for other IDEs | ||
17 | |||
18 | Modified recipe mode: | ||
19 | Generate some helper scripts for remote debugging with GDB | ||
20 | |||
21 | Shared sysroot mode: | ||
22 | A wrapper for bitbake meta-ide-support and bitbake build-sysroots | ||
23 | """ | ||
24 | |||
25 | def __init__(self): | ||
26 | super().__init__() | ||
27 | |||
28 | def setup_shared_sysroots(self, shared_env): | ||
29 | real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys | ||
30 | deploy_dir_image = shared_env.ide_support.deploy_dir_image | ||
31 | env_script = os.path.join( | ||
32 | deploy_dir_image, 'environment-setup-' + real_multimach_target_sys) | ||
33 | logger.info( | ||
34 | "To use this SDK please source this: %s" % env_script) | ||
35 | |||
36 | def setup_modified_recipe(self, args, image_recipe, modified_recipe): | ||
37 | """generate some helper scripts and config files | ||
38 | |||
39 | - Execute the do_install task | ||
40 | - Execute devtool deploy-target | ||
41 | - Generate a gdbinit file per executable | ||
42 | - Generate the oe-scripts sym-link | ||
43 | """ | ||
44 | script_path = modified_recipe.gen_install_deploy_script(args) | ||
45 | logger.info("Created: %s" % script_path) | ||
46 | |||
47 | self.initialize_gdb_cross_configs(image_recipe, modified_recipe) | ||
48 | |||
49 | IdeBase.gen_oe_scrtips_sym_link(modified_recipe) | ||
50 | |||
51 | |||
52 | def register_ide_plugin(ide_plugins): | ||
53 | ide_plugins['none'] = IdeNone | ||
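Both plugins register themselves under a name ('code', 'none'); when --ide is not passed, the plugin with the highest ide_plugin_priority() is used, which is why IdeVSCode returns 100 when the code executable is found while IdeBase defaults to 10. The actual selection happens inside devtool ide-sdk and may differ in detail; the following is only a sketch of the idea:

def choose_default_ide(ide_plugins):
    # Pick the registered plugin class with the highest priority.
    best_name, best_cls, best_prio = None, None, -1
    for name, plugin_cls in ide_plugins.items():
        prio = plugin_cls.ide_plugin_priority()
        if prio > best_prio:
            best_name, best_cls, best_prio = name, plugin_cls, prio
    return best_name, best_cls

# With 'code' on PATH, IdeVSCode (100) wins over IdeNone (10).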
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py new file mode 100755 index 0000000000..931408fa74 --- /dev/null +++ b/scripts/lib/devtool/ide_sdk.py | |||
@@ -0,0 +1,1009 @@ | |||
1 | # Development tool - ide-sdk command plugin | ||
2 | # | ||
3 | # Copyright (C) 2023-2024 Siemens AG | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | """Devtool ide-sdk plugin""" | ||
8 | |||
9 | import json | ||
10 | import logging | ||
11 | import os | ||
12 | import re | ||
13 | import shutil | ||
14 | import stat | ||
15 | import subprocess | ||
16 | import sys | ||
17 | from argparse import RawTextHelpFormatter | ||
18 | from enum import Enum | ||
19 | |||
20 | import scriptutils | ||
21 | import bb | ||
22 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe | ||
23 | from devtool.standard import get_real_srctree | ||
24 | from devtool.ide_plugins import BuildTool | ||
25 | |||
26 | |||
27 | logger = logging.getLogger('devtool') | ||
28 | |||
29 | # dict of classes derived from IdeBase | ||
30 | ide_plugins = {} | ||
31 | |||
32 | |||
33 | class DevtoolIdeMode(Enum): | ||
34 | """Different modes are supported by the ide-sdk plugin. | ||
35 | |||
36 | The enum might be extended by more advanced modes in the future. Some ideas: | ||
37 | - auto: modified if all recipes are modified, shared if none of the recipes is modified. | ||
38 | - mixed: modified mode for modified recipes, shared mode for all other recipes. | ||
39 | """ | ||
40 | |||
41 | modified = 'modified' | ||
42 | shared = 'shared' | ||
43 | |||
44 | |||
45 | class TargetDevice: | ||
46 | """SSH remote login parameters""" | ||
47 | |||
48 | def __init__(self, args): | ||
49 | self.extraoptions = '' | ||
50 | if args.no_host_check: | ||
51 | self.extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' | ||
52 | self.ssh_sshexec = 'ssh' | ||
53 | if args.ssh_exec: | ||
54 | self.ssh_sshexec = args.ssh_exec | ||
55 | self.ssh_port = '' | ||
56 | if args.port: | ||
57 | self.ssh_port = "-p %s" % args.port | ||
58 | if args.key: | ||
59 | self.extraoptions += ' -i %s' % args.key | ||
60 | |||
61 | self.target = args.target | ||
62 | target_sp = args.target.split('@') | ||
63 | if len(target_sp) == 1: | ||
64 | self.login = "" | ||
65 | self.host = target_sp[0] | ||
66 | elif len(target_sp) == 2: | ||
67 | self.login = target_sp[0] | ||
68 | self.host = target_sp[1] | ||
69 | else: | ||
70 | logger.error("Invalid target argument: %s" % args.target) | ||
71 | |||
72 | |||
73 | class RecipeNative: | ||
74 | """Base class for calling bitbake to provide a -native recipe""" | ||
75 | |||
76 | def __init__(self, name, target_arch=None): | ||
77 | self.name = name | ||
78 | self.target_arch = target_arch | ||
79 | self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot'] | ||
80 | self.staging_bindir_native = None | ||
81 | self.target_sys = None | ||
82 | self.__native_bin = None | ||
83 | |||
84 | def _initialize(self, config, workspace, tinfoil): | ||
85 | """Get the parsed recipe""" | ||
86 | recipe_d = parse_recipe( | ||
87 | config, tinfoil, self.name, appends=True, filter_workspace=False) | ||
88 | if not recipe_d: | ||
89 | raise DevtoolError("Parsing %s recipe failed" % self.name) | ||
90 | self.staging_bindir_native = os.path.realpath( | ||
91 | recipe_d.getVar('STAGING_BINDIR_NATIVE')) | ||
92 | self.target_sys = recipe_d.getVar('TARGET_SYS') | ||
93 | return recipe_d | ||
94 | |||
95 | def initialize(self, config, workspace, tinfoil): | ||
96 | """Basic initialization that can be overridden by a derived class""" | ||
97 | self._initialize(config, workspace, tinfoil) | ||
98 | |||
99 | @property | ||
100 | def native_bin(self): | ||
101 | if not self.__native_bin: | ||
102 | raise DevtoolError("native binary name is not defined.") | ||
103 | return self.__native_bin | ||
104 | |||
105 | |||
106 | class RecipeGdbCross(RecipeNative): | ||
107 | """Handle handle gdb-cross on the host and the gdbserver on the target device""" | ||
108 | |||
109 | def __init__(self, args, target_arch, target_device): | ||
110 | super().__init__('gdb-cross-' + target_arch, target_arch) | ||
111 | self.target_device = target_device | ||
112 | self.gdb = None | ||
113 | self.gdbserver_port_next = int(args.gdbserver_port_start) | ||
114 | self.config_db = {} | ||
115 | |||
116 | def __find_gdbserver(self, config, tinfoil): | ||
117 | """Absolute path of the gdbserver""" | ||
118 | recipe_d_gdb = parse_recipe( | ||
119 | config, tinfoil, 'gdb', appends=True, filter_workspace=False) | ||
120 | if not recipe_d_gdb: | ||
121 | raise DevtoolError("Parsing gdb recipe failed") | ||
122 | return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver') | ||
123 | |||
124 | def initialize(self, config, workspace, tinfoil): | ||
125 | super()._initialize(config, workspace, tinfoil) | ||
126 | gdb_bin = self.target_sys + '-gdb' | ||
127 | gdb_path = os.path.join( | ||
128 | self.staging_bindir_native, self.target_sys, gdb_bin) | ||
129 | self.gdb = gdb_path | ||
130 | self.gdbserver_path = self.__find_gdbserver(config, tinfoil) | ||
131 | |||
132 | @property | ||
133 | def host(self): | ||
134 | return self.target_device.host | ||
135 | |||
136 | |||
137 | class RecipeImage: | ||
138 | """Handle some image recipe related properties | ||
139 | |||
140 | Most workflows require firmware that runs on the target device. | ||
141 | This firmware must be consistent with the setup of the host system. | ||
142 | In particular, the debug symbols must be compatible. For this, the | ||
143 | rootfs must be created as part of the SDK. | ||
144 | """ | ||
145 | |||
146 | def __init__(self, name): | ||
147 | self.combine_dbg_image = False | ||
148 | self.gdbserver_missing = False | ||
149 | self.name = name | ||
150 | self.rootfs = None | ||
151 | self.__rootfs_dbg = None | ||
152 | self.bootstrap_tasks = [self.name + ':do_build'] | ||
153 | |||
154 | def initialize(self, config, tinfoil): | ||
155 | image_d = parse_recipe( | ||
156 | config, tinfoil, self.name, appends=True, filter_workspace=False) | ||
157 | if not image_d: | ||
158 | raise DevtoolError( | ||
159 | "Parsing image recipe %s failed" % self.name) | ||
160 | |||
161 | self.combine_dbg_image = bb.data.inherits_class( | ||
162 | 'image-combined-dbg', image_d) | ||
163 | |||
164 | workdir = image_d.getVar('WORKDIR') | ||
165 | self.rootfs = os.path.join(workdir, 'rootfs') | ||
166 | if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1": | ||
167 | self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg') | ||
168 | |||
169 | self.gdbserver_missing = 'gdbserver' not in image_d.getVar( | ||
170 | 'IMAGE_INSTALL') and 'tools-debug' not in image_d.getVar('IMAGE_FEATURES') | ||
171 | |||
172 | @property | ||
173 | def debug_support(self): | ||
174 | return bool(self.rootfs_dbg) | ||
175 | |||
176 | @property | ||
177 | def rootfs_dbg(self): | ||
178 | if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg): | ||
179 | return self.__rootfs_dbg | ||
180 | return None | ||
181 | |||
182 | |||
183 | class RecipeMetaIdeSupport: | ||
184 | """For the shared sysroots mode meta-ide-support is needed | ||
185 | |||
186 | For use cases where just a cross tool-chain is required but | ||
187 | no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support | ||
188 | and bitbake build-sysroots. This also makes it possible to expose the cross-toolchains | ||
189 | to IDEs. For example, VSCode supports different tool-chains via e.g. cmake-kits. | ||
190 | """ | ||
191 | |||
192 | def __init__(self): | ||
193 | self.bootstrap_tasks = ['meta-ide-support:do_build'] | ||
194 | self.topdir = None | ||
195 | self.datadir = None | ||
196 | self.deploy_dir_image = None | ||
197 | self.build_sys = None | ||
198 | # From toolchain-scripts | ||
199 | self.real_multimach_target_sys = None | ||
200 | |||
201 | def initialize(self, config, tinfoil): | ||
202 | meta_ide_support_d = parse_recipe( | ||
203 | config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False) | ||
204 | if not meta_ide_support_d: | ||
205 | raise DevtoolError("Parsing meta-ide-support recipe failed") | ||
206 | |||
207 | self.topdir = meta_ide_support_d.getVar('TOPDIR') | ||
208 | self.datadir = meta_ide_support_d.getVar('datadir') | ||
209 | self.deploy_dir_image = meta_ide_support_d.getVar( | ||
210 | 'DEPLOY_DIR_IMAGE') | ||
211 | self.build_sys = meta_ide_support_d.getVar('BUILD_SYS') | ||
212 | self.real_multimach_target_sys = meta_ide_support_d.getVar( | ||
213 | 'REAL_MULTIMACH_TARGET_SYS') | ||
214 | |||
215 | |||
216 | class RecipeBuildSysroots: | ||
217 | """For the shared sysroots mode build-sysroots is needed""" | ||
218 | |||
219 | def __init__(self): | ||
220 | self.standalone_sysroot = None | ||
221 | self.standalone_sysroot_native = None | ||
222 | self.bootstrap_tasks = [ | ||
223 | 'build-sysroots:do_build_target_sysroot', | ||
224 | 'build-sysroots:do_build_native_sysroot' | ||
225 | ] | ||
226 | |||
227 | def initialize(self, config, tinfoil): | ||
228 | build_sysroots_d = parse_recipe( | ||
229 | config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False) | ||
230 | if not build_sysroots_d: | ||
231 | raise DevtoolError("Parsing build-sysroots recipe failed") | ||
232 | self.standalone_sysroot = build_sysroots_d.getVar( | ||
233 | 'STANDALONE_SYSROOT') | ||
234 | self.standalone_sysroot_native = build_sysroots_d.getVar( | ||
235 | 'STANDALONE_SYSROOT_NATIVE') | ||
236 | |||
237 | |||
238 | class SharedSysrootsEnv: | ||
239 | """Handle the shared sysroots based workflow | ||
240 | |||
241 | Support the workflow with just a tool-chain without a recipe. | ||
242 | It's basically like: | ||
243 | bitbake some-dependencies | ||
244 | bitbake meta-ide-support | ||
245 | bitbake build-sysroots | ||
246 | Use the environment-* file found in the deploy folder | ||
247 | """ | ||
248 | |||
249 | def __init__(self): | ||
250 | self.ide_support = None | ||
251 | self.build_sysroots = None | ||
252 | |||
253 | def initialize(self, ide_support, build_sysroots): | ||
254 | self.ide_support = ide_support | ||
255 | self.build_sysroots = build_sysroots | ||
256 | |||
257 | def setup_ide(self, ide): | ||
258 | ide.setup(self) | ||
259 | |||
260 | |||
261 | class RecipeNotModified: | ||
262 | """Handling of recipes added to the Direct DSK shared sysroots.""" | ||
263 | |||
264 | def __init__(self, name): | ||
265 | self.name = name | ||
266 | self.bootstrap_tasks = [name + ':do_populate_sysroot'] | ||
267 | |||
268 | |||
269 | class RecipeModified: | ||
270 | """Handling of recipes in the workspace created by devtool modify""" | ||
271 | OE_INIT_BUILD_ENV = 'oe-init-build-env' | ||
272 | |||
273 | VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$") | ||
274 | |||
275 | def __init__(self, name): | ||
276 | self.name = name | ||
277 | self.bootstrap_tasks = [name + ':do_install'] | ||
278 | self.gdb_cross = None | ||
279 | # workspace | ||
280 | self.real_srctree = None | ||
281 | self.srctree = None | ||
282 | self.ide_sdk_dir = None | ||
283 | self.ide_sdk_scripts_dir = None | ||
284 | self.bbappend = None | ||
285 | # recipe variables from d.getVar | ||
286 | self.b = None | ||
287 | self.base_libdir = None | ||
288 | self.bblayers = None | ||
289 | self.bpn = None | ||
290 | self.d = None | ||
291 | self.debug_build = None | ||
292 | self.fakerootcmd = None | ||
293 | self.fakerootenv = None | ||
294 | self.libdir = None | ||
295 | self.max_process = None | ||
296 | self.package_arch = None | ||
297 | self.package_debug_split_style = None | ||
298 | self.path = None | ||
299 | self.pn = None | ||
300 | self.recipe_sysroot = None | ||
301 | self.recipe_sysroot_native = None | ||
302 | self.staging_incdir = None | ||
303 | self.strip_cmd = None | ||
304 | self.target_arch = None | ||
305 | self.target_dbgsrc_dir = None | ||
306 | self.topdir = None | ||
307 | self.workdir = None | ||
308 | self.recipe_id = None | ||
309 | # replicate bitbake build environment | ||
310 | self.exported_vars = None | ||
311 | self.cmd_compile = None | ||
312 | self.__oe_init_dir = None | ||
313 | # main build tool used by this recipe | ||
314 | self.build_tool = BuildTool.UNDEFINED | ||
315 | # build_tool = cmake | ||
316 | self.oecmake_generator = None | ||
317 | self.cmake_cache_vars = None | ||
318 | # build_tool = meson | ||
319 | self.meson_buildtype = None | ||
320 | self.meson_wrapper = None | ||
321 | self.mesonopts = None | ||
322 | self.extra_oemeson = None | ||
323 | self.meson_cross_file = None | ||
324 | |||
325 | def initialize(self, config, workspace, tinfoil): | ||
326 | recipe_d = parse_recipe( | ||
327 | config, tinfoil, self.name, appends=True, filter_workspace=False) | ||
328 | if not recipe_d: | ||
329 | raise DevtoolError("Parsing %s recipe failed" % self.name) | ||
330 | |||
331 | # Verify this recipe is built as externalsrc setup by devtool modify | ||
332 | workspacepn = check_workspace_recipe( | ||
333 | workspace, self.name, bbclassextend=True) | ||
334 | self.srctree = workspace[workspacepn]['srctree'] | ||
335 | # Need to grab this here in case the source is within a subdirectory | ||
336 | self.real_srctree = get_real_srctree( | ||
337 | self.srctree, recipe_d.getVar('S'), recipe_d.getVar('UNPACKDIR')) | ||
338 | self.bbappend = workspace[workspacepn]['bbappend'] | ||
339 | |||
340 | self.ide_sdk_dir = os.path.join( | ||
341 | config.workspace_path, 'ide-sdk', self.name) | ||
342 | if os.path.exists(self.ide_sdk_dir): | ||
343 | shutil.rmtree(self.ide_sdk_dir) | ||
344 | self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts') | ||
345 | |||
346 | self.b = recipe_d.getVar('B') | ||
347 | self.base_libdir = recipe_d.getVar('base_libdir') | ||
348 | self.bblayers = recipe_d.getVar('BBLAYERS').split() | ||
349 | self.bpn = recipe_d.getVar('BPN') | ||
350 | self.cxx = recipe_d.getVar('CXX') | ||
351 | self.d = recipe_d.getVar('D') | ||
352 | self.debug_build = recipe_d.getVar('DEBUG_BUILD') | ||
353 | self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD') | ||
354 | self.fakerootenv = recipe_d.getVar('FAKEROOTENV') | ||
355 | self.libdir = recipe_d.getVar('libdir') | ||
356 | self.max_process = int(recipe_d.getVar( | ||
357 | "BB_NUMBER_THREADS") or os.cpu_count() or 1) | ||
358 | self.package_arch = recipe_d.getVar('PACKAGE_ARCH') | ||
359 | self.package_debug_split_style = recipe_d.getVar( | ||
360 | 'PACKAGE_DEBUG_SPLIT_STYLE') | ||
361 | self.path = recipe_d.getVar('PATH') | ||
362 | self.pn = recipe_d.getVar('PN') | ||
363 | self.recipe_sysroot = os.path.realpath( | ||
364 | recipe_d.getVar('RECIPE_SYSROOT')) | ||
365 | self.recipe_sysroot_native = os.path.realpath( | ||
366 | recipe_d.getVar('RECIPE_SYSROOT_NATIVE')) | ||
367 | self.staging_bindir_toolchain = os.path.realpath( | ||
368 | recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN')) | ||
369 | self.staging_incdir = os.path.realpath( | ||
370 | recipe_d.getVar('STAGING_INCDIR')) | ||
371 | self.strip_cmd = recipe_d.getVar('STRIP') | ||
372 | self.target_arch = recipe_d.getVar('TARGET_ARCH') | ||
373 | self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR') | ||
374 | self.topdir = recipe_d.getVar('TOPDIR') | ||
375 | self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR')) | ||
376 | |||
377 | self.__init_exported_variables(recipe_d) | ||
378 | |||
379 | if bb.data.inherits_class('cmake', recipe_d): | ||
380 | self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR') | ||
381 | self.__init_cmake_preset_cache(recipe_d) | ||
382 | self.build_tool = BuildTool.CMAKE | ||
383 | elif bb.data.inherits_class('meson', recipe_d): | ||
384 | self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE') | ||
385 | self.mesonopts = recipe_d.getVar('MESONOPTS') | ||
386 | self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON') | ||
387 | self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE') | ||
388 | self.build_tool = BuildTool.MESON | ||
389 | |||
390 | # Recipe ID is the identifier for IDE config sections | ||
391 | self.recipe_id = self.bpn + "-" + self.package_arch | ||
392 | self.recipe_id_pretty = self.bpn + ": " + self.package_arch | ||
393 | |||
394 | @staticmethod | ||
395 | def is_valid_shell_variable(var): | ||
396 | """Skip strange shell variables like systemd | ||
397 | |||
398 | Prevents odd bugs caused by variables which are not used in | ||
399 | this context but break various tools. | ||
400 | """ | ||
401 | if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var): | ||
402 | return True | ||
403 | bb.debug(1, "ignoring variable: %s" % var) | ||
404 | return False | ||
405 | |||
406 | def solib_search_path(self, image): | ||
407 | """Search for debug symbols in the rootfs and rootfs-dbg | ||
408 | |||
409 | The debug symbols of shared libraries which are provided by other packages | ||
410 | are grabbed from the -dbg packages in the rootfs-dbg. | ||
411 | |||
412 | But most cross debugging tools like gdb, perf, and systemtap need to find | ||
413 | the executable/library first and, via its debuglink note, locate the corresponding | ||
414 | symbols file. Therefore the library paths from the rootfs are added as well. | ||
415 | |||
416 | Note: For the devtool modified recipe compiled from the IDE, the debug | ||
417 | symbols are taken from the unstripped binaries in the image folder. | ||
418 | Also, devtool deploy-target takes the files from the image folder. | ||
419 | debug symbols in the image folder refer to the corresponding source files | ||
420 | with absolute paths of the build machine. Debug symbols found in the | ||
421 | rootfs-dbg are relocated and contain paths which refer to the source files | ||
422 | installed on the target device e.g. /usr/src/... | ||
423 | """ | ||
424 | base_libdir = self.base_libdir.lstrip('/') | ||
425 | libdir = self.libdir.lstrip('/') | ||
426 | so_paths = [ | ||
427 | # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug | ||
428 | os.path.join(image.rootfs_dbg, base_libdir, ".debug"), | ||
429 | os.path.join(image.rootfs_dbg, libdir, ".debug"), | ||
430 | # debug symbols for package_debug_split_style: debug-file-directory | ||
431 | os.path.join(image.rootfs_dbg, "usr", "lib", "debug"), | ||
432 | |||
433 | # The binaries are required as well, the debug packages are not enough | ||
434 | # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg | ||
435 | os.path.join(image.rootfs_dbg, base_libdir), | ||
436 | os.path.join(image.rootfs_dbg, libdir), | ||
437 | # Without image-combined-dbg.bbclass the binaries are only in rootfs. | ||
438 | # Note: Stepping into source files located in rootfs-dbg does not | ||
439 | # work without image-combined-dbg.bbclass yet. | ||
440 | os.path.join(image.rootfs, base_libdir), | ||
441 | os.path.join(image.rootfs, libdir) | ||
442 | ] | ||
443 | return so_paths | ||
444 | |||
445 | def solib_search_path_str(self, image): | ||
446 | """Return a : separated list of paths usable by GDB's set solib-search-path""" | ||
447 | return ':'.join(self.solib_search_path(image)) | ||
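For illustration, a minimal hedged sketch of how an IDE plugin could feed this value into a generated GDB startup file; the recipe/image objects and the exact GDB commands are assumptions of this example, not something defined above.

    # Hypothetical sketch: recipe is a RecipeModified, image is a RecipeImage.
    gdbinit_lines = [
        'set sysroot ' + image.rootfs,                                    # binaries carrying debuglink notes
        'set solib-search-path ' + recipe.solib_search_path_str(image),  # the paths assembled above
    ]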
448 | |||
449 | def __init_exported_variables(self, d): | ||
450 | """Find all variables with export flag set. | ||
451 | |||
452 | This makes it possible to generate IDE configurations which compile with the same | ||
453 | environment as bitbake does. That's at least a reasonable default behavior. | ||
454 | """ | ||
455 | exported_vars = {} | ||
456 | |||
457 | vars = (key for key in d.keys() if not key.startswith( | ||
458 | "__") and not d.getVarFlag(key, "func", False)) | ||
459 | for var in sorted(vars): | ||
460 | func = d.getVarFlag(var, "func", False) | ||
461 | if d.getVarFlag(var, 'python', False) and func: | ||
462 | continue | ||
463 | export = d.getVarFlag(var, "export", False) | ||
464 | unexport = d.getVarFlag(var, "unexport", False) | ||
465 | if not export and not unexport and not func: | ||
466 | continue | ||
467 | if unexport: | ||
468 | continue | ||
469 | |||
470 | val = d.getVar(var) | ||
471 | if val is None: | ||
472 | continue | ||
473 | if set(var) & set("-.{}+"): | ||
474 | logger.warn( | ||
475 | "Warning: Found invalid character in variable name %s", str(var)) | ||
476 | continue | ||
477 | varExpanded = d.expand(var) | ||
478 | val = str(val) | ||
479 | |||
480 | if not RecipeModified.is_valid_shell_variable(varExpanded): | ||
481 | continue | ||
482 | |||
483 | if func: | ||
484 | code_line = "line: {0}, file: {1}\n".format( | ||
485 | d.getVarFlag(var, "lineno", False), | ||
486 | d.getVarFlag(var, "filename", False)) | ||
487 | val = val.rstrip('\n') | ||
488 | logger.warn("Warning: exported shell function %s() is not exported (%s)" % | ||
489 | (varExpanded, code_line)) | ||
490 | continue | ||
491 | |||
492 | if export: | ||
493 | exported_vars[varExpanded] = val.strip() | ||
494 | continue | ||
495 | |||
496 | self.exported_vars = exported_vars | ||
497 | |||
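As a rough, hypothetical illustration of what __init_exported_variables() collects (variable names are the usual bitbake exports, values abbreviated):

    # A sketch only; the real content depends on the recipe and machine.
    example_exported_vars = {
        'CC': 'arm-poky-linux-gnueabi-gcc ... --sysroot=<recipe-sysroot>',
        'CFLAGS': '-O2 -g ...',
        'PKG_CONFIG_SYSROOT_DIR': '<recipe-sysroot>',
    }
    # These entries end up in the cmake preset "environment" and in the meson wrapper exports.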
498 | def __init_cmake_preset_cache(self, d): | ||
499 | """Get the arguments passed to cmake | ||
500 | |||
501 | Replicate the cmake configure arguments with all details to | ||
502 | share the build folder between bitbake and the SDK. | ||
503 | """ | ||
504 | site_file = os.path.join(self.workdir, 'site-file.cmake') | ||
505 | if os.path.exists(site_file): | ||
506 | print("Warning: site-file.cmake is not supported") | ||
507 | |||
508 | cache_vars = {} | ||
509 | oecmake_args = d.getVar('OECMAKE_ARGS').split() | ||
510 | extra_oecmake = d.getVar('EXTRA_OECMAKE').split() | ||
511 | for param in sorted(oecmake_args + extra_oecmake): | ||
512 | d_pref = "-D" | ||
513 | if param.startswith(d_pref): | ||
514 | param = param[len(d_pref):] | ||
515 | else: | ||
516 | print("Error: expected a -D") | ||
517 | param_s = param.split('=', 1) | ||
518 | param_nt = param_s[0].split(':', 1) | ||
519 | |||
520 | def handle_undefined_variable(var): | ||
521 | if var.startswith('${') and var.endswith('}'): | ||
522 | return '' | ||
523 | else: | ||
524 | return var | ||
525 | # Example: FOO=ON | ||
526 | if len(param_nt) == 1: | ||
527 | cache_vars[param_s[0]] = handle_undefined_variable(param_s[1]) | ||
528 | # Example: FOO:PATH=/tmp | ||
529 | elif len(param_nt) == 2: | ||
530 | cache_vars[param_nt[0]] = { | ||
531 | "type": param_nt[1], | ||
532 | "value": handle_undefined_variable(param_s[1]), | ||
533 | } | ||
534 | else: | ||
535 | print("Error: cannot parse %s" % param) | ||
536 | self.cmake_cache_vars = cache_vars | ||
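To make the parsing above concrete, a few hedged examples of how individual -D parameters would map into the cache dictionary (parameter names and values are made up):

    # '-DFOO=ON'           -> {'FOO': 'ON'}
    # '-DBAR:PATH=/tmp'    -> {'BAR': {'type': 'PATH', 'value': '/tmp'}}
    # '-DBAZ=${UNDEFINED}' -> {'BAZ': ''}   (an unexpanded ${...} value becomes an empty string)
    expected_cache_vars = {
        'FOO': 'ON',
        'BAR': {'type': 'PATH', 'value': '/tmp'},
        'BAZ': '',
    }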
537 | |||
538 | def cmake_preset(self): | ||
539 | """Create a preset for cmake that mimics how bitbake calls cmake""" | ||
540 | toolchain_file = os.path.join(self.workdir, 'toolchain.cmake') | ||
541 | cmake_executable = os.path.join( | ||
542 | self.recipe_sysroot_native, 'usr', 'bin', 'cmake') | ||
543 | self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id | ||
544 | |||
545 | preset_dict_configure = { | ||
546 | "name": self.recipe_id, | ||
547 | "displayName": self.recipe_id_pretty, | ||
548 | "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch), | ||
549 | "binaryDir": self.b, | ||
550 | "generator": self.oecmake_generator, | ||
551 | "toolchainFile": toolchain_file, | ||
552 | "cacheVariables": self.cmake_cache_vars, | ||
553 | "environment": self.exported_vars, | ||
554 | "cmakeExecutable": cmake_executable | ||
555 | } | ||
556 | |||
557 | preset_dict_build = { | ||
558 | "name": self.recipe_id, | ||
559 | "displayName": self.recipe_id_pretty, | ||
560 | "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch), | ||
561 | "configurePreset": self.recipe_id, | ||
562 | "inheritConfigureEnvironment": True | ||
563 | } | ||
564 | |||
565 | preset_dict_test = { | ||
566 | "name": self.recipe_id, | ||
567 | "displayName": self.recipe_id_pretty, | ||
568 | "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch), | ||
569 | "configurePreset": self.recipe_id, | ||
570 | "inheritConfigureEnvironment": True | ||
571 | } | ||
572 | |||
573 | preset_dict = { | ||
574 | "version": 3, # cmake 3.21, backward compatible with kirkstone | ||
575 | "configurePresets": [preset_dict_configure], | ||
576 | "buildPresets": [preset_dict_build], | ||
577 | "testPresets": [preset_dict_test] | ||
578 | } | ||
579 | |||
580 | # Finally write the json file | ||
581 | json_file = 'CMakeUserPresets.json' | ||
582 | json_path = os.path.join(self.real_srctree, json_file) | ||
583 | logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path)) | ||
584 | if not os.path.exists(self.real_srctree): | ||
585 | os.makedirs(self.real_srctree) | ||
586 | try: | ||
587 | with open(json_path) as f: | ||
588 | orig_dict = json.load(f) | ||
589 | except json.decoder.JSONDecodeError: | ||
590 | logger.info( | ||
591 | "Decoding %s failed. Probably because of comments in the json file" % json_path) | ||
592 | orig_dict = {} | ||
593 | except FileNotFoundError: | ||
594 | orig_dict = {} | ||
595 | |||
596 | # Add or update the presets for the recipe and keep other presets | ||
597 | for k, v in preset_dict.items(): | ||
598 | if isinstance(v, list): | ||
599 | update_preset = v[0] | ||
600 | preset_added = False | ||
601 | if k in orig_dict: | ||
602 | for index, orig_preset in enumerate(orig_dict[k]): | ||
603 | if 'name' in orig_preset: | ||
604 | if orig_preset['name'] == update_preset['name']: | ||
605 | logger.debug("Updating preset: %s" % | ||
606 | orig_preset['name']) | ||
607 | orig_dict[k][index] = update_preset | ||
608 | preset_added = True | ||
609 | break | ||
610 | else: | ||
611 | logger.debug("keeping preset: %s" % | ||
612 | orig_preset['name']) | ||
613 | else: | ||
614 | logger.warn("preset without a name found") | ||
615 | if not preset_added: | ||
616 | if not k in orig_dict: | ||
617 | orig_dict[k] = [] | ||
618 | orig_dict[k].append(update_preset) | ||
619 | logger.debug("Added preset: %s" % | ||
620 | update_preset['name']) | ||
621 | else: | ||
622 | orig_dict[k] = v | ||
623 | |||
624 | with open(json_path, 'w') as f: | ||
625 | json.dump(orig_dict, f, indent=4) | ||
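As a usage note, once CMakeUserPresets.json has been written the preset can be driven from the source tree with the cmake found in the native sysroot; the recipe id and paths below are placeholders for illustration:

    # Hedged sketch, roughly equivalent to self.cmd_compile above.
    import subprocess
    recipe_id = 'cmake-example-cortexa57'                     # hypothetical
    srctree = '/path/to/workspace/sources/cmake-example'      # hypothetical
    subprocess.run(['cmake', '--preset', recipe_id], cwd=srctree, check=True)             # configure
    subprocess.run(['cmake', '--build', '--preset', recipe_id], cwd=srctree, check=True)  # compile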
626 | |||
627 | def gen_meson_wrapper(self): | ||
628 | """Generate a wrapper script to call meson with the cross environment""" | ||
629 | bb.utils.mkdirhier(self.ide_sdk_scripts_dir) | ||
630 | meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson') | ||
631 | meson_real = os.path.join( | ||
632 | self.recipe_sysroot_native, 'usr', 'bin', 'meson.real') | ||
633 | with open(meson_wrapper, 'w') as mwrap: | ||
634 | mwrap.write("#!/bin/sh" + os.linesep) | ||
635 | for var, val in self.exported_vars.items(): | ||
636 | mwrap.write('export %s="%s"' % (var, val) + os.linesep) | ||
637 | mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep) | ||
638 | private_temp = os.path.join(self.b, "meson-private", "tmp") | ||
639 | mwrap.write('mkdir -p "%s"' % private_temp + os.linesep) | ||
640 | mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep) | ||
641 | mwrap.write('exec "%s" "$@"' % meson_real + os.linesep) | ||
642 | st = os.stat(meson_wrapper) | ||
643 | os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC) | ||
644 | self.meson_wrapper = meson_wrapper | ||
645 | self.cmd_compile = meson_wrapper + " compile -C " + self.b | ||
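Roughly, the generated wrapper ends up looking like the sketch below (paths shortened; the real exports depend on exported_vars):

    # #!/bin/sh
    # export CC="..."                      (one export line per entry of exported_vars)
    # unset CC CXX CPP LD AR NM STRIP
    # mkdir -p "<B>/meson-private/tmp"
    # export TMPDIR="<B>/meson-private/tmp"
    # exec "<RECIPE_SYSROOT_NATIVE>/usr/bin/meson.real" "$@"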
646 | |||
647 | def which(self, executable): | ||
648 | bin_path = shutil.which(executable, path=self.path) | ||
649 | if not bin_path: | ||
650 | raise DevtoolError( | ||
651 | 'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn)) | ||
652 | return bin_path | ||
653 | |||
654 | @staticmethod | ||
655 | def is_elf_file(file_path): | ||
656 | with open(file_path, "rb") as f: | ||
657 | data = f.read(4) | ||
658 | if data == b'\x7fELF': | ||
659 | return True | ||
660 | return False | ||
661 | |||
662 | def find_installed_binaries(self): | ||
663 | """find all executable elf files in the image directory""" | ||
664 | binaries = [] | ||
665 | d_len = len(self.d) | ||
666 | re_so = re.compile(r'.*\.so[.0-9]*$') | ||
667 | for root, _, files in os.walk(self.d, followlinks=False): | ||
668 | for file in files: | ||
669 | if os.path.islink(file): | ||
670 | continue | ||
671 | if re_so.match(file): | ||
672 | continue | ||
673 | abs_name = os.path.join(root, file) | ||
674 | if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name): | ||
675 | binaries.append(abs_name[d_len:]) | ||
676 | return sorted(binaries) | ||
677 | |||
678 | def gen_deploy_target_script(self, args): | ||
679 | """Generate a script which does what devtool deploy-target does | ||
680 | |||
681 | This script is much quicker than devtool deploy-target because it | ||
682 | does not need to start a bitbake server. All information from tinfoil | ||
683 | is hard-coded in the generated script. | ||
684 | """ | ||
685 | cmd_lines = ['#!%s' % str(sys.executable)] | ||
686 | cmd_lines.append('import sys') | ||
687 | cmd_lines.append('devtool_sys_path = %s' % str(sys.path)) | ||
688 | cmd_lines.append('devtool_sys_path.reverse()') | ||
689 | cmd_lines.append('for p in devtool_sys_path:') | ||
690 | cmd_lines.append(' if p not in sys.path:') | ||
691 | cmd_lines.append(' sys.path.insert(0, p)') | ||
692 | cmd_lines.append('from devtool.deploy import deploy_no_d') | ||
693 | args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check', | ||
694 | 'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target'] | ||
695 | filtered_args_dict = {key: value for key, value in vars( | ||
696 | args).items() if key in args_filter} | ||
697 | cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict)) | ||
698 | cmd_lines.append('class Dict2Class(object):') | ||
699 | cmd_lines.append(' def __init__(self, my_dict):') | ||
700 | cmd_lines.append(' for key in my_dict:') | ||
701 | cmd_lines.append(' setattr(self, key, my_dict[key])') | ||
702 | cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)') | ||
703 | cmd_lines.append( | ||
704 | 'setattr(filtered_args, "recipename", "%s")' % self.bpn) | ||
705 | cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' % | ||
706 | (self.d, self.workdir, self.path, self.strip_cmd, | ||
707 | self.libdir, self.base_libdir, self.max_process, | ||
708 | self.fakerootcmd, self.fakerootenv)) | ||
709 | return self.write_script(cmd_lines, 'deploy_target') | ||
710 | |||
711 | def gen_install_deploy_script(self, args): | ||
712 | """Generate a script which does install and deploy""" | ||
713 | cmd_lines = ['#!/bin/bash'] | ||
714 | |||
715 | # . oe-init-build-env $BUILDDIR | ||
716 | # Note: Sourcing scripts with arguments requires bash | ||
717 | cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % ( | ||
718 | self.oe_init_dir, self.oe_init_dir)) | ||
719 | cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % ( | ||
720 | self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir)) | ||
721 | |||
722 | # bitbake -c install | ||
723 | cmd_lines.append( | ||
724 | 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn)) | ||
725 | |||
726 | # Self contained devtool deploy-target | ||
727 | cmd_lines.append(self.gen_deploy_target_script(args)) | ||
728 | |||
729 | return self.write_script(cmd_lines, 'install_and_deploy') | ||
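A hedged usage sketch: the returned script is typically registered as an IDE build/deploy task, but it can also be run by hand; the path is illustrative only:

    # <ide_sdk_scripts_dir>/install_and_deploy_<recipe_id>
    #   1. cd to the detected oe_init_dir and source oe-init-build-env
    #   2. run 'bitbake <recipe> -c install --force'
    #   3. call the self-contained deploy_target_<recipe_id> script generated above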
730 | |||
731 | def write_script(self, cmd_lines, script_name): | ||
732 | bb.utils.mkdirhier(self.ide_sdk_scripts_dir) | ||
733 | script_name_arch = script_name + '_' + self.recipe_id | ||
734 | script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch) | ||
735 | with open(script_file, 'w') as script_f: | ||
736 | script_f.write(os.linesep.join(cmd_lines)) | ||
737 | st = os.stat(script_file) | ||
738 | os.chmod(script_file, st.st_mode | stat.S_IEXEC) | ||
739 | return script_file | ||
740 | |||
741 | @property | ||
742 | def oe_init_build_env(self): | ||
743 | """Find the oe-init-build-env used for this setup""" | ||
744 | oe_init_dir = self.oe_init_dir | ||
745 | if oe_init_dir: | ||
746 | return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV) | ||
747 | return None | ||
748 | |||
749 | @property | ||
750 | def oe_init_dir(self): | ||
751 | """Find the directory where the oe-init-build-env is located | ||
752 | |||
753 | Assumption: There might be a layer with higher priority than poky | ||
754 | which provides the oe-init-build-env in the layer's toplevel folder. | ||
755 | """ | ||
756 | if not self.__oe_init_dir: | ||
757 | for layer in reversed(self.bblayers): | ||
758 | result = subprocess.run( | ||
759 | ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True) | ||
760 | if result.returncode == 0: | ||
761 | oe_init_dir = result.stdout.decode('utf-8').strip() | ||
762 | oe_init_path = os.path.join( | ||
763 | oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV) | ||
764 | if os.path.exists(oe_init_path): | ||
765 | logger.debug("Using %s from: %s" % ( | ||
766 | RecipeModified.OE_INIT_BUILD_ENV, oe_init_path)) | ||
767 | self.__oe_init_dir = oe_init_dir | ||
768 | break | ||
769 | if not self.__oe_init_dir: | ||
770 | logger.error("Cannot find the bitbake top level folder") | ||
771 | return self.__oe_init_dir | ||
772 | |||
773 | |||
774 | def ide_setup(args, config, basepath, workspace): | ||
775 | """Generate the IDE configuration for the workspace""" | ||
776 | |||
777 | # Explicitly passing some special recipes does not make sense | ||
778 | for recipe in args.recipenames: | ||
779 | if recipe in ['meta-ide-support', 'build-sysroots']: | ||
780 | raise DevtoolError("Invalid recipe: %s." % recipe) | ||
781 | |||
782 | # Collect information about tasks which need to be bitbaked | ||
783 | bootstrap_tasks = [] | ||
784 | bootstrap_tasks_late = [] | ||
785 | tinfoil = setup_tinfoil(config_only=False, basepath=basepath) | ||
786 | try: | ||
787 | # define mode depending on recipes which need to be processed | ||
788 | recipes_image_names = [] | ||
789 | recipes_modified_names = [] | ||
790 | recipes_other_names = [] | ||
791 | for recipe in args.recipenames: | ||
792 | try: | ||
793 | check_workspace_recipe( | ||
794 | workspace, recipe, bbclassextend=True) | ||
795 | recipes_modified_names.append(recipe) | ||
796 | except DevtoolError: | ||
797 | recipe_d = parse_recipe( | ||
798 | config, tinfoil, recipe, appends=True, filter_workspace=False) | ||
799 | if not recipe_d: | ||
800 | raise DevtoolError("Parsing recipe %s failed" % recipe) | ||
801 | if bb.data.inherits_class('image', recipe_d): | ||
802 | recipes_image_names.append(recipe) | ||
803 | else: | ||
804 | recipes_other_names.append(recipe) | ||
805 | |||
806 | invalid_params = False | ||
807 | if args.mode == DevtoolIdeMode.shared: | ||
808 | if len(recipes_modified_names): | ||
809 | logger.error("In shared sysroots mode modified recipes %s cannot be handled." % str( | ||
810 | recipes_modified_names)) | ||
811 | invalid_params = True | ||
812 | if args.mode == DevtoolIdeMode.modified: | ||
813 | if len(recipes_other_names): | ||
814 | logger.error("Only in shared sysroots mode not modified recipes %s can be handled." % str( | ||
815 | recipes_other_names)) | ||
816 | invalid_params = True | ||
817 | if len(recipes_image_names) != 1: | ||
818 | logger.error( | ||
819 | "One image recipe is required as the rootfs for the remote development.") | ||
820 | invalid_params = True | ||
821 | for modified_recipe_name in recipes_modified_names: | ||
822 | if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'): | ||
823 | logger.error( | ||
824 | "Only cross compiled recipes are support. %s is not cross." % modified_recipe_name) | ||
825 | invalid_params = True | ||
826 | |||
827 | if invalid_params: | ||
828 | raise DevtoolError("Invalid parameters are passed.") | ||
829 | |||
830 | # For the shared sysroots mode, add all dependencies of all the images to the sysroots | ||
831 | # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg | ||
832 | recipes_images = [] | ||
833 | for recipes_image_name in recipes_image_names: | ||
834 | logger.info("Using image: %s" % recipes_image_name) | ||
835 | recipe_image = RecipeImage(recipes_image_name) | ||
836 | recipe_image.initialize(config, tinfoil) | ||
837 | bootstrap_tasks += recipe_image.bootstrap_tasks | ||
838 | recipes_images.append(recipe_image) | ||
839 | |||
840 | # Provide a Direct SDK with shared sysroots | ||
841 | recipes_not_modified = [] | ||
842 | if args.mode == DevtoolIdeMode.shared: | ||
843 | ide_support = RecipeMetaIdeSupport() | ||
844 | ide_support.initialize(config, tinfoil) | ||
845 | bootstrap_tasks += ide_support.bootstrap_tasks | ||
846 | |||
847 | logger.info("Adding %s to the Direct SDK sysroots." % | ||
848 | str(recipes_other_names)) | ||
849 | for recipe_name in recipes_other_names: | ||
850 | recipe_not_modified = RecipeNotModified(recipe_name) | ||
851 | bootstrap_tasks += recipe_not_modified.bootstrap_tasks | ||
852 | recipes_not_modified.append(recipe_not_modified) | ||
853 | |||
854 | build_sysroots = RecipeBuildSysroots() | ||
855 | build_sysroots.initialize(config, tinfoil) | ||
856 | bootstrap_tasks_late += build_sysroots.bootstrap_tasks | ||
857 | shared_env = SharedSysrootsEnv() | ||
858 | shared_env.initialize(ide_support, build_sysroots) | ||
859 | |||
860 | recipes_modified = [] | ||
861 | if args.mode == DevtoolIdeMode.modified: | ||
862 | logger.info("Setting up workspaces for modified recipe: %s" % | ||
863 | str(recipes_modified_names)) | ||
864 | gdbs_cross = {} | ||
865 | for recipe_name in recipes_modified_names: | ||
866 | recipe_modified = RecipeModified(recipe_name) | ||
867 | recipe_modified.initialize(config, workspace, tinfoil) | ||
868 | bootstrap_tasks += recipe_modified.bootstrap_tasks | ||
869 | recipes_modified.append(recipe_modified) | ||
870 | |||
871 | if recipe_modified.target_arch not in gdbs_cross: | ||
872 | target_device = TargetDevice(args) | ||
873 | gdb_cross = RecipeGdbCross( | ||
874 | args, recipe_modified.target_arch, target_device) | ||
875 | gdb_cross.initialize(config, workspace, tinfoil) | ||
876 | bootstrap_tasks += gdb_cross.bootstrap_tasks | ||
877 | gdbs_cross[recipe_modified.target_arch] = gdb_cross | ||
878 | recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch] | ||
879 | |||
880 | finally: | ||
881 | tinfoil.shutdown() | ||
882 | |||
883 | if not args.skip_bitbake: | ||
884 | bb_cmd = 'bitbake ' | ||
885 | if args.bitbake_k: | ||
886 | bb_cmd += "-k " | ||
887 | bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks) | ||
888 | exec_build_env_command( | ||
889 | config.init_path, basepath, bb_cmd_early, watch=True) | ||
890 | if bootstrap_tasks_late: | ||
891 | bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late) | ||
892 | exec_build_env_command( | ||
893 | config.init_path, basepath, bb_cmd_late, watch=True) | ||
894 | |||
895 | for recipe_image in recipes_images: | ||
896 | if (recipe_image.gdbserver_missing): | ||
897 | logger.warning( | ||
898 | "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image) | ||
899 | |||
900 | if recipe_image.combine_dbg_image is False: | ||
901 | logger.warning( | ||
902 | 'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image) | ||
903 | |||
904 | # Instantiate the active IDE plugin | ||
905 | ide = ide_plugins[args.ide]() | ||
906 | if args.mode == DevtoolIdeMode.shared: | ||
907 | ide.setup_shared_sysroots(shared_env) | ||
908 | elif args.mode == DevtoolIdeMode.modified: | ||
909 | for recipe_modified in recipes_modified: | ||
910 | if recipe_modified.build_tool is BuildTool.CMAKE: | ||
911 | recipe_modified.cmake_preset() | ||
912 | if recipe_modified.build_tool is BuildTool.MESON: | ||
913 | recipe_modified.gen_meson_wrapper() | ||
914 | ide.setup_modified_recipe( | ||
915 | args, recipe_image, recipe_modified) | ||
916 | |||
917 | if recipe_modified.debug_build != '1': | ||
918 | logger.warn( | ||
919 | 'Recipe %s is compiled with release build configuration. ' | ||
920 | 'You might want to add DEBUG_BUILD = "1" to %s. ' | ||
921 | 'Note that devtool modify --debug-build can do this automatically.', | ||
922 | recipe_modified.name, recipe_modified.bbappend) | ||
923 | else: | ||
924 | raise DevtoolError("Must not end up here.") | ||
925 | |||
926 | |||
927 | def register_commands(subparsers, context): | ||
928 | """Register devtool subcommands from this plugin""" | ||
929 | |||
930 | # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE | ||
931 | # configuration is generated. In the case of the eSDK, the bootstrapping is performed | ||
932 | # during the installation of the eSDK installer. Running the ide-sdk plugin from an | ||
933 | # eSDK installer-based setup would require skipping the bootstrapping and probably | ||
934 | # taking some other differences into account when generating the IDE configurations. | ||
935 | # This would be possible, but it is not implemented. | ||
936 | if context.fixed_setup: | ||
937 | return | ||
938 | |||
939 | global ide_plugins | ||
940 | |||
941 | # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins. | ||
942 | pluginpaths = [os.path.join(path, 'ide_plugins') | ||
943 | for path in context.pluginpaths] | ||
944 | ide_plugin_modules = [] | ||
945 | for pluginpath in pluginpaths: | ||
946 | scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath) | ||
947 | |||
948 | for ide_plugin_module in ide_plugin_modules: | ||
949 | if hasattr(ide_plugin_module, 'register_ide_plugin'): | ||
950 | ide_plugin_module.register_ide_plugin(ide_plugins) | ||
951 | # Sort plugins according to their priority. The first entry is the default IDE plugin. | ||
952 | ide_plugins = dict(sorted(ide_plugins.items(), | ||
953 | key=lambda p: p[1].ide_plugin_priority(), reverse=True)) | ||
954 | |||
955 | parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter, | ||
956 | help='Setup the SDK and configure the IDE') | ||
957 | parser_ide_sdk.add_argument( | ||
958 | 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n' | ||
959 | 'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.') | ||
960 | parser_ide_sdk.add_argument( | ||
961 | '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified, | ||
962 | help='Different SDK types are supported:\n' | ||
963 | '- "' + DevtoolIdeMode.modified.name + '" (default):\n' | ||
964 | ' devtool modify creates a workspace to work on the source code of a recipe.\n' | ||
965 | ' devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directory(ies)\n' | ||
966 | ' Usage example:\n' | ||
967 | ' devtool modify cmake-example\n' | ||
968 | ' devtool ide-sdk cmake-example core-image-minimal\n' | ||
969 | ' Start the IDE in the workspace folder\n' | ||
970 | ' At least one devtool modified recipe plus one image recipe are required:\n' | ||
971 | ' The image recipe is used to generate the target image and the remote debug configuration.\n' | ||
972 | '- "' + DevtoolIdeMode.shared.name + '":\n' | ||
973 | ' Usage example:\n' | ||
974 | ' devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n' | ||
975 | ' This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n' | ||
976 | ' To use this tool-chain the environment-* file found in the deploy..image folder needs to be sourced into a shell.\n' | ||
977 | ' In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit') | ||
978 | default_ide = list(ide_plugins.keys())[0] | ||
979 | parser_ide_sdk.add_argument( | ||
980 | '-i', '--ide', choices=ide_plugins.keys(), default=default_ide, | ||
981 | help='Setup the configuration for this IDE (default: %s)' % default_ide) | ||
982 | parser_ide_sdk.add_argument( | ||
983 | '-t', '--target', default='root@192.168.7.2', | ||
984 | help='Live target machine running an ssh server: user@hostname.') | ||
985 | parser_ide_sdk.add_argument( | ||
986 | '-G', '--gdbserver-port-start', default="1234", help='Port where gdbserver is listening.') | ||
987 | parser_ide_sdk.add_argument( | ||
988 | '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true') | ||
989 | parser_ide_sdk.add_argument( | ||
990 | '-e', '--ssh-exec', help='Executable to use in place of ssh') | ||
991 | parser_ide_sdk.add_argument( | ||
992 | '-P', '--port', help='Specify ssh port to use for connection to the target') | ||
993 | parser_ide_sdk.add_argument( | ||
994 | '-I', '--key', help='Specify ssh private key for connection to the target') | ||
995 | parser_ide_sdk.add_argument( | ||
996 | '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK', action='store_true') | ||
997 | parser_ide_sdk.add_argument( | ||
998 | '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true') | ||
999 | parser_ide_sdk.add_argument( | ||
1000 | '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false') | ||
1001 | parser_ide_sdk.add_argument( | ||
1002 | '-n', '--dry-run', help='List files to be undeployed only', action='store_true') | ||
1003 | parser_ide_sdk.add_argument( | ||
1004 | '-s', '--show-status', help='Show progress/status output', action='store_true') | ||
1005 | parser_ide_sdk.add_argument( | ||
1006 | '-p', '--no-preserve', help='Do not preserve existing files', action='store_true') | ||
1007 | parser_ide_sdk.add_argument( | ||
1008 | '--no-check-space', help='Do not check for available space before deploying', action='store_true') | ||
1009 | parser_ide_sdk.set_defaults(func=ide_setup) | ||
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py index 95384c5333..1054960551 100644 --- a/scripts/lib/devtool/menuconfig.py +++ b/scripts/lib/devtool/menuconfig.py | |||
@@ -3,6 +3,8 @@ | |||
3 | # Copyright (C) 2018 Xilinx | 3 | # Copyright (C) 2018 Xilinx |
4 | # Written by: Chandana Kalluri <ckalluri@xilinx.com> | 4 | # Written by: Chandana Kalluri <ckalluri@xilinx.com> |
5 | # | 5 | # |
6 | # SPDX-License-Identifier: MIT | ||
7 | # | ||
6 | # This program is free software; you can redistribute it and/or modify | 8 | # This program is free software; you can redistribute it and/or modify |
7 | # it under the terms of the GNU General Public License version 2 as | 9 | # it under the terms of the GNU General Public License version 2 as |
8 | # published by the Free Software Foundation. | 10 | # published by the Free Software Foundation. |
@@ -21,9 +23,6 @@ | |||
21 | import os | 23 | import os |
22 | import bb | 24 | import bb |
23 | import logging | 25 | import logging |
24 | import argparse | ||
25 | import re | ||
26 | import glob | ||
27 | from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command | 26 | from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command |
28 | from devtool import check_workspace_recipe | 27 | from devtool import check_workspace_recipe |
29 | logger = logging.getLogger('devtool') | 28 | logger = logging.getLogger('devtool') |
@@ -32,7 +31,6 @@ def menuconfig(args, config, basepath, workspace): | |||
32 | """Entry point for the devtool 'menuconfig' subcommand""" | 31 | """Entry point for the devtool 'menuconfig' subcommand""" |
33 | 32 | ||
34 | rd = "" | 33 | rd = "" |
35 | kconfigpath = "" | ||
36 | pn_src = "" | 34 | pn_src = "" |
37 | localfilesdir = "" | 35 | localfilesdir = "" |
38 | workspace_dir = "" | 36 | workspace_dir = "" |
@@ -43,13 +41,12 @@ def menuconfig(args, config, basepath, workspace): | |||
43 | return 1 | 41 | return 1 |
44 | 42 | ||
45 | check_workspace_recipe(workspace, args.component) | 43 | check_workspace_recipe(workspace, args.component) |
46 | pn = rd.getVar('PN', True) | 44 | pn = rd.getVar('PN') |
47 | 45 | ||
48 | if not rd.getVarFlag('do_menuconfig','task'): | 46 | if not rd.getVarFlag('do_menuconfig','task'): |
49 | raise DevtoolError("This recipe does not support menuconfig option") | 47 | raise DevtoolError("This recipe does not support menuconfig option") |
50 | 48 | ||
51 | workspace_dir = os.path.join(config.workspace_path,'sources') | 49 | workspace_dir = os.path.join(config.workspace_path,'sources') |
52 | kconfigpath = rd.getVar('B') | ||
53 | pn_src = os.path.join(workspace_dir,pn) | 50 | pn_src = os.path.join(workspace_dir,pn) |
54 | 51 | ||
55 | # add check to see if oe_local_files exists or not | 52 | # add check to see if oe_local_files exists or not |
@@ -68,7 +65,7 @@ def menuconfig(args, config, basepath, workspace): | |||
68 | logger.info('Launching menuconfig') | 65 | logger.info('Launching menuconfig') |
69 | exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) | 66 | exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) |
70 | fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg') | 67 | fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg') |
71 | res = standard._create_kconfig_diff(pn_src,rd,fragment) | 68 | standard._create_kconfig_diff(pn_src,rd,fragment) |
72 | 69 | ||
73 | return 0 | 70 | return 0 |
74 | 71 | ||
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py index ae3fc4caf9..9aefd7e354 100644 --- a/scripts/lib/devtool/sdk.py +++ b/scripts/lib/devtool/sdk.py | |||
@@ -207,7 +207,7 @@ def sdk_update(args, config, basepath, workspace): | |||
207 | if not sstate_mirrors: | 207 | if not sstate_mirrors: |
208 | with open(os.path.join(conf_dir, 'site.conf'), 'a') as f: | 208 | with open(os.path.join(conf_dir, 'site.conf'), 'a') as f: |
209 | f.write('SCONF_VERSION = "%s"\n' % site_conf_version) | 209 | f.write('SCONF_VERSION = "%s"\n' % site_conf_version) |
210 | f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver) | 210 | f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver) |
211 | finally: | 211 | finally: |
212 | shutil.rmtree(tmpsdk_dir) | 212 | shutil.rmtree(tmpsdk_dir) |
213 | 213 | ||
@@ -300,7 +300,8 @@ def sdk_install(args, config, basepath, workspace): | |||
300 | return 2 | 300 | return 2 |
301 | 301 | ||
302 | try: | 302 | try: |
303 | exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True) | 303 | exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True) |
304 | exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True) | ||
304 | except bb.process.ExecutionError as e: | 305 | except bb.process.ExecutionError as e: |
305 | raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e))) | 306 | raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e))) |
306 | 307 | ||
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index 01fb5ad96f..1fd5947c41 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
@@ -18,11 +18,13 @@ import argparse_oe | |||
18 | import scriptutils | 18 | import scriptutils |
19 | import errno | 19 | import errno |
20 | import glob | 20 | import glob |
21 | import filecmp | ||
22 | from collections import OrderedDict | 21 | from collections import OrderedDict |
22 | |||
23 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError | 23 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError |
24 | from devtool import parse_recipe | 24 | from devtool import parse_recipe |
25 | 25 | ||
26 | import bb.utils | ||
27 | |||
26 | logger = logging.getLogger('devtool') | 28 | logger = logging.getLogger('devtool') |
27 | 29 | ||
28 | override_branch_prefix = 'devtool-override-' | 30 | override_branch_prefix = 'devtool-override-' |
@@ -30,7 +32,8 @@ override_branch_prefix = 'devtool-override-' | |||
30 | 32 | ||
31 | def add(args, config, basepath, workspace): | 33 | def add(args, config, basepath, workspace): |
32 | """Entry point for the devtool 'add' subcommand""" | 34 | """Entry point for the devtool 'add' subcommand""" |
33 | import bb | 35 | import bb.data |
36 | import bb.process | ||
34 | import oe.recipeutils | 37 | import oe.recipeutils |
35 | 38 | ||
36 | if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: | 39 | if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: |
@@ -147,6 +150,8 @@ def add(args, config, basepath, workspace): | |||
147 | extracmdopts += ' -a' | 150 | extracmdopts += ' -a' |
148 | if args.npm_dev: | 151 | if args.npm_dev: |
149 | extracmdopts += ' --npm-dev' | 152 | extracmdopts += ' --npm-dev' |
153 | if args.no_pypi: | ||
154 | extracmdopts += ' --no-pypi' | ||
150 | if args.mirrors: | 155 | if args.mirrors: |
151 | extracmdopts += ' --mirrors' | 156 | extracmdopts += ' --mirrors' |
152 | if args.srcrev: | 157 | if args.srcrev: |
@@ -204,7 +209,7 @@ def add(args, config, basepath, workspace): | |||
204 | for fn in os.listdir(tempdir): | 209 | for fn in os.listdir(tempdir): |
205 | shutil.move(os.path.join(tempdir, fn), recipedir) | 210 | shutil.move(os.path.join(tempdir, fn), recipedir) |
206 | else: | 211 | else: |
207 | raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout)) | 212 | raise DevtoolError(f'Failed to create a recipe file for source {source}') |
208 | attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) | 213 | attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) |
209 | if os.path.exists(attic_recipe): | 214 | if os.path.exists(attic_recipe): |
210 | logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) | 215 | logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) |
@@ -234,10 +239,14 @@ def add(args, config, basepath, workspace): | |||
234 | if args.fetchuri and not args.no_git: | 239 | if args.fetchuri and not args.no_git: |
235 | setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) | 240 | setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) |
236 | 241 | ||
237 | initial_rev = None | 242 | initial_rev = {} |
238 | if os.path.exists(os.path.join(srctree, '.git')): | 243 | if os.path.exists(os.path.join(srctree, '.git')): |
239 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) | 244 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) |
240 | initial_rev = stdout.rstrip() | 245 | initial_rev["."] = stdout.rstrip() |
246 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree) | ||
247 | for line in stdout.splitlines(): | ||
248 | (rev, submodule) = line.split() | ||
249 | initial_rev[os.path.relpath(submodule, srctree)] = rev | ||
241 | 250 | ||
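A hedged example of the resulting mapping for a checkout containing one submodule (paths and revisions are made up):

    initial_rev = {'.': '0123abcd...', 'libs/vendored': '89efcdab...'}
    # Each entry is later written to the append file as '# initial_rev <path>: <rev>'.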
242 | if args.src_subdir: | 251 | if args.src_subdir: |
243 | srctree = os.path.join(srctree, args.src_subdir) | 252 | srctree = os.path.join(srctree, args.src_subdir) |
@@ -251,7 +260,8 @@ def add(args, config, basepath, workspace): | |||
251 | if b_is_s: | 260 | if b_is_s: |
252 | f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) | 261 | f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) |
253 | if initial_rev: | 262 | if initial_rev: |
254 | f.write('\n# initial_rev: %s\n' % initial_rev) | 263 | for key, value in initial_rev.items(): |
264 | f.write('\n# initial_rev %s: %s\n' % (key, value)) | ||
255 | 265 | ||
256 | if args.binary: | 266 | if args.binary: |
257 | f.write('do_install:append() {\n') | 267 | f.write('do_install:append() {\n') |
@@ -298,6 +308,7 @@ def add(args, config, basepath, workspace): | |||
298 | 308 | ||
299 | def _check_compatible_recipe(pn, d): | 309 | def _check_compatible_recipe(pn, d): |
300 | """Check if the recipe is supported by devtool""" | 310 | """Check if the recipe is supported by devtool""" |
311 | import bb.data | ||
301 | if pn == 'perf': | 312 | if pn == 'perf': |
302 | raise DevtoolError("The perf recipe does not actually check out " | 313 | raise DevtoolError("The perf recipe does not actually check out " |
303 | "source and thus cannot be supported by this tool", | 314 | "source and thus cannot be supported by this tool", |
@@ -353,7 +364,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None): | |||
353 | bb.utils.mkdirhier(dst_d) | 364 | bb.utils.mkdirhier(dst_d) |
354 | shutil.move(src, dst) | 365 | shutil.move(src, dst) |
355 | 366 | ||
356 | def _copy_file(src, dst, dry_run_outdir=None): | 367 | def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None): |
357 | """Copy a file. Creates all the directory components of destination path.""" | 368 | """Copy a file. Creates all the directory components of destination path.""" |
358 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 369 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
359 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) | 370 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) |
@@ -367,7 +378,7 @@ def _copy_file(src, dst, dry_run_outdir=None): | |||
367 | 378 | ||
368 | def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | 379 | def _git_ls_tree(repodir, treeish='HEAD', recursive=False): |
369 | """List contents of a git treeish""" | 380 | """List contents of a git treeish""" |
370 | import bb | 381 | import bb.process |
371 | cmd = ['git', 'ls-tree', '-z', treeish] | 382 | cmd = ['git', 'ls-tree', '-z', treeish] |
372 | if recursive: | 383 | if recursive: |
373 | cmd.append('-r') | 384 | cmd.append('-r') |
@@ -380,6 +391,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | |||
380 | ret[split[3]] = split[0:3] | 391 | ret[split[3]] = split[0:3] |
381 | return ret | 392 | return ret |
382 | 393 | ||
394 | def _git_modified(repodir): | ||
395 | """List the difference between HEAD and the index""" | ||
396 | import bb.process | ||
397 | cmd = ['git', 'status', '--porcelain'] | ||
398 | out, _ = bb.process.run(cmd, cwd=repodir) | ||
399 | ret = [] | ||
400 | if out: | ||
401 | for line in out.split("\n"): | ||
402 | if line and not line.startswith('??'): | ||
403 | ret.append(line[3:]) | ||
404 | return ret | ||
405 | |||
406 | |||
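For reference, a hedged example of how _git_modified() interprets porcelain output (file names are hypothetical):

    porcelain = ' M src/main.c\nA  newfile.c\n?? build/\n'
    # A repository in this state would yield:
    modified = ['src/main.c', 'newfile.c']   # untracked ('??') entries are skipped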
383 | def _git_exclude_path(srctree, path): | 407 | def _git_exclude_path(srctree, path): |
384 | """Return pathspec (list of paths) that excludes certain path""" | 408 | """Return pathspec (list of paths) that excludes certain path""" |
385 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - | 409 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - |
@@ -407,8 +431,6 @@ def _ls_tree(directory): | |||
407 | 431 | ||
408 | def extract(args, config, basepath, workspace): | 432 | def extract(args, config, basepath, workspace): |
409 | """Entry point for the devtool 'extract' subcommand""" | 433 | """Entry point for the devtool 'extract' subcommand""" |
410 | import bb | ||
411 | |||
412 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 434 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
413 | if not tinfoil: | 435 | if not tinfoil: |
414 | # Error already shown | 436 | # Error already shown |
@@ -431,8 +453,6 @@ def extract(args, config, basepath, workspace): | |||
431 | 453 | ||
432 | def sync(args, config, basepath, workspace): | 454 | def sync(args, config, basepath, workspace): |
433 | """Entry point for the devtool 'sync' subcommand""" | 455 | """Entry point for the devtool 'sync' subcommand""" |
434 | import bb | ||
435 | |||
436 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 456 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
437 | if not tinfoil: | 457 | if not tinfoil: |
438 | # Error already shown | 458 | # Error already shown |
@@ -453,41 +473,11 @@ def sync(args, config, basepath, workspace): | |||
453 | finally: | 473 | finally: |
454 | tinfoil.shutdown() | 474 | tinfoil.shutdown() |
455 | 475 | ||
456 | def symlink_oelocal_files_srctree(rd,srctree): | ||
457 | import oe.patch | ||
458 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): | ||
459 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree | ||
460 | # (otherwise the recipe won't build as expected) | ||
461 | local_files_dir = os.path.join(srctree, 'oe-local-files') | ||
462 | addfiles = [] | ||
463 | for root, _, files in os.walk(local_files_dir): | ||
464 | relpth = os.path.relpath(root, local_files_dir) | ||
465 | if relpth != '.': | ||
466 | bb.utils.mkdirhier(os.path.join(srctree, relpth)) | ||
467 | for fn in files: | ||
468 | if fn == '.gitignore': | ||
469 | continue | ||
470 | destpth = os.path.join(srctree, relpth, fn) | ||
471 | if os.path.exists(destpth): | ||
472 | os.unlink(destpth) | ||
473 | if relpth != '.': | ||
474 | back_relpth = os.path.relpath(local_files_dir, root) | ||
475 | os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth) | ||
476 | else: | ||
477 | os.symlink('oe-local-files/%s' % fn, destpth) | ||
478 | addfiles.append(os.path.join(relpth, fn)) | ||
479 | if addfiles: | ||
480 | bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree) | ||
481 | useroptions = [] | ||
482 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) | ||
483 | bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree) | ||
484 | |||
485 | |||
486 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): | 476 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): |
487 | """Extract sources of a recipe""" | 477 | """Extract sources of a recipe""" |
488 | import oe.recipeutils | ||
489 | import oe.patch | ||
490 | import oe.path | 478 | import oe.path |
479 | import bb.data | ||
480 | import bb.process | ||
491 | 481 | ||
492 | pn = d.getVar('PN') | 482 | pn = d.getVar('PN') |
493 | 483 | ||
@@ -520,7 +510,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
520 | for event in history: | 510 | for event in history: |
521 | if not 'flag' in event: | 511 | if not 'flag' in event: |
522 | if event['op'].startswith((':append[', ':prepend[')): | 512 | if event['op'].startswith((':append[', ':prepend[')): |
523 | extra_overrides.append(event['op'].split('[')[1].split(']')[0]) | 513 | override = event['op'].split('[')[1].split(']')[0] |
514 | if not override.startswith('pn-'): | ||
515 | extra_overrides.append(override) | ||
524 | # We want to remove duplicate overrides. If a recipe had multiple | 516 | # We want to remove duplicate overrides. If a recipe had multiple |
525 | # SRC_URI_override += values it would cause mulitple instances of | 517 | # SRC_URI_override += values it would cause mulitple instances of |
526 | # overrides. This doesn't play nicely with things like creating a | 518 | # overrides. This doesn't play nicely with things like creating a |
@@ -550,6 +542,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
550 | tempbasedir = d.getVar('WORKDIR') | 542 | tempbasedir = d.getVar('WORKDIR') |
551 | bb.utils.mkdirhier(tempbasedir) | 543 | bb.utils.mkdirhier(tempbasedir) |
552 | tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) | 544 | tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) |
545 | appendbackup = None | ||
553 | try: | 546 | try: |
554 | tinfoil.logger.setLevel(logging.WARNING) | 547 | tinfoil.logger.setLevel(logging.WARNING) |
555 | 548 | ||
@@ -560,11 +553,13 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
560 | appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') | 553 | appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') |
561 | shutil.copyfile(appendfile, appendbackup) | 554 | shutil.copyfile(appendfile, appendbackup) |
562 | else: | 555 | else: |
563 | appendbackup = None | ||
564 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 556 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
565 | logger.debug('writing append file %s' % appendfile) | 557 | logger.debug('writing append file %s' % appendfile) |
566 | with open(appendfile, 'a') as f: | 558 | with open(appendfile, 'a') as f: |
567 | f.write('###--- _extract_source\n') | 559 | f.write('###--- _extract_source\n') |
560 | f.write('deltask do_recipe_qa\n') | ||
561 | f.write('deltask do_recipe_qa_setscene\n') | ||
562 | f.write('ERROR_QA:remove = "patch-fuzz"\n') | ||
568 | f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) | 563 | f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) |
569 | f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) | 564 | f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) |
570 | if not is_kernel_yocto: | 565 | if not is_kernel_yocto: |
@@ -582,6 +577,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
582 | preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') | 577 | preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') |
583 | with open(preservestampfile, 'w') as f: | 578 | with open(preservestampfile, 'w') as f: |
584 | f.write(d.getVar('STAMP')) | 579 | f.write(d.getVar('STAMP')) |
580 | tinfoil.modified_files() | ||
585 | try: | 581 | try: |
586 | if is_kernel_yocto: | 582 | if is_kernel_yocto: |
587 | # We need to generate the kernel config | 583 | # We need to generate the kernel config |
@@ -629,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
629 | srcsubdir = f.read() | 625 | srcsubdir = f.read() |
630 | except FileNotFoundError as e: | 626 | except FileNotFoundError as e: |
631 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) | 627 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) |
632 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir')) | 628 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR')))) |
633 | 629 | ||
634 | # Check if work-shared is empty, if yes | 630 | # Check if work-shared is empty, if yes |
635 | # find source and copy to work-shared | 631 | # find source and copy to work-shared |
@@ -644,39 +640,26 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
644 | 640 | ||
645 | if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): | 641 | if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): |
646 | shutil.rmtree(workshareddir) | 642 | shutil.rmtree(workshareddir) |
647 | oe.path.copyhardlinktree(srcsubdir,workshareddir) | 643 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
648 | elif not os.path.exists(workshareddir): | 644 | elif not os.path.exists(workshareddir): |
649 | oe.path.copyhardlinktree(srcsubdir,workshareddir) | 645 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
650 | |||
651 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') | ||
652 | srctree_localdir = os.path.join(srctree, 'oe-local-files') | ||
653 | 646 | ||
654 | if sync: | 647 | if sync: |
655 | bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | 648 | try: |
656 | 649 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) | |
657 | # Move oe-local-files directory to srctree | 650 | bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree) |
658 | # As the oe-local-files is not part of the constructed git tree, | 651 | |
659 | # remove them directly during the synchrounizating might surprise | 652 | # Use git fetch to update the source with the current recipe |
660 | # the users. Instead, we move it to oe-local-files.bak and remind | 653 | # To be able to update the currently checked out branch with |
661 | # user in the log message. | 654 | # possibly new history (no fast-forward) git needs to be told |
662 | if os.path.exists(srctree_localdir + '.bak'): | 655 | # that's ok |
663 | shutil.rmtree(srctree_localdir, srctree_localdir + '.bak') | 656 | logger.info('Syncing source files including patches to git branch: %s' % devbranch) |
664 | 657 | bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | |
665 | if os.path.exists(srctree_localdir): | 658 | except bb.process.ExecutionError as e: |
666 | logger.info('Backing up current local file directory %s' % srctree_localdir) | 659 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) |
667 | shutil.move(srctree_localdir, srctree_localdir + '.bak') | ||
668 | |||
669 | if os.path.exists(tempdir_localdir): | ||
670 | logger.info('Syncing local source files to srctree...') | ||
671 | shutil.copytree(tempdir_localdir, srctree_localdir) | ||
672 | else: | ||
673 | # Move oe-local-files directory to srctree | ||
674 | if os.path.exists(tempdir_localdir): | ||
675 | logger.info('Adding local source files to srctree...') | ||
676 | shutil.move(tempdir_localdir, srcsubdir) | ||
677 | 660 | ||
661 | else: | ||
678 | shutil.move(srcsubdir, srctree) | 662 | shutil.move(srcsubdir, srctree) |
679 | symlink_oelocal_files_srctree(d,srctree) | ||
680 | 663 | ||
681 | if is_kernel_yocto: | 664 | if is_kernel_yocto: |
682 | logger.info('Copying kernel config to srctree') | 665 | logger.info('Copying kernel config to srctree') |
@@ -695,8 +678,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
695 | 678 | ||
696 | def _add_md5(config, recipename, filename): | 679 | def _add_md5(config, recipename, filename): |
697 | """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" | 680 | """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" |
698 | import bb.utils | ||
699 | |||
700 | def addfile(fn): | 681 | def addfile(fn): |
701 | md5 = bb.utils.md5_file(fn) | 682 | md5 = bb.utils.md5_file(fn) |
702 | with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: | 683 | with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: |
@@ -715,7 +696,6 @@ def _add_md5(config, recipename, filename): | |||
715 | def _check_preserve(config, recipename): | 696 | def _check_preserve(config, recipename): |
716 | """Check if a file was manually changed and needs to be saved in 'attic' | 697 | """Check if a file was manually changed and needs to be saved in 'attic' |
717 | directory""" | 698 | directory""" |
718 | import bb.utils | ||
719 | origfile = os.path.join(config.workspace_path, '.devtool_md5') | 699 | origfile = os.path.join(config.workspace_path, '.devtool_md5') |
720 | newfile = os.path.join(config.workspace_path, '.devtool_md5_new') | 700 | newfile = os.path.join(config.workspace_path, '.devtool_md5_new') |
721 | preservepath = os.path.join(config.workspace_path, 'attic', recipename) | 701 | preservepath = os.path.join(config.workspace_path, 'attic', recipename) |
@@ -746,26 +726,36 @@ def _check_preserve(config, recipename): | |||
746 | 726 | ||
747 | def get_staging_kver(srcdir): | 727 | def get_staging_kver(srcdir): |
748 | # Kernel version from work-shared | 728 | # Kernel version from work-shared |
749 | kerver = [] | 729 | import itertools |
750 | staging_kerVer="" | 730 | try: |
751 | if os.path.exists(srcdir) and os.listdir(srcdir): | 731 | with open(os.path.join(srcdir, "Makefile")) as f: |
752 | with open(os.path.join(srcdir,"Makefile")) as f: | 732 | # Take VERSION, PATCHLEVEL, SUBLEVEL from lines 1, 2, 3 |
753 | version = [next(f) for x in range(5)][1:4] | 733 | return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4)) |
754 | for word in version: | 734 | except FileNotFoundError: |
755 | kerver.append(word.split('= ')[1].split('\n')[0]) | 735 | return "" |
756 | staging_kerVer = ".".join(kerver) | ||
757 | return staging_kerVer | ||
758 | 736 | ||
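A hedged illustration of get_staging_kver(), assuming a kernel Makefile whose first line is the SPDX comment (version numbers are made up):

    makefile_head = '# SPDX-License-Identifier: GPL-2.0\nVERSION = 6\nPATCHLEVEL = 6\nSUBLEVEL = 23\n'
    # get_staging_kver() on such a tree returns '6.6.23'; a missing Makefile yields ''.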
759 | def get_staging_kbranch(srcdir): | 737 | def get_staging_kbranch(srcdir): |
738 | import bb.process | ||
760 | staging_kbranch = "" | 739 | staging_kbranch = "" |
761 | if os.path.exists(srcdir) and os.listdir(srcdir): | 740 | if os.path.exists(srcdir) and os.listdir(srcdir): |
762 | (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir) | 741 | (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) |
763 | staging_kbranch = "".join(branch.split('\n')[0]) | 742 | staging_kbranch = "".join(branch.split('\n')[0]) |
764 | return staging_kbranch | 743 | return staging_kbranch |
765 | 744 | ||
745 | def get_real_srctree(srctree, s, unpackdir): | ||
746 | # Check that recipe isn't using a shared workdir | ||
747 | s = os.path.abspath(s) | ||
748 | unpackdir = os.path.abspath(unpackdir) | ||
749 | if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir: | ||
750 | # Handle if S is set to a subdirectory of the source | ||
751 | srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1] | ||
752 | srctree = os.path.join(srctree, srcsubdir) | ||
753 | return srctree | ||
754 | |||
766 | def modify(args, config, basepath, workspace): | 755 | def modify(args, config, basepath, workspace): |
767 | """Entry point for the devtool 'modify' subcommand""" | 756 | """Entry point for the devtool 'modify' subcommand""" |
768 | import bb | 757 | import bb.data |
758 | import bb.process | ||
769 | import oe.recipeutils | 759 | import oe.recipeutils |
770 | import oe.patch | 760 | import oe.patch |
771 | import oe.path | 761 | import oe.path |
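Note (illustrative, not part of the diff): the rewritten get_staging_kver() derives the kernel version straight from the first lines of the work-shared Makefile instead of accumulating words in a list. A minimal standalone sketch of the same parsing, using a hypothetical srcdir path:

    import itertools
    import os

    def staging_kver(srcdir):
        # VERSION, PATCHLEVEL and SUBLEVEL are lines 2-4 of the kernel
        # Makefile, each of the form "NAME = value"; join them as "x.y.z".
        try:
            with open(os.path.join(srcdir, "Makefile")) as f:
                return ".".join(line.rstrip().split('= ')[1]
                                for line in itertools.islice(f, 1, 4))
        except FileNotFoundError:
            return ""

    # staging_kver('/path/to/work-shared/linux')  ->  e.g. '6.6.23' (hypothetical)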
@@ -807,8 +797,8 @@ def modify(args, config, basepath, workspace): | |||
807 | 797 | ||
808 | _check_compatible_recipe(pn, rd) | 798 | _check_compatible_recipe(pn, rd) |
809 | 799 | ||
810 | initial_rev = None | 800 | initial_revs = {} |
811 | commits = [] | 801 | commits = {} |
812 | check_commits = False | 802 | check_commits = False |
813 | 803 | ||
814 | if bb.data.inherits_class('kernel-yocto', rd): | 804 | if bb.data.inherits_class('kernel-yocto', rd): |
@@ -820,36 +810,22 @@ def modify(args, config, basepath, workspace): | |||
820 | staging_kerVer = get_staging_kver(srcdir) | 810 | staging_kerVer = get_staging_kver(srcdir) |
821 | staging_kbranch = get_staging_kbranch(srcdir) | 811 | staging_kbranch = get_staging_kbranch(srcdir) |
822 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): | 812 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): |
823 | oe.path.copyhardlinktree(srcdir,srctree) | 813 | oe.path.copyhardlinktree(srcdir, srctree) |
824 | workdir = rd.getVar('WORKDIR') | 814 | unpackdir = rd.getVar('UNPACKDIR') |
825 | srcsubdir = rd.getVar('S') | 815 | srcsubdir = rd.getVar('S') |
826 | localfilesdir = os.path.join(srctree,'oe-local-files') | ||
827 | # Move local source files into separate subdir | ||
828 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] | ||
829 | local_files = oe.recipeutils.get_recipe_local_files(rd) | ||
830 | 816 | ||
831 | for key in local_files.copy(): | 817 | # Add locally copied files to gitignore as we add back to the metadata directly |
832 | if key.endswith('scc'): | 818 | local_files = oe.recipeutils.get_recipe_local_files(rd) |
833 | sccfile = open(local_files[key], 'r') | ||
834 | for l in sccfile: | ||
835 | line = l.split() | ||
836 | if line and line[0] in ('kconf', 'patch'): | ||
837 | cfg = os.path.join(os.path.dirname(local_files[key]), line[-1]) | ||
838 | if not cfg in local_files.values(): | ||
839 | local_files[line[-1]] = cfg | ||
840 | shutil.copy2(cfg, workdir) | ||
841 | sccfile.close() | ||
842 | |||
843 | # Ignore local files with subdir={BP} | ||
844 | srcabspath = os.path.abspath(srcsubdir) | 819 | srcabspath = os.path.abspath(srcsubdir) |
845 | local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] | 820 | local_files = [fname for fname in local_files if |
821 | os.path.exists(os.path.join(unpackdir, fname)) and | ||
822 | srcabspath == unpackdir] | ||
846 | if local_files: | 823 | if local_files: |
847 | for fname in local_files: | 824 | with open(os.path.join(srctree, '.gitignore'), 'a+') as f: |
848 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) | 825 | f.write('# Ignore local files, by default. Remove the following lines ' |
849 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: | 826 | 'if you want to commit the directory to Git\n') |
850 | f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n') | 827 | for fname in local_files: |
851 | 828 | f.write('%s\n' % fname) | |
852 | symlink_oelocal_files_srctree(rd,srctree) | ||
853 | 829 | ||
854 | task = 'do_configure' | 830 | task = 'do_configure' |
855 | res = tinfoil.build_targets(pn, task, handle_events=True) | 831 | res = tinfoil.build_targets(pn, task, handle_events=True) |
@@ -857,22 +833,33 @@ def modify(args, config, basepath, workspace): | |||
857 | # Copy .config to workspace | 833 | # Copy .config to workspace |
858 | kconfpath = rd.getVar('B') | 834 | kconfpath = rd.getVar('B') |
859 | logger.info('Copying kernel config to workspace') | 835 | logger.info('Copying kernel config to workspace') |
860 | shutil.copy2(os.path.join(kconfpath, '.config'),srctree) | 836 | shutil.copy2(os.path.join(kconfpath, '.config'), srctree) |
861 | 837 | ||
862 | # Set this to true, we still need to get initial_rev | 838 | # Set this to true, we still need to get initial_rev |
863 | # by parsing the git repo | 839 | # by parsing the git repo |
864 | args.no_extract = True | 840 | args.no_extract = True |
865 | 841 | ||
866 | if not args.no_extract: | 842 | if not args.no_extract: |
867 | initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) | 843 | initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) |
868 | if not initial_rev: | 844 | if not initial_revs["."]: |
869 | return 1 | 845 | return 1 |
870 | logger.info('Source tree extracted to %s' % srctree) | 846 | logger.info('Source tree extracted to %s' % srctree) |
847 | |||
871 | if os.path.exists(os.path.join(srctree, '.git')): | 848 | if os.path.exists(os.path.join(srctree, '.git')): |
872 | # Get list of commits since this revision | 849 | # Get list of commits since this revision |
873 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree) | 850 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) |
874 | commits = stdout.split() | 851 | commits["."] = stdout.split() |
875 | check_commits = True | 852 | check_commits = True |
853 | try: | ||
854 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | ||
855 | except bb.process.ExecutionError: | ||
856 | stdout = "" | ||
857 | for line in stdout.splitlines(): | ||
858 | (rev, submodule_path) = line.split() | ||
859 | submodule = os.path.relpath(submodule_path, srctree) | ||
860 | initial_revs[submodule] = rev | ||
861 | (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path) | ||
862 | commits[submodule] = stdout.split() | ||
876 | else: | 863 | else: |
877 | if os.path.exists(os.path.join(srctree, '.git')): | 864 | if os.path.exists(os.path.join(srctree, '.git')): |
878 | # Check if it's a tree previously extracted by us. This is done | 865 | # Check if it's a tree previously extracted by us. This is done |
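Note (illustrative, not part of the diff): the hunk above replaces the single initial_rev/commits values with dictionaries keyed by "." for the top-level tree and by the relative path of each git submodule that carries a devtool-base ref. A hedged sketch of that collection step, reusing bb.process.run as the diff does:

    import os
    import bb.process

    def collect_devtool_revs(srctree):
        initial_revs, commits = {}, {}
        # Top-level repository
        rev, _ = bb.process.run('git rev-parse devtool-base', cwd=srctree)
        initial_revs['.'] = rev.rstrip()
        out, _ = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=srctree)
        commits['.'] = out.split()
        # Submodules that also carry a devtool-base ref
        try:
            out, _ = bb.process.run(
                "git submodule --quiet foreach --recursive "
                "'echo `git rev-parse devtool-base` $PWD'", cwd=srctree)
        except bb.process.ExecutionError:
            out = ''
        for line in out.splitlines():
            rev, path = line.split()
            name = os.path.relpath(path, srctree)
            initial_revs[name] = rev
            out, _ = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=path)
            commits[name] = out.split()
        return initial_revs, commits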
@@ -889,11 +876,11 @@ def modify(args, config, basepath, workspace): | |||
889 | for line in stdout.splitlines(): | 876 | for line in stdout.splitlines(): |
890 | if line.startswith('*'): | 877 | if line.startswith('*'): |
891 | (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) | 878 | (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) |
892 | initial_rev = stdout.rstrip() | 879 | initial_revs["."] = stdout.rstrip() |
893 | if not initial_rev: | 880 | if "." not in initial_revs: |
894 | # Otherwise, just grab the head revision | 881 | # Otherwise, just grab the head revision |
895 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) | 882 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) |
896 | initial_rev = stdout.rstrip() | 883 | initial_revs["."] = stdout.rstrip() |
897 | 884 | ||
898 | branch_patches = {} | 885 | branch_patches = {} |
899 | if check_commits: | 886 | if check_commits: |
@@ -910,28 +897,40 @@ def modify(args, config, basepath, workspace): | |||
910 | seen_patches = [] | 897 | seen_patches = [] |
911 | for branch in branches: | 898 | for branch in branches: |
912 | branch_patches[branch] = [] | 899 | branch_patches[branch] = [] |
913 | (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree) | 900 | (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree) |
914 | for line in stdout.splitlines(): | 901 | for sha1 in stdout.splitlines(): |
915 | line = line.strip() | 902 | notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip()) |
916 | if line.startswith(oe.patch.GitApplyTree.patch_line_prefix): | 903 | origpatch = notes.get(oe.patch.GitApplyTree.original_patch) |
917 | origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip() | 904 | if origpatch and origpatch not in seen_patches: |
918 | if not origpatch in seen_patches: | 905 | seen_patches.append(origpatch) |
919 | seen_patches.append(origpatch) | 906 | branch_patches[branch].append(origpatch) |
920 | branch_patches[branch].append(origpatch) | ||
921 | 907 | ||
922 | # Need to grab this here in case the source is within a subdirectory | 908 | # Need to grab this here in case the source is within a subdirectory |
923 | srctreebase = srctree | 909 | srctreebase = srctree |
924 | 910 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) | |
925 | # Check that recipe isn't using a shared workdir | ||
926 | s = os.path.abspath(rd.getVar('S')) | ||
927 | workdir = os.path.abspath(rd.getVar('WORKDIR')) | ||
928 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | ||
929 | # Handle if S is set to a subdirectory of the source | ||
930 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | ||
931 | srctree = os.path.join(srctree, srcsubdir) | ||
932 | 911 | ||
933 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 912 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
934 | with open(appendfile, 'w') as f: | 913 | with open(appendfile, 'w') as f: |
914 | # if not present, add type=git-dependency to the secondary sources | ||
915 | # (non-local files) so they can be extracted correctly when building a recipe after | ||
916 | # doing a devtool modify on it | ||
917 | src_uri = rd.getVar('SRC_URI').split() | ||
918 | src_uri_append = [] | ||
919 | src_uri_remove = [] | ||
920 | |||
921 | # Assume first entry is main source extracted in ${S} so skip it | ||
922 | src_uri = src_uri[1::] | ||
923 | |||
924 | # Add "type=git-dependency" to all non-local sources | ||
925 | for url in src_uri: | ||
926 | if not url.startswith('file://') and 'type=' not in url: | ||
927 | src_uri_remove.append(url) | ||
928 | src_uri_append.append('%s;type=git-dependency' % url) | ||
929 | |||
930 | if src_uri_remove: | ||
931 | f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove)) | ||
932 | f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append)) | ||
933 | |||
935 | f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n') | 934 | f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n') |
936 | # Local files can be modified/tracked in separate subdir under srctree | 935 | # Local files can be modified/tracked in separate subdir under srctree |
937 | # Mostly useful for packages with S != WORKDIR | 936 | # Mostly useful for packages with S != WORKDIR |
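Note (illustrative, not part of the diff): the new bbappend code rewrites every non-local secondary SRC_URI entry to carry type=git-dependency so it is fetched correctly after a devtool modify. For a hypothetical recipe with one extra git source, the transformation amounts to:

    # Hypothetical SRC_URI (first entry is the main source unpacked into ${S})
    src_uri = [
        'git://git.example.com/main.git;branch=master;protocol=https',
        'git://git.example.com/extra.git;branch=master;protocol=https',
        'file://0001-local.patch',
    ]
    src_uri_remove, src_uri_append = [], []
    for url in src_uri[1:]:
        if not url.startswith('file://') and 'type=' not in url:
            src_uri_remove.append(url)
            src_uri_append.append('%s;type=git-dependency' % url)
    # The workspace bbappend then receives:
    #   SRC_URI:remove = "git://git.example.com/extra.git;branch=master;protocol=https"
    #   SRC_URI:append = " git://git.example.com/extra.git;branch=master;protocol=https;type=git-dependency"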
@@ -948,34 +947,31 @@ def modify(args, config, basepath, workspace): | |||
948 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) | 947 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
949 | 948 | ||
950 | if bb.data.inherits_class('kernel', rd): | 949 | if bb.data.inherits_class('kernel', rd): |
951 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' | ||
952 | 'do_fetch do_unpack do_kernel_configcheck"\n') | ||
953 | f.write('\ndo_patch[noexec] = "1"\n') | ||
954 | f.write('\ndo_configure:append() {\n' | ||
955 | ' cp ${B}/.config ${S}/.config.baseline\n' | ||
956 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | ||
957 | '}\n') | ||
958 | f.write('\ndo_kernel_configme:prepend() {\n' | 950 | f.write('\ndo_kernel_configme:prepend() {\n' |
959 | ' if [ -e ${S}/.config ]; then\n' | 951 | ' if [ -e ${S}/.config ]; then\n' |
960 | ' mv ${S}/.config ${S}/.config.old\n' | 952 | ' mv ${S}/.config ${S}/.config.old\n' |
961 | ' fi\n' | 953 | ' fi\n' |
962 | '}\n') | 954 | '}\n') |
963 | if rd.getVarFlag('do_menuconfig','task'): | 955 | if rd.getVarFlag('do_menuconfig', 'task'): |
964 | f.write('\ndo_configure:append() {\n' | 956 | f.write('\ndo_configure:append() {\n' |
965 | ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n' | 957 | ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n' |
966 | ' cp ${B}/.config ${S}/.config.baseline\n' | 958 | ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n' |
967 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | 959 | ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n' |
968 | ' fi\n' | 960 | ' fi\n' |
969 | '}\n') | 961 | '}\n') |
970 | if initial_rev: | 962 | if initial_revs: |
971 | f.write('\n# initial_rev: %s\n' % initial_rev) | 963 | for name, rev in initial_revs.items(): |
972 | for commit in commits: | 964 | f.write('\n# initial_rev %s: %s\n' % (name, rev)) |
973 | f.write('# commit: %s\n' % commit) | 965 | if name in commits: |
966 | for commit in commits[name]: | ||
967 | f.write('# commit %s: %s\n' % (name, commit)) | ||
974 | if branch_patches: | 968 | if branch_patches: |
975 | for branch in branch_patches: | 969 | for branch in branch_patches: |
976 | if branch == args.branch: | 970 | if branch == args.branch: |
977 | continue | 971 | continue |
978 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) | 972 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) |
973 | if args.debug_build: | ||
974 | f.write('\nDEBUG_BUILD = "1"\n') | ||
979 | 975 | ||
980 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 976 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
981 | 977 | ||
@@ -1020,6 +1016,7 @@ def rename(args, config, basepath, workspace): | |||
1020 | origfnver = '' | 1016 | origfnver = '' |
1021 | 1017 | ||
1022 | recipefilemd5 = None | 1018 | recipefilemd5 = None |
1019 | newrecipefilemd5 = None | ||
1023 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 1020 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
1024 | try: | 1021 | try: |
1025 | rd = parse_recipe(config, tinfoil, args.recipename, True) | 1022 | rd = parse_recipe(config, tinfoil, args.recipename, True) |
@@ -1097,6 +1094,7 @@ def rename(args, config, basepath, workspace): | |||
1097 | 1094 | ||
1098 | # Rename source tree if it's the default path | 1095 | # Rename source tree if it's the default path |
1099 | appendmd5 = None | 1096 | appendmd5 = None |
1097 | newappendmd5 = None | ||
1100 | if not args.no_srctree: | 1098 | if not args.no_srctree: |
1101 | srctree = workspace[args.recipename]['srctree'] | 1099 | srctree = workspace[args.recipename]['srctree'] |
1102 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): | 1100 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): |
@@ -1185,7 +1183,7 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre | |||
1185 | """Get initial and update rev of a recipe. These are the start point of the | 1183 | """Get initial and update rev of a recipe. These are the start point of the |
1186 | whole patchset and start point for the patches to be re-generated/updated. | 1184 | whole patchset and start point for the patches to be re-generated/updated. |
1187 | """ | 1185 | """ |
1188 | import bb | 1186 | import bb.process |
1189 | 1187 | ||
1190 | # Get current branch | 1188 | # Get current branch |
1191 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', | 1189 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', |
@@ -1193,44 +1191,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre | |||
1193 | branchname = stdout.rstrip() | 1191 | branchname = stdout.rstrip() |
1194 | 1192 | ||
1195 | # Parse initial rev from recipe if not specified | 1193 | # Parse initial rev from recipe if not specified |
1196 | commits = [] | 1194 | commits = {} |
1197 | patches = [] | 1195 | patches = [] |
1196 | initial_revs = {} | ||
1198 | with open(recipe_path, 'r') as f: | 1197 | with open(recipe_path, 'r') as f: |
1199 | for line in f: | 1198 | for line in f: |
1200 | if line.startswith('# initial_rev:'): | 1199 | pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$' |
1201 | if not initial_rev: | 1200 | match = re.search(pattern, line) |
1202 | initial_rev = line.split(':')[-1].strip() | 1201 | if match: |
1203 | elif line.startswith('# commit:') and not force_patch_refresh: | 1202 | name = match.group(1) |
1204 | commits.append(line.split(':')[-1].strip()) | 1203 | rev = match.group(2) |
1205 | elif line.startswith('# patches_%s:' % branchname): | 1204 | if line.startswith('# initial_rev'): |
1206 | patches = line.split(':')[-1].strip().split(',') | 1205 | if not (name == "." and initial_rev): |
1207 | 1206 | initial_revs[name] = rev | |
1208 | update_rev = initial_rev | 1207 | elif line.startswith('# commit') and not force_patch_refresh: |
1209 | changed_revs = None | 1208 | if name not in commits: |
1210 | if initial_rev: | 1209 | commits[name] = [rev] |
1210 | else: | ||
1211 | commits[name].append(rev) | ||
1212 | elif line.startswith('# patches_%s:' % branchname): | ||
1213 | patches = line.split(':')[-1].strip().split(',') | ||
1214 | |||
1215 | update_revs = dict(initial_revs) | ||
1216 | changed_revs = {} | ||
1217 | for name, rev in initial_revs.items(): | ||
1211 | # Find first actually changed revision | 1218 | # Find first actually changed revision |
1212 | stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % | 1219 | stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % |
1213 | initial_rev, cwd=srctree) | 1220 | rev, cwd=os.path.join(srctree, name)) |
1214 | newcommits = stdout.split() | 1221 | newcommits = stdout.split() |
1215 | for i in range(min(len(commits), len(newcommits))): | 1222 | if name in commits: |
1216 | if newcommits[i] == commits[i]: | 1223 | for i in range(min(len(commits[name]), len(newcommits))): |
1217 | update_rev = commits[i] | 1224 | if newcommits[i] == commits[name][i]: |
1225 | update_revs[name] = commits[name][i] | ||
1218 | 1226 | ||
1219 | try: | 1227 | try: |
1220 | stdout, _ = bb.process.run('git cherry devtool-patched', | 1228 | stdout, _ = bb.process.run('git cherry devtool-patched', |
1221 | cwd=srctree) | 1229 | cwd=os.path.join(srctree, name)) |
1222 | except bb.process.ExecutionError as err: | 1230 | except bb.process.ExecutionError as err: |
1223 | stdout = None | 1231 | stdout = None |
1224 | 1232 | ||
1225 | if stdout is not None and not force_patch_refresh: | 1233 | if stdout is not None and not force_patch_refresh: |
1226 | changed_revs = [] | ||
1227 | for line in stdout.splitlines(): | 1234 | for line in stdout.splitlines(): |
1228 | if line.startswith('+ '): | 1235 | if line.startswith('+ '): |
1229 | rev = line.split()[1] | 1236 | rev = line.split()[1] |
1230 | if rev in newcommits: | 1237 | if rev in newcommits: |
1231 | changed_revs.append(rev) | 1238 | if name not in changed_revs: |
1239 | changed_revs[name] = [rev] | ||
1240 | else: | ||
1241 | changed_revs[name].append(rev) | ||
1232 | 1242 | ||
1233 | return initial_rev, update_rev, changed_revs, patches | 1243 | return initial_revs, update_revs, changed_revs, patches |
1234 | 1244 | ||
1235 | def _remove_file_entries(srcuri, filelist): | 1245 | def _remove_file_entries(srcuri, filelist): |
1236 | """Remove file:// entries from SRC_URI""" | 1246 | """Remove file:// entries from SRC_URI""" |
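Note (illustrative, not part of the diff): _get_patchset_revs() now recovers per-tree revisions from the comment lines that modify() writes into the workspace bbappend, using a single regex for both "# initial_rev <name>: <sha>" and "# commit <name>: <sha>" lines. A small sketch of that parsing with made-up SHAs:

    import re

    lines = [
        '# initial_rev .: 1111111111111111111111111111111111111111',
        '# commit .: 2222222222222222222222222222222222222222',
        '# initial_rev lib/sub: 3333333333333333333333333333333333333333',
    ]
    pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
    initial_revs, commits = {}, {}
    for line in lines:
        match = re.search(pattern, line)
        if not match:
            continue
        name, rev = match.group(1), match.group(2)
        if line.startswith('# initial_rev'):
            initial_revs[name] = rev
        elif line.startswith('# commit'):
            commits.setdefault(name, []).append(rev)
    # initial_revs -> {'.': '111...', 'lib/sub': '333...'};  commits -> {'.': ['222...']}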
@@ -1285,17 +1295,21 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru | |||
1285 | raise | 1295 | raise |
1286 | 1296 | ||
1287 | 1297 | ||
1288 | def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | 1298 | def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): |
1289 | """Export patches from srctree to given location. | 1299 | """Export patches from srctree to given location. |
1290 | Returns three-tuple of dicts: | 1300 | Returns three-tuple of dicts: |
1291 | 1. updated - patches that already exist in SRCURI | 1301 | 1. updated - patches that already exist in SRCURI |
1292 | 2. added - new patches that don't exist in SRCURI | 1302 | 2. added - new patches that don't exist in SRCURI |
1293 | 3 removed - patches that exist in SRCURI but not in exported patches | 1303 | 3 removed - patches that exist in SRCURI but not in exported patches |
1294 | In each dict the key is the 'basepath' of the URI and value is the | 1304 | In each dict the key is the 'basepath' of the URI and value is: |
1295 | absolute path to the existing file in recipe space (if any). | 1305 | - for updated and added dicts, a dict with 2 optional keys: |
1306 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1307 | - 'patchdir': the directory in which the patch should be applied (if any) | ||
1308 | - for removed dict, the absolute path to the existing file in recipe space | ||
1296 | """ | 1309 | """ |
1297 | import oe.recipeutils | 1310 | import oe.recipeutils |
1298 | from oe.patch import GitApplyTree | 1311 | from oe.patch import GitApplyTree |
1312 | import bb.process | ||
1299 | updated = OrderedDict() | 1313 | updated = OrderedDict() |
1300 | added = OrderedDict() | 1314 | added = OrderedDict() |
1301 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') | 1315 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') |
@@ -1306,59 +1320,67 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | |||
1306 | 1320 | ||
1307 | # Generate patches from Git, exclude local files directory | 1321 | # Generate patches from Git, exclude local files directory |
1308 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') | 1322 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') |
1309 | GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec) | 1323 | GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec) |
1310 | 1324 | for dirpath, dirnames, filenames in os.walk(destdir): | |
1311 | new_patches = sorted(os.listdir(destdir)) | 1325 | new_patches = filenames |
1312 | for new_patch in new_patches: | 1326 | reldirpath = os.path.relpath(dirpath, destdir) |
1313 | # Strip numbering from patch names. If it's a git sequence named patch, | 1327 | for new_patch in new_patches: |
1314 | # the numbers might not match up since we are starting from a different | 1328 | # Strip numbering from patch names. If it's a git sequence named patch, |
1315 | # revision This does assume that people are using unique shortlog | 1329 | # the numbers might not match up since we are starting from a different |
1316 | # values, but they ought to be anyway... | 1330 | # revision This does assume that people are using unique shortlog |
1317 | new_basename = seqpatch_re.match(new_patch).group(2) | 1331 | # values, but they ought to be anyway... |
1318 | match_name = None | 1332 | new_basename = seqpatch_re.match(new_patch).group(2) |
1319 | for old_patch in existing_patches: | 1333 | match_name = None |
1320 | old_basename = seqpatch_re.match(old_patch).group(2) | 1334 | old_patch = None |
1321 | old_basename_splitext = os.path.splitext(old_basename) | 1335 | for old_patch in existing_patches: |
1322 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: | 1336 | old_basename = seqpatch_re.match(old_patch).group(2) |
1323 | old_patch_noext = os.path.splitext(old_patch)[0] | 1337 | old_basename_splitext = os.path.splitext(old_basename) |
1324 | match_name = old_patch_noext | 1338 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: |
1325 | break | 1339 | old_patch_noext = os.path.splitext(old_patch)[0] |
1326 | elif new_basename == old_basename: | 1340 | match_name = old_patch_noext |
1327 | match_name = old_patch | 1341 | break |
1328 | break | 1342 | elif new_basename == old_basename: |
1329 | if match_name: | 1343 | match_name = old_patch |
1330 | # Rename patch files | 1344 | break |
1331 | if new_patch != match_name: | 1345 | if match_name: |
1332 | bb.utils.rename(os.path.join(destdir, new_patch), | 1346 | # Rename patch files |
1333 | os.path.join(destdir, match_name)) | 1347 | if new_patch != match_name: |
1334 | # Need to pop it off the list now before checking changed_revs | 1348 | bb.utils.rename(os.path.join(destdir, new_patch), |
1335 | oldpath = existing_patches.pop(old_patch) | 1349 | os.path.join(destdir, match_name)) |
1336 | if changed_revs is not None: | 1350 | # Need to pop it off the list now before checking changed_revs |
1337 | # Avoid updating patches that have not actually changed | 1351 | oldpath = existing_patches.pop(old_patch) |
1338 | with open(os.path.join(destdir, match_name), 'r') as f: | 1352 | if changed_revs is not None and dirpath in changed_revs: |
1339 | firstlineitems = f.readline().split() | 1353 | # Avoid updating patches that have not actually changed |
1340 | # Looking for "From <hash>" line | 1354 | with open(os.path.join(dirpath, match_name), 'r') as f: |
1341 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: | 1355 | firstlineitems = f.readline().split() |
1342 | if not firstlineitems[1] in changed_revs: | 1356 | # Looking for "From <hash>" line |
1343 | continue | 1357 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: |
1344 | # Recompress if necessary | 1358 | if not firstlineitems[1] in changed_revs[dirpath]: |
1345 | if oldpath.endswith(('.gz', '.Z')): | 1359 | continue |
1346 | bb.process.run(['gzip', match_name], cwd=destdir) | 1360 | # Recompress if necessary |
1347 | if oldpath.endswith('.gz'): | 1361 | if oldpath.endswith(('.gz', '.Z')): |
1348 | match_name += '.gz' | 1362 | bb.process.run(['gzip', match_name], cwd=destdir) |
1349 | else: | 1363 | if oldpath.endswith('.gz'): |
1350 | match_name += '.Z' | 1364 | match_name += '.gz' |
1351 | elif oldpath.endswith('.bz2'): | 1365 | else: |
1352 | bb.process.run(['bzip2', match_name], cwd=destdir) | 1366 | match_name += '.Z' |
1353 | match_name += '.bz2' | 1367 | elif oldpath.endswith('.bz2'): |
1354 | updated[match_name] = oldpath | 1368 | bb.process.run(['bzip2', match_name], cwd=destdir) |
1355 | else: | 1369 | match_name += '.bz2' |
1356 | added[new_patch] = None | 1370 | updated[match_name] = {'path' : oldpath} |
1371 | if reldirpath != ".": | ||
1372 | updated[match_name]['patchdir'] = reldirpath | ||
1373 | else: | ||
1374 | added[new_patch] = {} | ||
1375 | if reldirpath != ".": | ||
1376 | added[new_patch]['patchdir'] = reldirpath | ||
1377 | |||
1357 | return (updated, added, existing_patches) | 1378 | return (updated, added, existing_patches) |
1358 | 1379 | ||
1359 | 1380 | ||
1360 | def _create_kconfig_diff(srctree, rd, outfile): | 1381 | def _create_kconfig_diff(srctree, rd, outfile): |
1361 | """Create a kconfig fragment""" | 1382 | """Create a kconfig fragment""" |
1383 | import bb.process | ||
1362 | # Only update config fragment if both config files exist | 1384 | # Only update config fragment if both config files exist |
1363 | orig_config = os.path.join(srctree, '.config.baseline') | 1385 | orig_config = os.path.join(srctree, '.config.baseline') |
1364 | new_config = os.path.join(srctree, '.config.new') | 1386 | new_config = os.path.join(srctree, '.config.new') |
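Note (illustrative, not part of the diff): because extractPatches() now takes a dict of start revisions and emits submodule patches into subdirectories of destdir, the exporter walks the output tree and records a relative 'patchdir' for anything not at the top level. A simplified sketch of just that bookkeeping (the renaming and recompression handling in the hunk above is omitted):

    import os
    from collections import OrderedDict

    def classify_exported_patches(destdir, existing_patches):
        # existing_patches: patch basename -> absolute path in recipe space
        updated, added = OrderedDict(), OrderedDict()
        for dirpath, _, filenames in os.walk(destdir):
            reldir = os.path.relpath(dirpath, destdir)
            for patch in filenames:
                if patch in existing_patches:
                    updated[patch] = {'path': existing_patches[patch]}
                    if reldir != '.':
                        updated[patch]['patchdir'] = reldir
                else:
                    added[patch] = {} if reldir == '.' else {'patchdir': reldir}
        return updated, added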
@@ -1390,38 +1412,59 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1390 | 1. updated - files that already exist in SRCURI | 1412 | 1. updated - files that already exist in SRCURI |
1391 | 2. added - new files that don't exist in SRCURI | 1413 | 2. added - new files that don't exist in SRCURI |
1392 | 3 removed - files that exist in SRCURI but not in exported files | 1414 | 3 removed - files that exist in SRCURI but not in exported files |
1393 | In each dict the key is the 'basepath' of the URI and value is the | 1415 | In each dict the key is the 'basepath' of the URI and value is: |
1394 | absolute path to the existing file in recipe space (if any). | 1416 | - for updated and added dicts, a dict with 1 optional key: |
1417 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1418 | - for removed dict, the absolute path to the existing file in recipe space | ||
1395 | """ | 1419 | """ |
1396 | import oe.recipeutils | 1420 | import oe.recipeutils |
1421 | import bb.data | ||
1422 | import bb.process | ||
1397 | 1423 | ||
1398 | # Find out local files (SRC_URI files that exist in the "recipe space"). | 1424 | # Find out local files (SRC_URI files that exist in the "recipe space"). |
1399 | # Local files that reside in srctree are not included in patch generation. | 1425 | # Local files that reside in srctree are not included in patch generation. |
1400 | # Instead they are directly copied over the original source files (in | 1426 | # Instead they are directly copied over the original source files (in |
1401 | # recipe space). | 1427 | # recipe space). |
1402 | existing_files = oe.recipeutils.get_recipe_local_files(rd) | 1428 | existing_files = oe.recipeutils.get_recipe_local_files(rd) |
1429 | |||
1403 | new_set = None | 1430 | new_set = None |
1404 | updated = OrderedDict() | 1431 | updated = OrderedDict() |
1405 | added = OrderedDict() | 1432 | added = OrderedDict() |
1406 | removed = OrderedDict() | 1433 | removed = OrderedDict() |
1407 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1434 | |
1408 | git_files = _git_ls_tree(srctree) | 1435 | # Get current branch and return early with empty lists |
1409 | if 'oe-local-files' in git_files: | 1436 | # if on one of the override branches |
1410 | # If tracked by Git, take the files from srctree HEAD. First get | 1437 | # (local files are provided only for the main branch and processing |
1411 | # the tree object of the directory | 1438 | # them against lists from recipe overrides will result in mismatches |
1412 | tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') | 1439 | # and broken modifications to recipes). |
1413 | tree = git_files['oe-local-files'][2] | 1440 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', |
1414 | bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, | 1441 | cwd=srctree) |
1415 | env=dict(os.environ, GIT_WORK_TREE=destdir, | 1442 | branchname = stdout.rstrip() |
1416 | GIT_INDEX_FILE=tmp_index)) | 1443 | if branchname.startswith(override_branch_prefix): |
1417 | new_set = list(_git_ls_tree(srctree, tree, True).keys()) | 1444 | return (updated, added, removed) |
1418 | elif os.path.isdir(local_files_dir): | 1445 | |
1419 | # If not tracked by Git, just copy from working copy | 1446 | files = _git_modified(srctree) |
1420 | new_set = _ls_tree(local_files_dir) | 1447 | #if not files: |
1421 | bb.process.run(['cp', '-ax', | 1448 | # files = _ls_tree(srctree) |
1422 | os.path.join(local_files_dir, '.'), destdir]) | 1449 | for f in files: |
1423 | else: | 1450 | fullfile = os.path.join(srctree, f) |
1424 | new_set = [] | 1451 | if os.path.exists(os.path.join(fullfile, ".git")): |
1452 | # submodules handled elsewhere | ||
1453 | continue | ||
1454 | if f not in existing_files: | ||
1455 | added[f] = {} | ||
1456 | if os.path.isdir(os.path.join(srctree, f)): | ||
1457 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1458 | else: | ||
1459 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1460 | elif not os.path.exists(fullfile): | ||
1461 | removed[f] = existing_files[f] | ||
1462 | elif f in existing_files: | ||
1463 | updated[f] = {'path' : existing_files[f]} | ||
1464 | if os.path.isdir(os.path.join(srctree, f)): | ||
1465 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1466 | else: | ||
1467 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1425 | 1468 | ||
1426 | # Special handling for kernel config | 1469 | # Special handling for kernel config |
1427 | if bb.data.inherits_class('kernel-yocto', rd): | 1470 | if bb.data.inherits_class('kernel-yocto', rd): |
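Note (illustrative, not part of the diff): _export_local_files() no longer stages content through an oe-local-files directory; it asks git which files changed in the source tree and classifies each against the recipe's existing local files. A rough sketch of that classification, assuming a list of modified paths such as the _git_modified() helper used above would return (directory handling via copytree omitted):

    import os
    import shutil
    from collections import OrderedDict

    def classify_local_files(srctree, destdir, existing_files, modified):
        # existing_files: basepath -> absolute path in recipe space
        updated, added, removed = OrderedDict(), OrderedDict(), OrderedDict()
        for f in modified:
            fullfile = os.path.join(srctree, f)
            if os.path.exists(os.path.join(fullfile, '.git')):
                continue  # submodules are handled by the patch export path
            if f not in existing_files:
                added[f] = {}
                shutil.copy2(fullfile, os.path.join(destdir, f))
            elif not os.path.exists(fullfile):
                removed[f] = existing_files[f]
            else:
                updated[f] = {'path': existing_files[f]}
                shutil.copy2(fullfile, os.path.join(destdir, f))
        return updated, added, removed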
@@ -1429,17 +1472,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1429 | fragment_path = os.path.join(destdir, fragment_fn) | 1472 | fragment_path = os.path.join(destdir, fragment_fn) |
1430 | if _create_kconfig_diff(srctree, rd, fragment_path): | 1473 | if _create_kconfig_diff(srctree, rd, fragment_path): |
1431 | if os.path.exists(fragment_path): | 1474 | if os.path.exists(fragment_path): |
1432 | if fragment_fn not in new_set: | 1475 | if fragment_fn in removed: |
1433 | new_set.append(fragment_fn) | 1476 | del removed[fragment_fn] |
1434 | # Copy fragment to local-files | 1477 | if fragment_fn not in updated and fragment_fn not in added: |
1435 | if os.path.isdir(local_files_dir): | 1478 | added[fragment_fn] = {} |
1436 | shutil.copy2(fragment_path, local_files_dir) | ||
1437 | else: | 1479 | else: |
1438 | if fragment_fn in new_set: | 1480 | if fragment_fn in updated: |
1439 | new_set.remove(fragment_fn) | 1481 | removed[fragment_fn] = updated[fragment_fn] |
1440 | # Remove fragment from local-files | 1482 | del updated[fragment_fn] |
1441 | if os.path.exists(os.path.join(local_files_dir, fragment_fn)): | ||
1442 | os.unlink(os.path.join(local_files_dir, fragment_fn)) | ||
1443 | 1483 | ||
1444 | # Special handling for cml1, ccmake, etc bbclasses that generated | 1484 | # Special handling for cml1, ccmake, etc bbclasses that generated |
1445 | # configuration fragment files that are consumed as source files | 1485 | # configuration fragment files that are consumed as source files |
@@ -1447,42 +1487,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1447 | if bb.data.inherits_class(frag_class, rd): | 1487 | if bb.data.inherits_class(frag_class, rd): |
1448 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) | 1488 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) |
1449 | if os.path.exists(srcpath): | 1489 | if os.path.exists(srcpath): |
1450 | if frag_name not in new_set: | 1490 | if frag_name in removed: |
1451 | new_set.append(frag_name) | 1491 | del removed[frag_name] |
1492 | if frag_name not in updated: | ||
1493 | added[frag_name] = {} | ||
1452 | # copy fragment into destdir | 1494 | # copy fragment into destdir |
1453 | shutil.copy2(srcpath, destdir) | 1495 | shutil.copy2(srcpath, destdir) |
1454 | # copy fragment into local files if exists | 1496 | |
1455 | if os.path.isdir(local_files_dir): | ||
1456 | shutil.copy2(srcpath, local_files_dir) | ||
1457 | |||
1458 | if new_set is not None: | ||
1459 | for fname in new_set: | ||
1460 | if fname in existing_files: | ||
1461 | origpath = existing_files.pop(fname) | ||
1462 | workpath = os.path.join(local_files_dir, fname) | ||
1463 | if not filecmp.cmp(origpath, workpath): | ||
1464 | updated[fname] = origpath | ||
1465 | elif fname != '.gitignore': | ||
1466 | added[fname] = None | ||
1467 | |||
1468 | workdir = rd.getVar('WORKDIR') | ||
1469 | s = rd.getVar('S') | ||
1470 | if not s.endswith(os.sep): | ||
1471 | s += os.sep | ||
1472 | |||
1473 | if workdir != s: | ||
1474 | # Handle files where subdir= was specified | ||
1475 | for fname in list(existing_files.keys()): | ||
1476 | # FIXME handle both subdir starting with BP and not? | ||
1477 | fworkpath = os.path.join(workdir, fname) | ||
1478 | if fworkpath.startswith(s): | ||
1479 | fpath = os.path.join(srctree, os.path.relpath(fworkpath, s)) | ||
1480 | if os.path.exists(fpath): | ||
1481 | origpath = existing_files.pop(fname) | ||
1482 | if not filecmp.cmp(origpath, fpath): | ||
1483 | updated[fpath] = origpath | ||
1484 | |||
1485 | removed = existing_files | ||
1486 | return (updated, added, removed) | 1497 | return (updated, added, removed) |
1487 | 1498 | ||
1488 | 1499 | ||
@@ -1500,7 +1511,7 @@ def _determine_files_dir(rd): | |||
1500 | 1511 | ||
1501 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): | 1512 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): |
1502 | """Implement the 'srcrev' mode of update-recipe""" | 1513 | """Implement the 'srcrev' mode of update-recipe""" |
1503 | import bb | 1514 | import bb.process |
1504 | import oe.recipeutils | 1515 | import oe.recipeutils |
1505 | 1516 | ||
1506 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 1517 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
@@ -1509,6 +1520,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1509 | recipedir = os.path.basename(recipefile) | 1520 | recipedir = os.path.basename(recipefile) |
1510 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) | 1521 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) |
1511 | 1522 | ||
1523 | # Get original SRCREV | ||
1524 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1525 | if old_srcrev == "INVALID": | ||
1526 | raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository') | ||
1527 | old_srcrev = {'.': old_srcrev} | ||
1528 | |||
1512 | # Get HEAD revision | 1529 | # Get HEAD revision |
1513 | try: | 1530 | try: |
1514 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) | 1531 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) |
@@ -1532,16 +1549,16 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1532 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1549 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1533 | srctreebase = workspace[recipename]['srctreebase'] | 1550 | srctreebase = workspace[recipename]['srctreebase'] |
1534 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | 1551 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1552 | removedentries = {} | ||
1535 | if not no_remove: | 1553 | if not no_remove: |
1536 | # Find list of existing patches in recipe file | 1554 | # Find list of existing patches in recipe file |
1537 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1555 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1538 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1539 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, | 1556 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, |
1540 | patches_dir) | 1557 | patches_dir) |
1541 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) | 1558 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) |
1542 | 1559 | ||
1543 | # Remove deleted local files and "overlapping" patches | 1560 | # Remove deleted local files and "overlapping" patches |
1544 | remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values()) | 1561 | remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value] |
1545 | if remove_files: | 1562 | if remove_files: |
1546 | removedentries = _remove_file_entries(srcuri, remove_files)[0] | 1563 | removedentries = _remove_file_entries(srcuri, remove_files)[0] |
1547 | update_srcuri = True | 1564 | update_srcuri = True |
@@ -1555,14 +1572,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1555 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) | 1572 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) |
1556 | if dry_run_outdir: | 1573 | if dry_run_outdir: |
1557 | logger.info('Creating bbappend (dry-run)') | 1574 | logger.info('Creating bbappend (dry-run)') |
1558 | else: | 1575 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1559 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1576 | rd, appendlayerdir, files, wildcardver=wildcard_version, |
1560 | rd, appendlayerdir, files, wildcardver=wildcard_version, | 1577 | extralines=patchfields, removevalues=removevalues, |
1561 | extralines=patchfields, removevalues=removevalues, | 1578 | redirect_output=dry_run_outdir) |
1562 | redirect_output=dry_run_outdir) | ||
1563 | else: | 1579 | else: |
1564 | files_dir = _determine_files_dir(rd) | 1580 | files_dir = _determine_files_dir(rd) |
1565 | for basepath, path in upd_f.items(): | 1581 | for basepath, param in upd_f.items(): |
1582 | path = param['path'] | ||
1566 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) | 1583 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) |
1567 | if os.path.isabs(basepath): | 1584 | if os.path.isabs(basepath): |
1568 | # Original file (probably with subdir pointing inside source tree) | 1585 | # Original file (probably with subdir pointing inside source tree) |
@@ -1572,7 +1589,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1572 | _move_file(os.path.join(local_files_dir, basepath), path, | 1589 | _move_file(os.path.join(local_files_dir, basepath), path, |
1573 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1590 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1574 | update_srcuri = True | 1591 | update_srcuri = True |
1575 | for basepath, path in new_f.items(): | 1592 | for basepath, param in new_f.items(): |
1593 | path = param['path'] | ||
1576 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1594 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1577 | _move_file(os.path.join(local_files_dir, basepath), | 1595 | _move_file(os.path.join(local_files_dir, basepath), |
1578 | os.path.join(files_dir, basepath), | 1596 | os.path.join(files_dir, basepath), |
@@ -1595,7 +1613,6 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1595 | 1613 | ||
1596 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): | 1614 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): |
1597 | """Implement the 'patch' mode of update-recipe""" | 1615 | """Implement the 'patch' mode of update-recipe""" |
1598 | import bb | ||
1599 | import oe.recipeutils | 1616 | import oe.recipeutils |
1600 | 1617 | ||
1601 | recipefile = rd.getVar('FILE') | 1618 | recipefile = rd.getVar('FILE') |
@@ -1604,9 +1621,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1604 | if not os.path.exists(append): | 1621 | if not os.path.exists(append): |
1605 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % | 1622 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % |
1606 | recipename) | 1623 | recipename) |
1624 | srctreebase = workspace[recipename]['srctreebase'] | ||
1625 | relpatchdir = os.path.relpath(srctreebase, srctree) | ||
1626 | if relpatchdir == '.': | ||
1627 | patchdir_params = {} | ||
1628 | else: | ||
1629 | patchdir_params = {'patchdir': relpatchdir} | ||
1630 | |||
1631 | def srcuri_entry(basepath, patchdir_params): | ||
1632 | if patchdir_params: | ||
1633 | paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items()) | ||
1634 | else: | ||
1635 | paramstr = '' | ||
1636 | return 'file://%s%s' % (basepath, paramstr) | ||
1607 | 1637 | ||
1608 | initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) | 1638 | initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) |
1609 | if not initial_rev: | 1639 | if not initial_revs: |
1610 | raise DevtoolError('Unable to find initial revision - please specify ' | 1640 | raise DevtoolError('Unable to find initial revision - please specify ' |
1611 | 'it with --initial-rev') | 1641 | 'it with --initial-rev') |
1612 | 1642 | ||
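Note (illustrative, not part of the diff): srcuri_entry() builds the file:// URI with an optional patchdir parameter, which is how a patch exported from a submodule ends up applied in the right subtree. With hypothetical names:

    def srcuri_entry(basepath, patchdir_params):
        # Append ;key=value pairs only when parameters are present
        if patchdir_params:
            paramstr = ';' + ';'.join('%s=%s' % (k, v) for k, v in patchdir_params.items())
        else:
            paramstr = ''
        return 'file://%s%s' % (basepath, paramstr)

    srcuri_entry('0001-fix.patch', {})                          # 'file://0001-fix.patch'
    srcuri_entry('0001-fix.patch', {'patchdir': 'lib/submod'})  # 'file://0001-fix.patch;patchdir=lib/submod'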
@@ -1620,61 +1650,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1620 | tempdir = tempfile.mkdtemp(prefix='devtool') | 1650 | tempdir = tempfile.mkdtemp(prefix='devtool') |
1621 | try: | 1651 | try: |
1622 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1652 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1623 | if filter_patches: | 1653 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1624 | upd_f = {} | ||
1625 | new_f = {} | ||
1626 | del_f = {} | ||
1627 | else: | ||
1628 | srctreebase = workspace[recipename]['srctreebase'] | ||
1629 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | ||
1630 | |||
1631 | remove_files = [] | ||
1632 | if not no_remove: | ||
1633 | # Get all patches from source tree and check if any should be removed | ||
1634 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1635 | _, _, del_p = _export_patches(srctree, rd, initial_rev, | ||
1636 | all_patches_dir) | ||
1637 | # Remove deleted local files and patches | ||
1638 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1639 | 1654 | ||
1640 | # Get updated patches from source tree | 1655 | # Get updated patches from source tree |
1641 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1656 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1642 | upd_p, new_p, _ = _export_patches(srctree, rd, update_rev, | 1657 | upd_p, new_p, _ = _export_patches(srctree, rd, update_revs, |
1643 | patches_dir, changed_revs) | 1658 | patches_dir, changed_revs) |
1659 | # Get all patches from source tree and check if any should be removed | ||
1660 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1661 | _, _, del_p = _export_patches(srctree, rd, initial_revs, | ||
1662 | all_patches_dir) | ||
1644 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) | 1663 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) |
1645 | if filter_patches: | 1664 | if filter_patches: |
1646 | new_p = OrderedDict() | 1665 | new_p = OrderedDict() |
1647 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) | 1666 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) |
1648 | remove_files = [f for f in remove_files if f in filter_patches] | 1667 | del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches) |
1668 | remove_files = [] | ||
1669 | if not no_remove: | ||
1670 | # Remove deleted local files and patches | ||
1671 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1649 | updatefiles = False | 1672 | updatefiles = False |
1650 | updaterecipe = False | 1673 | updaterecipe = False |
1651 | destpath = None | 1674 | destpath = None |
1652 | srcuri = (rd.getVar('SRC_URI', False) or '').split() | 1675 | srcuri = (rd.getVar('SRC_URI', False) or '').split() |
1676 | |||
1653 | if appendlayerdir: | 1677 | if appendlayerdir: |
1654 | files = OrderedDict((os.path.join(local_files_dir, key), val) for | 1678 | files = OrderedDict((os.path.join(local_files_dir, key), val) for |
1655 | key, val in list(upd_f.items()) + list(new_f.items())) | 1679 | key, val in list(upd_f.items()) + list(new_f.items())) |
1656 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for | 1680 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for |
1657 | key, val in list(upd_p.items()) + list(new_p.items()))) | 1681 | key, val in list(upd_p.items()) + list(new_p.items()))) |
1682 | |||
1683 | params = [] | ||
1684 | for file, param in files.items(): | ||
1685 | patchdir_param = dict(patchdir_params) | ||
1686 | patchdir = param.get('patchdir', ".") | ||
1687 | if patchdir != "." : | ||
1688 | if patchdir_param: | ||
1689 | patchdir_param['patchdir'] += patchdir | ||
1690 | else: | ||
1691 | patchdir_param['patchdir'] = patchdir | ||
1692 | params.append(patchdir_param) | ||
1693 | |||
1658 | if files or remove_files: | 1694 | if files or remove_files: |
1659 | removevalues = None | 1695 | removevalues = None |
1660 | if remove_files: | 1696 | if remove_files: |
1661 | removedentries, remaining = _remove_file_entries( | 1697 | removedentries, remaining = _remove_file_entries( |
1662 | srcuri, remove_files) | 1698 | srcuri, remove_files) |
1663 | if removedentries or remaining: | 1699 | if removedentries or remaining: |
1664 | remaining = ['file://' + os.path.basename(item) for | 1700 | remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for |
1665 | item in remaining] | 1701 | item in remaining] |
1666 | removevalues = {'SRC_URI': removedentries + remaining} | 1702 | removevalues = {'SRC_URI': removedentries + remaining} |
1667 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1703 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1668 | rd, appendlayerdir, files, | 1704 | rd, appendlayerdir, files, |
1669 | wildcardver=wildcard_version, | 1705 | wildcardver=wildcard_version, |
1670 | removevalues=removevalues, | 1706 | removevalues=removevalues, |
1671 | redirect_output=dry_run_outdir) | 1707 | redirect_output=dry_run_outdir, |
1708 | params=params) | ||
1672 | else: | 1709 | else: |
1673 | logger.info('No patches or local source files needed updating') | 1710 | logger.info('No patches or local source files needed updating') |
1674 | else: | 1711 | else: |
1675 | # Update existing files | 1712 | # Update existing files |
1676 | files_dir = _determine_files_dir(rd) | 1713 | files_dir = _determine_files_dir(rd) |
1677 | for basepath, path in upd_f.items(): | 1714 | for basepath, param in upd_f.items(): |
1715 | path = param['path'] | ||
1678 | logger.info('Updating file %s' % basepath) | 1716 | logger.info('Updating file %s' % basepath) |
1679 | if os.path.isabs(basepath): | 1717 | if os.path.isabs(basepath): |
1680 | # Original file (probably with subdir pointing inside source tree) | 1718 | # Original file (probably with subdir pointing inside source tree) |
@@ -1685,14 +1723,23 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1685 | _move_file(os.path.join(local_files_dir, basepath), path, | 1723 | _move_file(os.path.join(local_files_dir, basepath), path, |
1686 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1724 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1687 | updatefiles = True | 1725 | updatefiles = True |
1688 | for basepath, path in upd_p.items(): | 1726 | for basepath, param in upd_p.items(): |
1689 | patchfn = os.path.join(patches_dir, basepath) | 1727 | path = param['path'] |
1728 | patchdir = param.get('patchdir', ".") | ||
1729 | patchdir_param = {} | ||
1730 | if patchdir != "." : | ||
1731 | patchdir_param = dict(patchdir_params) | ||
1732 | if patchdir_param: | ||
1733 | patchdir_param['patchdir'] += patchdir | ||
1734 | else: | ||
1735 | patchdir_param['patchdir'] = patchdir | ||
1736 | patchfn = os.path.join(patches_dir, patchdir, basepath) | ||
1690 | if os.path.dirname(path) + '/' == dl_dir: | 1737 | if os.path.dirname(path) + '/' == dl_dir: |
1691 | # This is a downloaded patch file - we now need to | 1738 | # This is a downloaded patch file - we now need to |
1692 | # replace the entry in SRC_URI with our local version | 1739 | # replace the entry in SRC_URI with our local version |
1693 | logger.info('Replacing remote patch %s with updated local version' % basepath) | 1740 | logger.info('Replacing remote patch %s with updated local version' % basepath) |
1694 | path = os.path.join(files_dir, basepath) | 1741 | path = os.path.join(files_dir, basepath) |
1695 | _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath) | 1742 | _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param)) |
1696 | updaterecipe = True | 1743 | updaterecipe = True |
1697 | else: | 1744 | else: |
1698 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) | 1745 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) |
@@ -1700,21 +1747,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1700 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1747 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1701 | updatefiles = True | 1748 | updatefiles = True |
1702 | # Add any new files | 1749 | # Add any new files |
1703 | for basepath, path in new_f.items(): | 1750 | for basepath, param in new_f.items(): |
1704 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1751 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1705 | _move_file(os.path.join(local_files_dir, basepath), | 1752 | _move_file(os.path.join(local_files_dir, basepath), |
1706 | os.path.join(files_dir, basepath), | 1753 | os.path.join(files_dir, basepath), |
1707 | dry_run_outdir=dry_run_outdir, | 1754 | dry_run_outdir=dry_run_outdir, |
1708 | base_outdir=recipedir) | 1755 | base_outdir=recipedir) |
1709 | srcuri.append('file://%s' % basepath) | 1756 | srcuri.append(srcuri_entry(basepath, patchdir_params)) |
1710 | updaterecipe = True | 1757 | updaterecipe = True |
1711 | for basepath, path in new_p.items(): | 1758 | for basepath, param in new_p.items(): |
1759 | patchdir = param.get('patchdir', ".") | ||
1712 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) | 1760 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) |
1713 | _move_file(os.path.join(patches_dir, basepath), | 1761 | _move_file(os.path.join(patches_dir, patchdir, basepath), |
1714 | os.path.join(files_dir, basepath), | 1762 | os.path.join(files_dir, basepath), |
1715 | dry_run_outdir=dry_run_outdir, | 1763 | dry_run_outdir=dry_run_outdir, |
1716 | base_outdir=recipedir) | 1764 | base_outdir=recipedir) |
1717 | srcuri.append('file://%s' % basepath) | 1765 | params = dict(patchdir_params) |
1766 | if patchdir != "." : | ||
1767 | if params: | ||
1768 | params['patchdir'] += patchdir | ||
1769 | else: | ||
1770 | params['patchdir'] = patchdir | ||
1771 | |||
1772 | srcuri.append(srcuri_entry(basepath, params)) | ||
1718 | updaterecipe = True | 1773 | updaterecipe = True |
1719 | # Update recipe, if needed | 1774 | # Update recipe, if needed |
1720 | if _remove_file_entries(srcuri, remove_files)[0]: | 1775 | if _remove_file_entries(srcuri, remove_files)[0]: |
@@ -1737,6 +1792,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1737 | 1792 | ||
1738 | def _guess_recipe_update_mode(srctree, rdata): | 1793 | def _guess_recipe_update_mode(srctree, rdata): |
1739 | """Guess the recipe update mode to use""" | 1794 | """Guess the recipe update mode to use""" |
1795 | import bb.process | ||
1740 | src_uri = (rdata.getVar('SRC_URI') or '').split() | 1796 | src_uri = (rdata.getVar('SRC_URI') or '').split() |
1741 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] | 1797 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] |
1742 | if not git_uris: | 1798 | if not git_uris: |
@@ -1758,6 +1814,8 @@ def _guess_recipe_update_mode(srctree, rdata): | |||
1758 | return 'patch' | 1814 | return 'patch' |
1759 | 1815 | ||
1760 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): | 1816 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): |
1817 | import bb.data | ||
1818 | import bb.process | ||
1761 | srctree = workspace[recipename]['srctree'] | 1819 | srctree = workspace[recipename]['srctree'] |
1762 | if mode == 'auto': | 1820 | if mode == 'auto': |
1763 | mode = _guess_recipe_update_mode(srctree, rd) | 1821 | mode = _guess_recipe_update_mode(srctree, rd) |
@@ -1771,6 +1829,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver | |||
1771 | for line in stdout.splitlines(): | 1829 | for line in stdout.splitlines(): |
1772 | branchname = line[2:] | 1830 | branchname = line[2:] |
1773 | if line.startswith('* '): | 1831 | if line.startswith('* '): |
1832 | if 'HEAD' in line: | ||
1833 | raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"') | ||
1774 | startbranch = branchname | 1834 | startbranch = branchname |
1775 | if branchname.startswith(override_branch_prefix): | 1835 | if branchname.startswith(override_branch_prefix): |
1776 | override_branches.append(branchname) | 1836 | override_branches.append(branchname) |
@@ -1878,6 +1938,7 @@ def status(args, config, basepath, workspace): | |||
1878 | 1938 | ||
1879 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | 1939 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): |
1880 | """Reset one or more recipes""" | 1940 | """Reset one or more recipes""" |
1941 | import bb.process | ||
1881 | import oe.path | 1942 | import oe.path |
1882 | 1943 | ||
1883 | def clean_preferred_provider(pn, layerconf_path): | 1944 | def clean_preferred_provider(pn, layerconf_path): |
@@ -1890,7 +1951,7 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1890 | lines = f.readlines() | 1951 | lines = f.readlines() |
1891 | with open(new_layerconf_file, 'a') as nf: | 1952 | with open(new_layerconf_file, 'a') as nf: |
1892 | for line in lines: | 1953 | for line in lines: |
1893 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$' | 1954 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$' |
1894 | if not re.match(pprovider_exp, line): | 1955 | if not re.match(pprovider_exp, line): |
1895 | nf.write(line) | 1956 | nf.write(line) |
1896 | else: | 1957 | else: |
@@ -1960,9 +2021,19 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1960 | shutil.rmtree(srctreebase) | 2021 | shutil.rmtree(srctreebase) |
1961 | else: | 2022 | else: |
1962 | # We don't want to risk wiping out any work in progress | 2023 | # We don't want to risk wiping out any work in progress |
1963 | logger.info('Leaving source tree %s as-is; if you no ' | 2024 | if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')): |
1964 | 'longer need it then please delete it manually' | 2025 | from datetime import datetime |
1965 | % srctreebase) | 2026 | preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S"))) |
2027 | logger.info('Preserving source tree in %s\nIf you no ' | ||
2028 | 'longer need it then please delete it manually.\n' | ||
2029 | 'It is also possible to reuse it via devtool source tree argument.' | ||
2030 | % preservesrc) | ||
2031 | bb.utils.mkdirhier(os.path.dirname(preservesrc)) | ||
2032 | shutil.move(srctreebase, preservesrc) | ||
2033 | else: | ||
2034 | logger.info('Leaving source tree %s as-is; if you no ' | ||
2035 | 'longer need it then please delete it manually' | ||
2036 | % srctreebase) | ||
1966 | else: | 2037 | else: |
1967 | # This is unlikely, but if it's empty we can just remove it | 2038 | # This is unlikely, but if it's empty we can just remove it |
1968 | os.rmdir(srctreebase) | 2039 | os.rmdir(srctreebase) |
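To illustrate the preservation path introduced above (the workspace root and recipe name below are hypothetical):

    import os
    from datetime import datetime

    workspace_path = 'build/workspace'   # hypothetical devtool workspace root
    pn = 'example-recipe'                # hypothetical recipe name

    preservesrc = os.path.join(workspace_path, 'attic', 'sources',
                               "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
    print(preservesrc)
    # e.g. build/workspace/attic/sources/example-recipe.20240101120000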
@@ -1971,8 +2042,6 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1971 | 2042 | ||
1972 | def reset(args, config, basepath, workspace): | 2043 | def reset(args, config, basepath, workspace): |
1973 | """Entry point for the devtool 'reset' subcommand""" | 2044 | """Entry point for the devtool 'reset' subcommand""" |
1974 | import bb | ||
1975 | import shutil | ||
1976 | 2045 | ||
1977 | recipes = "" | 2046 | recipes = "" |
1978 | 2047 | ||
@@ -2222,6 +2291,7 @@ def register_commands(subparsers, context): | |||
2222 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") | 2291 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") |
2223 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') | 2292 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') |
2224 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") | 2293 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") |
2294 | parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true") | ||
2225 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') | 2295 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') |
2226 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") | 2296 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") |
2227 | group = parser_add.add_mutually_exclusive_group() | 2297 | group = parser_add.add_mutually_exclusive_group() |
@@ -2250,6 +2320,7 @@ def register_commands(subparsers, context): | |||
2250 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') | 2320 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') |
2251 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') | 2321 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') |
2252 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") | 2322 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") |
2323 | parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe') | ||
2253 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) | 2324 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) |
2254 | 2325 | ||
2255 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', | 2326 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', |
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py index 826a3f955f..d9aca6e2db 100644 --- a/scripts/lib/devtool/upgrade.py +++ b/scripts/lib/devtool/upgrade.py | |||
@@ -32,9 +32,11 @@ def _run(cmd, cwd=''): | |||
32 | 32 | ||
33 | def _get_srctree(tmpdir): | 33 | def _get_srctree(tmpdir): |
34 | srctree = tmpdir | 34 | srctree = tmpdir |
35 | dirs = scriptutils.filter_src_subdirs(tmpdir) | 35 | dirs = os.listdir(tmpdir) |
36 | if len(dirs) == 1: | 36 | if len(dirs) == 1: |
37 | srctree = os.path.join(tmpdir, dirs[0]) | 37 | srctree = os.path.join(tmpdir, dirs[0]) |
38 | else: | ||
39 | raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir,dirs)) | ||
38 | return srctree | 40 | return srctree |
39 | 41 | ||
40 | def _copy_source_code(orig, dest): | 42 | def _copy_source_code(orig, dest): |
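The tightened _get_srctree() behaviour can be exercised standalone; a minimal sketch:

    import os, tempfile

    def get_srctree(tmpdir):
        # Exactly one unpacked subdirectory is accepted; anything else is an error.
        dirs = os.listdir(tmpdir)
        if len(dirs) == 1:
            return os.path.join(tmpdir, dirs[0])
        raise RuntimeError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir, dirs))

    with tempfile.TemporaryDirectory() as tmpdir:
        os.mkdir(os.path.join(tmpdir, 'example-1.0'))
        print(get_srctree(tmpdir))   # -> <tmpdir>/example-1.0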
@@ -74,21 +76,21 @@ def _rename_recipe_dirs(oldpv, newpv, path): | |||
74 | bb.utils.rename(os.path.join(path, oldfile), | 76 | bb.utils.rename(os.path.join(path, oldfile), |
75 | os.path.join(path, newfile)) | 77 | os.path.join(path, newfile)) |
76 | 78 | ||
77 | def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): | 79 | def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path): |
78 | oldrecipe = os.path.basename(oldrecipe) | 80 | oldrecipe = os.path.basename(oldrecipe) |
79 | if oldrecipe.endswith('_%s.bb' % oldpv): | 81 | if oldrecipe.endswith('_%s.bb' % oldpv): |
80 | newrecipe = '%s_%s.bb' % (bpn, newpv) | 82 | newrecipe = '%s_%s.bb' % (pn, newpv) |
81 | if oldrecipe != newrecipe: | 83 | if oldrecipe != newrecipe: |
82 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) | 84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) |
83 | else: | 85 | else: |
84 | newrecipe = oldrecipe | 86 | newrecipe = oldrecipe |
85 | return os.path.join(path, newrecipe) | 87 | return os.path.join(path, newrecipe) |
86 | 88 | ||
87 | def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path): | 89 | def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path): |
88 | _rename_recipe_dirs(oldpv, newpv, path) | 90 | _rename_recipe_dirs(oldpv, newpv, path) |
89 | return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) | 91 | return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path) |
90 | 92 | ||
91 | def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d): | 93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): |
92 | """Writes an append file""" | 94 | """Writes an append file""" |
93 | if not os.path.exists(rc): | 95 | if not os.path.exists(rc): |
94 | raise DevtoolError("bbappend not created because %s does not exist" % rc) | 96 | raise DevtoolError("bbappend not created because %s does not exist" % rc) |
@@ -104,6 +106,11 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d) | |||
104 | af = os.path.join(appendpath, '%s.bbappend' % brf) | 106 | af = os.path.join(appendpath, '%s.bbappend' % brf) |
105 | with open(af, 'w') as f: | 107 | with open(af, 'w') as f: |
106 | f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n') | 108 | f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n') |
109 | # Local files can be modified/tracked in separate subdir under srctree | ||

110 | # Mostly useful for packages with S != WORKDIR | ||
111 | f.write('FILESPATH:prepend := "%s:"\n' % | ||
112 | os.path.join(srctreebase, 'oe-local-files')) | ||
113 | f.write('# srctreebase: %s\n' % srctreebase) | ||
107 | f.write('inherit externalsrc\n') | 114 | f.write('inherit externalsrc\n') |
108 | f.write(('# NOTE: We use pn- overrides here to avoid affecting ' | 115
109 | 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n')) | 116 | 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n')) |
@@ -112,27 +119,24 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d) | |||
112 | if b_is_s: | 119 | if b_is_s: |
113 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) | 120 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
114 | f.write('\n') | 121 | f.write('\n') |
115 | if rev: | 122 | if revs: |
116 | f.write('# initial_rev: %s\n' % rev) | 123 | for name, rev in revs.items(): |
124 | f.write('# initial_rev %s: %s\n' % (name, rev)) | ||
117 | if copied: | 125 | if copied: |
118 | f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE'))) | 126 | f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE'))) |
119 | f.write('# original_files: %s\n' % ' '.join(copied)) | 127 | f.write('# original_files: %s\n' % ' '.join(copied)) |
120 | return af | 128 | return af |
121 | 129 | ||
122 | def _cleanup_on_error(rf, srctree): | 130 | def _cleanup_on_error(rd, srctree): |
123 | rfp = os.path.split(rf)[0] # recipe folder | 131 | if os.path.exists(rd): |
124 | rfpp = os.path.split(rfp)[0] # recipes folder | 132 | shutil.rmtree(rd) |
125 | if os.path.exists(rfp): | ||
126 | shutil.rmtree(rfp) | ||
127 | if not len(os.listdir(rfpp)): | ||
128 | os.rmdir(rfpp) | ||
129 | srctree = os.path.abspath(srctree) | 133 | srctree = os.path.abspath(srctree) |
130 | if os.path.exists(srctree): | 134 | if os.path.exists(srctree): |
131 | shutil.rmtree(srctree) | 135 | shutil.rmtree(srctree) |
132 | 136 | ||
133 | def _upgrade_error(e, rf, srctree, keep_failure=False, extramsg=None): | 137 | def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None): |
134 | if rf and not keep_failure: | 138 | if not keep_failure: |
135 | _cleanup_on_error(rf, srctree) | 139 | _cleanup_on_error(rd, srctree) |
136 | logger.error(e) | 140 | logger.error(e) |
137 | if extramsg: | 141 | if extramsg: |
138 | logger.error(extramsg) | 142 | logger.error(extramsg) |
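With the revs mapping introduced in the hunk above (one entry for the top-level tree plus one per submodule), the bbappend gains one initial_rev comment per tree; a sketch with made-up SHA-1 values:

    revs = {
        '.': '0123456789abcdef0123456789abcdef01234567',                # top-level source tree
        'third_party/lib': '89abcdef0123456789abcdef0123456789abcdef',  # a hypothetical submodule
    }
    for name, rev in revs.items():
        print('# initial_rev %s: %s' % (name, rev))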
@@ -165,6 +169,7 @@ def _get_uri(rd): | |||
165 | 169 | ||
166 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): | 170 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): |
167 | """Extract sources of a recipe with a new version""" | 171 | """Extract sources of a recipe with a new version""" |
172 | import oe.patch | ||
168 | 173 | ||
169 | def __run(cmd): | 174 | def __run(cmd): |
170 | """Simple wrapper which calls _run with srctree as cwd""" | 175 | """Simple wrapper which calls _run with srctree as cwd""" |
@@ -179,12 +184,16 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
179 | uri, rev = _get_uri(crd) | 184 | uri, rev = _get_uri(crd) |
180 | if srcrev: | 185 | if srcrev: |
181 | rev = srcrev | 186 | rev = srcrev |
187 | paths = [srctree] | ||
182 | if uri.startswith('git://') or uri.startswith('gitsm://'): | 188 | if uri.startswith('git://') or uri.startswith('gitsm://'): |
183 | __run('git fetch') | 189 | __run('git fetch') |
184 | __run('git checkout %s' % rev) | 190 | __run('git checkout %s' % rev) |
185 | __run('git tag -f devtool-base-new') | 191 | __run('git tag -f --no-sign devtool-base-new') |
186 | md5 = None | 192 | __run('git submodule update --recursive') |
187 | sha256 = None | 193 | __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'') |
194 | (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') | ||
195 | paths += [os.path.join(srctree, p) for p in stdout.splitlines()] | ||
196 | checksums = {} | ||
188 | _, _, _, _, _, params = bb.fetch2.decodeurl(uri) | 197 | _, _, _, _, _, params = bb.fetch2.decodeurl(uri) |
189 | srcsubdir_rel = params.get('destsuffix', 'git') | 198 | srcsubdir_rel = params.get('destsuffix', 'git') |
190 | if not srcbranch: | 199 | if not srcbranch: |
@@ -192,14 +201,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
192 | get_branch = [x.strip() for x in check_branch.splitlines()] | 201 | get_branch = [x.strip() for x in check_branch.splitlines()] |
193 | # Remove HEAD reference point and drop remote prefix | 202 | # Remove HEAD reference point and drop remote prefix |
194 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] | 203 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] |
195 | if 'master' in get_branch: | 204 | if len(get_branch) == 1: |
196 | # If it is master, we do not need to append 'branch=master' as this is default. | 205 | # If srcrev is on only ONE branch, then use that branch |
197 | # Even with the case where get_branch has multiple objects, if 'master' is one | ||
198 | # of them, we should default take from 'master' | ||
199 | srcbranch = '' | ||
200 | elif len(get_branch) == 1: | ||
201 | # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' | ||
202 | srcbranch = get_branch[0] | 206 | srcbranch = get_branch[0] |
207 | elif 'main' in get_branch: | ||
208 | # If srcrev is on multiple branches, then choose 'main' if it is one of them | ||
209 | srcbranch = 'main' | ||
210 | elif 'master' in get_branch: | ||
211 | # Otherwise choose 'master' if it is one of the branches | ||
212 | srcbranch = 'master' | ||
203 | else: | 213 | else: |
204 | # If get_branch contains more than one objects, then display error and exit. | 214 | # If get_branch contains more than one objects, then display error and exit. |
205 | mbrch = '\n ' + '\n '.join(get_branch) | 215 | mbrch = '\n ' + '\n '.join(get_branch) |
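The new branch-selection order (single branch, then 'main', then 'master') as a standalone sketch:

    def pick_srcbranch(get_branch):
        if len(get_branch) == 1:
            return get_branch[0]          # srcrev is on only one branch
        if 'main' in get_branch:
            return 'main'                 # prefer 'main' when the revision is on several branches
        if 'master' in get_branch:
            return 'master'
        raise RuntimeError('srcrev is on multiple branches: %s' % ', '.join(get_branch))

    print(pick_srcbranch(['main', 'release-1.2']))   # -> main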
@@ -216,9 +226,6 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
216 | if ftmpdir and keep_temp: | 226 | if ftmpdir and keep_temp: |
217 | logger.info('Fetch temp directory is %s' % ftmpdir) | 227 | logger.info('Fetch temp directory is %s' % ftmpdir) |
218 | 228 | ||
219 | md5 = checksums['md5sum'] | ||
220 | sha256 = checksums['sha256sum'] | ||
221 | |||
222 | tmpsrctree = _get_srctree(tmpdir) | 229 | tmpsrctree = _get_srctree(tmpdir) |
223 | srctree = os.path.abspath(srctree) | 230 | srctree = os.path.abspath(srctree) |
224 | srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir) | 231 | srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir) |
@@ -250,31 +257,52 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
250 | useroptions = [] | 257 | useroptions = [] |
251 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) | 258 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) |
252 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) | 259 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) |
253 | __run('git tag -f devtool-base-%s' % newpv) | 260 | __run('git tag -f --no-sign devtool-base-%s' % newpv) |
254 | 261 | ||
255 | (stdout, _) = __run('git rev-parse HEAD') | 262 | revs = {} |
256 | rev = stdout.rstrip() | 263 | for path in paths: |
264 | (stdout, _) = _run('git rev-parse HEAD', cwd=path) | ||
265 | revs[os.path.relpath(path, srctree)] = stdout.rstrip() | ||
257 | 266 | ||
258 | if no_patch: | 267 | if no_patch: |
259 | patches = oe.recipeutils.get_recipe_patches(crd) | 268 | patches = oe.recipeutils.get_recipe_patches(crd) |
260 | if patches: | 269 | if patches: |
261 | logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches])) | 270 | logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches])) |
262 | else: | 271 | else: |
263 | __run('git checkout devtool-patched -b %s' % branch) | 272 | for path in paths: |
264 | (stdout, _) = __run('git branch --list devtool-override-*') | 273 | _run('git checkout devtool-patched -b %s' % branch, cwd=path) |
265 | branches_to_rebase = [branch] + stdout.split() | 274 | (stdout, _) = _run('git branch --list devtool-override-*', cwd=path) |
266 | for b in branches_to_rebase: | 275 | branches_to_rebase = [branch] + stdout.split() |
267 | logger.info("Rebasing {} onto {}".format(b, rev)) | 276 | target_branch = revs[os.path.relpath(path, srctree)] |
268 | __run('git checkout %s' % b) | 277 | |
269 | try: | 278 | # There is a bug (or feature?) in git rebase where if a commit with |
270 | __run('git rebase %s' % rev) | 279 | # a note is fully rebased away by being part of an old commit, the |
271 | except bb.process.ExecutionError as e: | 280 | # note is still attached to the old commit. Avoid this by making |
272 | if 'conflict' in e.stdout: | 281 | # sure all old devtool related commits have a note attached to them |
273 | logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip())) | 282 | # (this assumes git config notes.rewriteMode is set to ignore). |
274 | __run('git rebase --abort') | 283 | (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch) |
275 | else: | 284 | for rev in stdout.splitlines(): |
276 | logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) | 285 | if not oe.patch.GitApplyTree.getNotes(path, rev): |
277 | __run('git checkout %s' % branch) | 286 | oe.patch.GitApplyTree.addNote(path, rev, "dummy") |
287 | |||
288 | for b in branches_to_rebase: | ||
289 | logger.info("Rebasing {} onto {}".format(b, target_branch)) | ||
290 | _run('git checkout %s' % b, cwd=path) | ||
291 | try: | ||
292 | _run('git rebase %s' % target_branch, cwd=path) | ||
293 | except bb.process.ExecutionError as e: | ||
294 | if 'conflict' in e.stdout: | ||
295 | logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip())) | ||
296 | _run('git rebase --abort', cwd=path) | ||
297 | else: | ||
298 | logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) | ||
299 | |||
300 | # Remove any dummy notes added above. | ||
301 | (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch) | ||
302 | for rev in stdout.splitlines(): | ||
303 | oe.patch.GitApplyTree.removeNote(path, rev, "dummy") | ||
304 | |||
305 | _run('git checkout %s' % branch, cwd=path) | ||
278 | 306 | ||
279 | if tmpsrctree: | 307 | if tmpsrctree: |
280 | if keep_temp: | 308 | if keep_temp: |
@@ -284,7 +312,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
284 | if tmpdir != tmpsrctree: | 312 | if tmpdir != tmpsrctree: |
285 | shutil.rmtree(tmpdir) | 313 | shutil.rmtree(tmpdir) |
286 | 314 | ||
287 | return (rev, md5, sha256, srcbranch, srcsubdir_rel) | 315 | return (revs, checksums, srcbranch, srcsubdir_rel) |
288 | 316 | ||
289 | def _add_license_diff_to_recipe(path, diff): | 317 | def _add_license_diff_to_recipe(path, diff): |
290 | notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'. | 318 | notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'. |
@@ -305,22 +333,22 @@ def _add_license_diff_to_recipe(path, diff): | |||
305 | f.write("\n#\n\n".encode()) | 333 | f.write("\n#\n\n".encode()) |
306 | f.write(orig_content) | 334 | f.write(orig_content) |
307 | 335 | ||
308 | def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): | 336 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): |
309 | """Creates the new recipe under workspace""" | 337 | """Creates the new recipe under workspace""" |
310 | 338 | ||
311 | bpn = rd.getVar('BPN') | 339 | pn = rd.getVar('PN') |
312 | path = os.path.join(workspace, 'recipes', bpn) | 340 | path = os.path.join(workspace, 'recipes', pn) |
313 | bb.utils.mkdirhier(path) | 341 | bb.utils.mkdirhier(path) |
314 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) | 342 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) |
315 | if not copied: | 343 | if not copied: |
316 | raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn) | 344 | raise DevtoolError('Internal error - no files were copied for recipe %s' % pn) |
317 | logger.debug('Copied %s to %s' % (copied, path)) | 345 | logger.debug('Copied %s to %s' % (copied, path)) |
318 | 346 | ||
319 | oldpv = rd.getVar('PV') | 347 | oldpv = rd.getVar('PV') |
320 | if not newpv: | 348 | if not newpv: |
321 | newpv = oldpv | 349 | newpv = oldpv |
322 | origpath = rd.getVar('FILE') | 350 | origpath = rd.getVar('FILE') |
323 | fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path) | 351 | fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path) |
324 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) | 352 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) |
325 | 353 | ||
326 | newvalues = {} | 354 | newvalues = {} |
@@ -336,7 +364,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
336 | replacing = True | 364 | replacing = True |
337 | new_src_uri = [] | 365 | new_src_uri = [] |
338 | for entry in src_uri: | 366 | for entry in src_uri: |
339 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry) | 367 | try: |
368 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry) | ||
369 | except bb.fetch2.MalformedUrl as e: | ||
370 | raise DevtoolError("Could not decode SRC_URI: {}".format(e)) | ||
340 | if replacing and scheme in ['git', 'gitsm']: | 371 | if replacing and scheme in ['git', 'gitsm']: |
341 | branch = params.get('branch', 'master') | 372 | branch = params.get('branch', 'master') |
342 | if rd.expand(branch) != srcbranch: | 373 | if rd.expand(branch) != srcbranch: |
@@ -374,30 +405,39 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
374 | addnames.append(params['name']) | 405 | addnames.append(params['name']) |
375 | # Find what's been set in the original recipe | 406 | # Find what's been set in the original recipe |
376 | oldnames = [] | 407 | oldnames = [] |
408 | oldsums = [] | ||
377 | noname = False | 409 | noname = False |
378 | for varflag in rd.getVarFlags('SRC_URI'): | 410 | for varflag in rd.getVarFlags('SRC_URI'): |
379 | if varflag.endswith(('.md5sum', '.sha256sum')): | 411 | for checksum in checksums: |
380 | name = varflag.rsplit('.', 1)[0] | 412 | if varflag.endswith('.' + checksum): |
381 | if name not in oldnames: | 413 | name = varflag.rsplit('.', 1)[0] |
382 | oldnames.append(name) | 414 | if name not in oldnames: |
383 | elif varflag in ['md5sum', 'sha256sum']: | 415 | oldnames.append(name) |
384 | noname = True | 416 | oldsums.append(checksum) |
417 | elif varflag == checksum: | ||
418 | noname = True | ||
419 | oldsums.append(checksum) | ||
385 | # Even if SRC_URI has named entries it doesn't have to actually use the name | 420 | # Even if SRC_URI has named entries it doesn't have to actually use the name |
386 | if noname and addnames and addnames[0] not in oldnames: | 421 | if noname and addnames and addnames[0] not in oldnames: |
387 | addnames = [] | 422 | addnames = [] |
388 | # Drop any old names (the name actually might include ${PV}) | 423 | # Drop any old names (the name actually might include ${PV}) |
389 | for name in oldnames: | 424 | for name in oldnames: |
390 | if name not in newnames: | 425 | if name not in newnames: |
391 | newvalues['SRC_URI[%s.md5sum]' % name] = None | 426 | for checksum in oldsums: |
392 | newvalues['SRC_URI[%s.sha256sum]' % name] = None | 427 | newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None |
393 | 428 | ||
394 | if sha256: | 429 | nameprefix = '%s.' % addnames[0] if addnames else '' |
395 | if addnames: | 430 | |
396 | nameprefix = '%s.' % addnames[0] | 431 | # md5sum is deprecated, remove any traces of it. If it was the only old |
397 | else: | 432 | # checksum, then replace it with the default checksums. |
398 | nameprefix = '' | 433 | if 'md5sum' in oldsums: |
399 | newvalues['SRC_URI[%smd5sum]' % nameprefix] = None | 434 | newvalues['SRC_URI[%smd5sum]' % nameprefix] = None |
400 | newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256 | 435 | oldsums.remove('md5sum') |
436 | if not oldsums: | ||
437 | oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST] | ||
438 | |||
439 | for checksum in oldsums: | ||
440 | newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum] | ||
401 | 441 | ||
402 | if srcsubdir_new != srcsubdir_old: | 442 | if srcsubdir_new != srcsubdir_old: |
403 | s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR')) | 443 | s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR')) |
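A hedged walk-through of the checksum rewrite above, assuming bb.fetch2.SHOWN_CHECKSUM_LIST reduces to sha256 only (as in current BitBake) and using a made-up checksum value:

    checksums = {'sha256sum': 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'}
    oldsums = ['md5sum', 'sha256sum']   # checksum flags found in the old recipe
    nameprefix = ''                     # no named SRC_URI entries in this example
    newvalues = {}

    # md5sum is deprecated: always drop it, falling back to the default list
    # if it was the only checksum present.
    if 'md5sum' in oldsums:
        newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
        oldsums.remove('md5sum')
    if not oldsums:
        oldsums = ['sha256sum']

    for checksum in oldsums:
        newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]

    print(newvalues)
    # {'SRC_URI[md5sum]': None, 'SRC_URI[sha256sum]': 'deadbeef...'}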
@@ -422,10 +462,11 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
422 | newvalues["LIC_FILES_CHKSUM"] = newlicchksum | 462 | newvalues["LIC_FILES_CHKSUM"] = newlicchksum |
423 | _add_license_diff_to_recipe(fullpath, license_diff) | 463 | _add_license_diff_to_recipe(fullpath, license_diff) |
424 | 464 | ||
465 | tinfoil.modified_files() | ||
425 | try: | 466 | try: |
426 | rd = tinfoil.parse_recipe_file(fullpath, False) | 467 | rd = tinfoil.parse_recipe_file(fullpath, False) |
427 | except bb.tinfoil.TinfoilCommandFailed as e: | 468 | except bb.tinfoil.TinfoilCommandFailed as e: |
428 | _upgrade_error(e, fullpath, srctree, keep_failure, 'Parsing of upgraded recipe failed') | 469 | _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed') |
429 | oe.recipeutils.patch_recipe(rd, fullpath, newvalues) | 470 | oe.recipeutils.patch_recipe(rd, fullpath, newvalues) |
430 | 471 | ||
431 | return fullpath, copied | 472 | return fullpath, copied |
@@ -434,7 +475,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
434 | def _check_git_config(): | 475 | def _check_git_config(): |
435 | def getconfig(name): | 476 | def getconfig(name): |
436 | try: | 477 | try: |
437 | value = bb.process.run('git config --global %s' % name)[0].strip() | 478 | value = bb.process.run('git config %s' % name)[0].strip() |
438 | except bb.process.ExecutionError as e: | 479 | except bb.process.ExecutionError as e: |
439 | if e.exitcode == 1: | 480 | if e.exitcode == 1: |
440 | value = None | 481 | value = None |
@@ -494,6 +535,15 @@ def _generate_license_diff(old_licenses, new_licenses): | |||
494 | diff = diff + line | 535 | diff = diff + line |
495 | return diff | 536 | return diff |
496 | 537 | ||
538 | def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil): | ||
539 | tasks = [] | ||
540 | for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split(): | ||
541 | logger.info('Running extra recipe upgrade task: %s' % task) | ||
542 | res = tinfoil.build_targets(pn, task, handle_events=True) | ||
543 | |||
544 | if not res: | ||
545 | raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn)) | ||
546 | |||
497 | def upgrade(args, config, basepath, workspace): | 547 | def upgrade(args, config, basepath, workspace): |
498 | """Entry point for the devtool 'upgrade' subcommand""" | 548 | """Entry point for the devtool 'upgrade' subcommand""" |
499 | 549 | ||
@@ -521,14 +571,7 @@ def upgrade(args, config, basepath, workspace): | |||
521 | else: | 571 | else: |
522 | srctree = standard.get_default_srctree(config, pn) | 572 | srctree = standard.get_default_srctree(config, pn) |
523 | 573 | ||
524 | # Check that recipe isn't using a shared workdir | 574 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) |
525 | s = os.path.abspath(rd.getVar('S')) | ||
526 | workdir = os.path.abspath(rd.getVar('WORKDIR')) | ||
527 | srctree_s = srctree | ||
528 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | ||
529 | # Handle if S is set to a subdirectory of the source | ||
530 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | ||
531 | srctree_s = os.path.join(srctree, srcsubdir) | ||
532 | 575 | ||
533 | # try to automatically discover latest version and revision if not provided on command line | 576 | # try to automatically discover latest version and revision if not provided on command line |
534 | if not args.version and not args.srcrev: | 577 | if not args.version and not args.srcrev: |
@@ -561,22 +604,23 @@ def upgrade(args, config, basepath, workspace): | |||
561 | rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) | 604 | rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) |
562 | old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or "")) | 605 | old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or "")) |
563 | logger.info('Extracting upgraded version source...') | 606 | logger.info('Extracting upgraded version source...') |
564 | rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch, | 607 | rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch, |
565 | args.srcrev, args.srcbranch, args.branch, args.keep_temp, | 608 | args.srcrev, args.srcbranch, args.branch, args.keep_temp, |
566 | tinfoil, rd) | 609 | tinfoil, rd) |
567 | new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or "")) | 610 | new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or "")) |
568 | license_diff = _generate_license_diff(old_licenses, new_licenses) | 611 | license_diff = _generate_license_diff(old_licenses, new_licenses) |
569 | rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) | 612 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) |
570 | except bb.process.CmdError as e: | 613 | except (bb.process.CmdError, DevtoolError) as e: |
571 | _upgrade_error(e, rf, srctree, args.keep_failure) | 614 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN')) |
572 | except DevtoolError as e: | 615 | _upgrade_error(e, recipedir, srctree, args.keep_failure) |
573 | _upgrade_error(e, rf, srctree, args.keep_failure) | ||
574 | standard._add_md5(config, pn, os.path.dirname(rf)) | 616 | standard._add_md5(config, pn, os.path.dirname(rf)) |
575 | 617 | ||
576 | af = _write_append(rf, srctree_s, args.same_dir, args.no_same_dir, rev2, | 618 | af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2, |
577 | copied, config.workspace_path, rd) | 619 | copied, config.workspace_path, rd) |
578 | standard._add_md5(config, pn, af) | 620 | standard._add_md5(config, pn, af) |
579 | 621 | ||
622 | _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil) | ||
623 | |||
580 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 624 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
581 | 625 | ||
582 | logger.info('Upgraded source extracted to %s' % srctree) | 626 | logger.info('Upgraded source extracted to %s' % srctree) |
@@ -611,18 +655,28 @@ def latest_version(args, config, basepath, workspace): | |||
611 | return 0 | 655 | return 0 |
612 | 656 | ||
613 | def check_upgrade_status(args, config, basepath, workspace): | 657 | def check_upgrade_status(args, config, basepath, workspace): |
658 | def _print_status(recipe): | ||
659 | print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'], | ||
660 | recipe['cur_ver'], | ||
661 | recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"), | ||
662 | recipe['maintainer'], | ||
663 | recipe['revision'] if recipe['revision'] != 'N/A' else "", | ||
664 | "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else "")) | ||
614 | if not args.recipe: | 665 | if not args.recipe: |
615 | logger.info("Checking the upstream status for all recipes may take a few minutes") | 666 | logger.info("Checking the upstream status for all recipes may take a few minutes") |
616 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) | 667 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) |
617 | for result in results: | 668 | for recipegroup in results: |
618 | # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason | 669 | upgrades = [r for r in recipegroup if r['status'] != 'MATCH'] |
619 | if args.all or result[1] != 'MATCH': | 670 | currents = [r for r in recipegroup if r['status'] == 'MATCH'] |
620 | logger.info("{:25} {:15} {:15} {} {} {}".format( result[0], | 671 | if len(upgrades) > 1: |
621 | result[2], | 672 | print("These recipes need to be upgraded together {") |
622 | result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), | 673 | for r in sorted(upgrades, key=lambda r:r['pn']): |
623 | result[4], | 674 | _print_status(r) |
624 | result[5] if result[5] != 'N/A' else "", | 675 | if len(upgrades) > 1: |
625 | "cannot be updated due to: %s" %(result[6]) if result[6] else "")) | 676 | print("}") |
677 | for r in currents: | ||
678 | if args.all: | ||
679 | _print_status(r) | ||
626 | 680 | ||
627 | def register_commands(subparsers, context): | 681 | def register_commands(subparsers, context): |
628 | """Register devtool subcommands from this plugin""" | 682 | """Register devtool subcommands from this plugin""" |
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py index 964817766b..bf39f71b11 100644 --- a/scripts/lib/devtool/utilcmds.py +++ b/scripts/lib/devtool/utilcmds.py | |||
@@ -64,7 +64,7 @@ def configure_help(args, config, basepath, workspace): | |||
64 | b = rd.getVar('B') | 64 | b = rd.getVar('B') |
65 | s = rd.getVar('S') | 65 | s = rd.getVar('S') |
66 | configurescript = os.path.join(s, 'configure') | 66 | configurescript = os.path.join(s, 'configure') |
67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or []) | 67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd)) |
68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') | 68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') |
69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') | 69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') |
70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') | 70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') |
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py index 88ed8c5f01..041d79f162 100644 --- a/scripts/lib/recipetool/append.py +++ b/scripts/lib/recipetool/append.py | |||
@@ -18,6 +18,7 @@ import shutil | |||
18 | import scriptutils | 18 | import scriptutils |
19 | import errno | 19 | import errno |
20 | from collections import defaultdict | 20 | from collections import defaultdict |
21 | import difflib | ||
21 | 22 | ||
22 | logger = logging.getLogger('recipetool') | 23 | logger = logging.getLogger('recipetool') |
23 | 24 | ||
@@ -100,7 +101,7 @@ def determine_file_source(targetpath, rd): | |||
100 | import oe.recipeutils | 101 | import oe.recipeutils |
101 | 102 | ||
102 | # See if it's in do_install for the recipe | 103 | # See if it's in do_install for the recipe |
103 | workdir = rd.getVar('WORKDIR') | 104 | unpackdir = rd.getVar('UNPACKDIR') |
104 | src_uri = rd.getVar('SRC_URI') | 105 | src_uri = rd.getVar('SRC_URI') |
105 | srcfile = '' | 106 | srcfile = '' |
106 | modpatches = [] | 107 | modpatches = [] |
@@ -112,9 +113,9 @@ def determine_file_source(targetpath, rd): | |||
112 | if not srcpath.startswith('/'): | 113 | if not srcpath.startswith('/'): |
113 | # Handle non-absolute path | 114 | # Handle non-absolute path |
114 | srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath)) | 115 | srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath)) |
115 | if srcpath.startswith(workdir): | 116 | if srcpath.startswith(unpackdir): |
116 | # OK, now we have the source file name, look for it in SRC_URI | 117 | # OK, now we have the source file name, look for it in SRC_URI |
117 | workdirfile = os.path.relpath(srcpath, workdir) | 118 | workdirfile = os.path.relpath(srcpath, unpackdir) |
118 | # FIXME this is where we ought to have some code in the fetcher, because this is naive | 119 | # FIXME this is where we ought to have some code in the fetcher, because this is naive |
119 | for item in src_uri.split(): | 120 | for item in src_uri.split(): |
120 | localpath = bb.fetch2.localpath(item, rd) | 121 | localpath = bb.fetch2.localpath(item, rd) |
@@ -299,7 +300,10 @@ def appendfile(args): | |||
299 | if st.st_mode & stat.S_IXUSR: | 300 | if st.st_mode & stat.S_IXUSR: |
300 | perms = '0755' | 301 | perms = '0755' |
301 | install = {args.newfile: (args.targetpath, perms)} | 302 | install = {args.newfile: (args.targetpath, perms)} |
302 | oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: sourcepath}, install, wildcardver=args.wildcard_version, machine=args.machine) | 303 | if sourcepath: |
304 | sourcepath = os.path.basename(sourcepath) | ||
305 | oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname' : sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine) | ||
306 | tinfoil.modified_files() | ||
303 | return 0 | 307 | return 0 |
304 | else: | 308 | else: |
305 | if alternative_pns: | 309 | if alternative_pns: |
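The files mapping handed to oe.recipeutils.bbappend_recipe() now wraps each entry in a dict; a minimal sketch with hypothetical paths:

    import os

    newfile = '/home/user/0001-fix.patch'   # hypothetical file being appended
    sourcepath = 'files/0001-fix.patch'     # hypothetical original source path
    copyfiles = {newfile: {'newname': os.path.basename(sourcepath)}}
    # -> {'/home/user/0001-fix.patch': {'newname': '0001-fix.patch'}}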
@@ -313,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None): | |||
313 | import oe.recipeutils | 317 | import oe.recipeutils |
314 | 318 | ||
315 | srcdir = rd.getVar('S') | 319 | srcdir = rd.getVar('S') |
316 | workdir = rd.getVar('WORKDIR') | 320 | unpackdir = rd.getVar('UNPACKDIR') |
317 | 321 | ||
318 | import bb.fetch | 322 | import bb.fetch |
319 | simplified = {} | 323 | simplified = {} |
@@ -327,35 +331,57 @@ def appendsrc(args, files, rd, extralines=None): | |||
327 | 331 | ||
328 | copyfiles = {} | 332 | copyfiles = {} |
329 | extralines = extralines or [] | 333 | extralines = extralines or [] |
334 | params = [] | ||
330 | for newfile, srcfile in files.items(): | 335 | for newfile, srcfile in files.items(): |
331 | src_destdir = os.path.dirname(srcfile) | 336 | src_destdir = os.path.dirname(srcfile) |
332 | if not args.use_workdir: | 337 | if not args.use_workdir: |
333 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): | 338 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): |
334 | srcdir = os.path.join(workdir, 'git') | 339 | srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX')) |
335 | if not bb.data.inherits_class('kernel-yocto', rd): | 340 | if not bb.data.inherits_class('kernel-yocto', rd): |
336 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') | 341 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}') |
337 | src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) | 342 | src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir) |
338 | src_destdir = os.path.normpath(src_destdir) | 343 | src_destdir = os.path.normpath(src_destdir) |
339 | 344 | ||
340 | source_uri = 'file://{0}'.format(os.path.basename(srcfile)) | ||
341 | if src_destdir and src_destdir != '.': | 345 | if src_destdir and src_destdir != '.': |
342 | source_uri += ';subdir={0}'.format(src_destdir) | 346 | params.append({'subdir': src_destdir}) |
343 | |||
344 | simple = bb.fetch.URI(source_uri) | ||
345 | simple.params = {} | ||
346 | simple_str = str(simple) | ||
347 | if simple_str in simplified: | ||
348 | existing = simplified[simple_str] | ||
349 | if source_uri != existing: | ||
350 | logger.warning('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing)) | ||
351 | else: | ||
352 | logger.warning('{0!r} is already in SRC_URI, not adding'.format(source_uri)) | ||
353 | else: | 347 | else: |
354 | extralines.append('SRC_URI += {0}'.format(source_uri)) | 348 | params.append({}) |
355 | copyfiles[newfile] = srcfile | 349 | |
356 | 350 | copyfiles[newfile] = {'newname' : os.path.basename(srcfile)} | |
357 | oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines) | 351 | |
358 | 352 | dry_run_output = None | |
353 | dry_run_outdir = None | ||
354 | if args.dry_run: | ||
355 | import tempfile | ||
356 | dry_run_output = tempfile.TemporaryDirectory(prefix='devtool') | ||
357 | dry_run_outdir = dry_run_output.name | ||
358 | |||
359 | appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params, | ||
360 | redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe) | ||
361 | if not appendfile: | ||
362 | return | ||
363 | if args.dry_run: | ||
364 | output = '' | ||
365 | appendfilename = os.path.basename(appendfile) | ||
366 | newappendfile = appendfile | ||
367 | if appendfile and os.path.exists(appendfile): | ||
368 | with open(appendfile, 'r') as f: | ||
369 | oldlines = f.readlines() | ||
370 | else: | ||
371 | appendfile = '/dev/null' | ||
372 | oldlines = [] | ||
373 | |||
374 | with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f: | ||
375 | newlines = f.readlines() | ||
376 | diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile) | ||
377 | difflines = list(diff) | ||
378 | if difflines: | ||
379 | output += ''.join(difflines) | ||
380 | if output: | ||
381 | logger.info('Diff of changed files:\n%s' % output) | ||
382 | else: | ||
383 | logger.info('No changed files') | ||
384 | tinfoil.modified_files() | ||
359 | 385 | ||
360 | def appendsrcfiles(parser, args): | 386 | def appendsrcfiles(parser, args): |
361 | recipedata = _parse_recipe(args.recipe, tinfoil) | 387 | recipedata = _parse_recipe(args.recipe, tinfoil) |
@@ -435,6 +461,8 @@ def register_commands(subparsers): | |||
435 | help='Create/update a bbappend to add or replace source files', | 461 | help='Create/update a bbappend to add or replace source files', |
436 | description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.') | 462 | description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.') |
437 | parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path) | 463 | parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path) |
464 | parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true') | ||
465 | parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true') | ||
438 | parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path) | 466 | parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path) |
439 | parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True) | 467 | parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True) |
440 | 468 | ||
@@ -442,6 +470,8 @@ def register_commands(subparsers): | |||
442 | parents=[common_src], | 470 | parents=[common_src], |
443 | help='Create/update a bbappend to add or replace a source file', | 471 | help='Create/update a bbappend to add or replace a source file', |
444 | description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.') | 472 | description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.') |
473 | parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true') | ||
474 | parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true') | ||
445 | parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path) | 475 | parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path) |
446 | parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path) | 476 | parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path) |
447 | parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True) | 477 | parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True) |
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py index 4f6e01c639..ef0ba974a9 100644 --- a/scripts/lib/recipetool/create.py +++ b/scripts/lib/recipetool/create.py | |||
@@ -18,6 +18,8 @@ from urllib.parse import urlparse, urldefrag, urlsplit | |||
18 | import hashlib | 18 | import hashlib |
19 | import bb.fetch2 | 19 | import bb.fetch2 |
20 | logger = logging.getLogger('recipetool') | 20 | logger = logging.getLogger('recipetool') |
21 | from oe.license import tidy_licenses | ||
22 | from oe.license_finder import find_licenses | ||
21 | 23 | ||
22 | tinfoil = None | 24 | tinfoil = None |
23 | plugins = None | 25 | plugins = None |
@@ -389,9 +391,6 @@ def reformat_git_uri(uri): | |||
389 | parms.update({('protocol', 'ssh')}) | 391 | parms.update({('protocol', 'ssh')}) |
390 | elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms): | 392 | elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms): |
391 | parms.update({('protocol', scheme)}) | 393 | parms.update({('protocol', scheme)}) |
392 | # We assume 'master' branch if not set | ||
393 | if not 'branch' in parms: | ||
394 | parms.update({('branch', 'master')}) | ||
395 | # Always append 'git://' | 394 | # Always append 'git://' |
396 | fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms)) | 395 | fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms)) |
397 | return fUrl | 396 | return fUrl |
@@ -426,6 +425,36 @@ def create_recipe(args): | |||
426 | storeTagName = '' | 425 | storeTagName = '' |
427 | pv_srcpv = False | 426 | pv_srcpv = False |
428 | 427 | ||
428 | handled = [] | ||
429 | classes = [] | ||
430 | |||
431 | # Find all plugins that want to register handlers | ||
432 | logger.debug('Loading recipe handlers') | ||
433 | raw_handlers = [] | ||
434 | for plugin in plugins: | ||
435 | if hasattr(plugin, 'register_recipe_handlers'): | ||
436 | plugin.register_recipe_handlers(raw_handlers) | ||
437 | # Sort handlers by priority | ||
438 | handlers = [] | ||
439 | for i, handler in enumerate(raw_handlers): | ||
440 | if isinstance(handler, tuple): | ||
441 | handlers.append((handler[0], handler[1], i)) | ||
442 | else: | ||
443 | handlers.append((handler, 0, i)) | ||
444 | handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True) | ||
445 | for handler, priority, _ in handlers: | ||
446 | logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority)) | ||
447 | setattr(handler, '_devtool', args.devtool) | ||
448 | handlers = [item[0] for item in handlers] | ||
449 | |||
450 | fetchuri = None | ||
451 | for handler in handlers: | ||
452 | if hasattr(handler, 'process_url'): | ||
453 | ret = handler.process_url(args, classes, handled, extravalues) | ||
454 | if 'url' in handled and ret: | ||
455 | fetchuri = ret | ||
456 | break | ||
457 | |||
429 | if os.path.isfile(source): | 458 | if os.path.isfile(source): |
430 | source = 'file://%s' % os.path.abspath(source) | 459 | source = 'file://%s' % os.path.abspath(source) |
431 | 460 | ||
@@ -434,11 +463,12 @@ def create_recipe(args): | |||
434 | if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source): | 463 | if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source): |
435 | logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).') | 464 | logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).') |
436 | # Fetch a URL | 465 | # Fetch a URL |
437 | fetchuri = reformat_git_uri(urldefrag(source)[0]) | 466 | if not fetchuri: |
467 | fetchuri = reformat_git_uri(urldefrag(source)[0]) | ||
438 | if args.binary: | 468 | if args.binary: |
439 | # Assume the archive contains the directory structure verbatim | 469 | # Assume the archive contains the directory structure verbatim |
440 | # so we need to extract to a subdirectory | 470 | # so we need to extract to a subdirectory |
441 | fetchuri += ';subdir=${BP}' | 471 | fetchuri += ';subdir=${BPN}' |
442 | srcuri = fetchuri | 472 | srcuri = fetchuri |
443 | rev_re = re.compile(';rev=([^;]+)') | 473 | rev_re = re.compile(';rev=([^;]+)') |
444 | res = rev_re.search(srcuri) | 474 | res = rev_re.search(srcuri) |
@@ -481,6 +511,9 @@ def create_recipe(args): | |||
481 | storeTagName = params['tag'] | 511 | storeTagName = params['tag'] |
482 | params['nobranch'] = '1' | 512 | params['nobranch'] = '1' |
483 | del params['tag'] | 513 | del params['tag'] |
514 | # Assume 'master' branch if not set | ||
515 | if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params: | ||
516 | params['branch'] = 'master' | ||
484 | fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) | 517 | fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) |
485 | 518 | ||
486 | tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') | 519 | tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') |
@@ -497,7 +530,7 @@ def create_recipe(args): | |||
497 | if ftmpdir and args.keep_temp: | 530 | if ftmpdir and args.keep_temp: |
498 | logger.info('Fetch temp directory is %s' % ftmpdir) | 531 | logger.info('Fetch temp directory is %s' % ftmpdir) |
499 | 532 | ||
500 | dirlist = scriptutils.filter_src_subdirs(srctree) | 533 | dirlist = os.listdir(srctree) |
501 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) | 534 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) |
502 | if len(dirlist) == 1: | 535 | if len(dirlist) == 1: |
503 | singleitem = os.path.join(srctree, dirlist[0]) | 536 | singleitem = os.path.join(srctree, dirlist[0]) |
@@ -530,10 +563,9 @@ def create_recipe(args): | |||
530 | # Remove HEAD reference point and drop remote prefix | 563 | # Remove HEAD reference point and drop remote prefix |
531 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] | 564 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] |
532 | if 'master' in get_branch: | 565 | if 'master' in get_branch: |
533 | # If it is master, we do not need to append 'branch=master' as this is default. | ||
534 | # Even with the case where get_branch has multiple objects, if 'master' is one | 566 | # Even with the case where get_branch has multiple objects, if 'master' is one |
535 | # of them, we should default take from 'master' | 567 | # of them, we should default take from 'master' |
536 | srcbranch = '' | 568 | srcbranch = 'master' |
537 | elif len(get_branch) == 1: | 569 | elif len(get_branch) == 1: |
538 | # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' | 570 | # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' |
539 | srcbranch = get_branch[0] | 571 | srcbranch = get_branch[0] |
@@ -546,8 +578,8 @@ def create_recipe(args): | |||
546 | # Since we might have a value in srcbranch, we need to | 578 | # Since we might have a value in srcbranch, we need to |
547 | # reconstruct the srcuri to include 'branch' in params. | 579 | # reconstruct the srcuri to include 'branch' in params. |
548 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) | 580 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) |
549 | if srcbranch: | 581 | if scheme in ['git', 'gitsm']: |
550 | params['branch'] = srcbranch | 582 | params['branch'] = srcbranch or 'master' |
551 | 583 | ||
552 | if storeTagName and scheme in ['git', 'gitsm']: | 584 | if storeTagName and scheme in ['git', 'gitsm']: |
553 | # Check srcrev using tag and check validity of the tag | 585 | # Check srcrev using tag and check validity of the tag |
@@ -606,8 +638,7 @@ def create_recipe(args): | |||
606 | splitline = line.split() | 638 | splitline = line.split() |
607 | if len(splitline) > 1: | 639 | if len(splitline) > 1: |
608 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): | 640 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): |
609 | srcuri = reformat_git_uri(splitline[1]) | 641 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' |
610 | srcsubdir = 'git' | ||
611 | break | 642 | break |
612 | 643 | ||
613 | if args.src_subdir: | 644 | if args.src_subdir: |
@@ -639,8 +670,6 @@ def create_recipe(args): | |||
639 | # We'll come back and replace this later in handle_license_vars() | 670 | # We'll come back and replace this later in handle_license_vars() |
640 | lines_before.append('##LICENSE_PLACEHOLDER##') | 671 | lines_before.append('##LICENSE_PLACEHOLDER##') |
641 | 672 | ||
642 | handled = [] | ||
643 | classes = [] | ||
644 | 673 | ||
645 | # FIXME This is kind of a hack, we probably ought to be using bitbake to do this | 674 | # FIXME This is kind of a hack, we probably ought to be using bitbake to do this |
646 | pn = None | 675 | pn = None |
@@ -678,8 +707,10 @@ def create_recipe(args): | |||
678 | if not srcuri: | 707 | if not srcuri: |
679 | lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)') | 708 | lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)') |
680 | lines_before.append('SRC_URI = "%s"' % srcuri) | 709 | lines_before.append('SRC_URI = "%s"' % srcuri) |
710 | shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST] | ||
681 | for key, value in sorted(checksums.items()): | 711 | for key, value in sorted(checksums.items()): |
682 | lines_before.append('SRC_URI[%s] = "%s"' % (key, value)) | 712 | if key in shown_checksums: |
713 | lines_before.append('SRC_URI[%s] = "%s"' % (key, value)) | ||
683 | if srcuri and supports_srcrev(srcuri): | 714 | if srcuri and supports_srcrev(srcuri): |
684 | lines_before.append('') | 715 | lines_before.append('') |
685 | lines_before.append('# Modify these as desired') | 716 | lines_before.append('# Modify these as desired') |
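A short sketch of the checksum filtering introduced above, assuming bb.fetch2.SHOWN_CHECKSUM_LIST contains only 'sha256': only the sha256sum line is written into the recipe, while the other fetched checksums are dropped:

    # Dummy checksum values, for illustration only.
    checksums = {
        'md5sum': '0' * 32,
        'sha1sum': '1' * 40,
        'sha256sum': '2' * 64,
    }
    # Stand-in for bb.fetch2.SHOWN_CHECKSUM_LIST (assumed here to be ['sha256']).
    shown_checksums = ['%ssum' % s for s in ['sha256']]
    for key, value in sorted(checksums.items()):
        if key in shown_checksums:
            print('SRC_URI[%s] = "%s"' % (key, value))
    # -> SRC_URI[sha256sum] = "222...2"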
@@ -691,7 +722,7 @@ def create_recipe(args): | |||
691 | srcpvprefix = 'svnr' | 722 | srcpvprefix = 'svnr' |
692 | else: | 723 | else: |
693 | srcpvprefix = scheme | 724 | srcpvprefix = scheme |
694 | lines_before.append('PV = "%s+%s${SRCPV}"' % (realpv or '1.0', srcpvprefix)) | 725 | lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix)) |
695 | pv_srcpv = True | 726 | pv_srcpv = True |
696 | if not args.autorev and srcrev == '${AUTOREV}': | 727 | if not args.autorev and srcrev == '${AUTOREV}': |
697 | if os.path.exists(os.path.join(srctree, '.git')): | 728 | if os.path.exists(os.path.join(srctree, '.git')): |
@@ -705,7 +736,7 @@ def create_recipe(args): | |||
705 | if srcsubdir and not args.binary: | 736 | if srcsubdir and not args.binary: |
706 | # (for binary packages we explicitly specify subdir= when fetching to | 737 | # (for binary packages we explicitly specify subdir= when fetching to |
707 | # match the default value of S, so we don't need to set it in that case) | 738 | # match the default value of S, so we don't need to set it in that case) |
708 | lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) | 739 | lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir) |
709 | lines_before.append('') | 740 | lines_before.append('') |
710 | 741 | ||
711 | if pkgarch: | 742 | if pkgarch: |
@@ -719,25 +750,6 @@ def create_recipe(args): | |||
719 | if args.npm_dev: | 750 | if args.npm_dev: |
720 | extravalues['NPM_INSTALL_DEV'] = 1 | 751 | extravalues['NPM_INSTALL_DEV'] = 1 |
721 | 752 | ||
722 | # Find all plugins that want to register handlers | ||
723 | logger.debug('Loading recipe handlers') | ||
724 | raw_handlers = [] | ||
725 | for plugin in plugins: | ||
726 | if hasattr(plugin, 'register_recipe_handlers'): | ||
727 | plugin.register_recipe_handlers(raw_handlers) | ||
728 | # Sort handlers by priority | ||
729 | handlers = [] | ||
730 | for i, handler in enumerate(raw_handlers): | ||
731 | if isinstance(handler, tuple): | ||
732 | handlers.append((handler[0], handler[1], i)) | ||
733 | else: | ||
734 | handlers.append((handler, 0, i)) | ||
735 | handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True) | ||
736 | for handler, priority, _ in handlers: | ||
737 | logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority)) | ||
738 | setattr(handler, '_devtool', args.devtool) | ||
739 | handlers = [item[0] for item in handlers] | ||
740 | |||
741 | # Apply the handlers | 753 | # Apply the handlers |
742 | if args.binary: | 754 | if args.binary: |
743 | classes.append('bin_package') | 755 | classes.append('bin_package') |
@@ -746,9 +758,14 @@ def create_recipe(args): | |||
746 | for handler in handlers: | 758 | for handler in handlers: |
747 | handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) | 759 | handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) |
748 | 760 | ||
761 | # native and nativesdk classes are special and must be inherited last | ||
762 | # If present, put them at the end of the classes list | ||
763 | classes.sort(key=lambda c: c in ("native", "nativesdk")) | ||
764 | |||
749 | extrafiles = extravalues.pop('extrafiles', {}) | 765 | extrafiles = extravalues.pop('extrafiles', {}) |
750 | extra_pn = extravalues.pop('PN', None) | 766 | extra_pn = extravalues.pop('PN', None) |
751 | extra_pv = extravalues.pop('PV', None) | 767 | extra_pv = extravalues.pop('PV', None) |
768 | run_tasks = extravalues.pop('run_tasks', "").split() | ||
752 | 769 | ||
753 | if extra_pv and not realpv: | 770 | if extra_pv and not realpv: |
754 | realpv = extra_pv | 771 | realpv = extra_pv |
@@ -809,7 +826,8 @@ def create_recipe(args): | |||
809 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) | 826 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) |
810 | bb.utils.mkdirhier(extraoutdir) | 827 | bb.utils.mkdirhier(extraoutdir) |
811 | for destfn, extrafile in extrafiles.items(): | 828 | for destfn, extrafile in extrafiles.items(): |
812 | shutil.move(extrafile, os.path.join(extraoutdir, destfn)) | 829 | fn = destfn.format(pn=pn, pv=realpv) |
830 | shutil.move(extrafile, os.path.join(extraoutdir, fn)) | ||
813 | 831 | ||
814 | lines = lines_before | 832 | lines = lines_before |
815 | lines_before = [] | 833 | lines_before = [] |
@@ -824,7 +842,7 @@ def create_recipe(args): | |||
824 | line = line.replace(realpv, '${PV}') | 842 | line = line.replace(realpv, '${PV}') |
825 | if pn: | 843 | if pn: |
826 | line = line.replace(pn, '${BPN}') | 844 | line = line.replace(pn, '${BPN}') |
827 | if line == 'S = "${WORKDIR}/${BPN}-${PV}"': | 845 | if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line: |
828 | skipblank = True | 846 | skipblank = True |
829 | continue | 847 | continue |
830 | elif line.startswith('SRC_URI = '): | 848 | elif line.startswith('SRC_URI = '): |
@@ -870,8 +888,10 @@ def create_recipe(args): | |||
870 | outlines.append('') | 888 | outlines.append('') |
871 | outlines.extend(lines_after) | 889 | outlines.extend(lines_after) |
872 | 890 | ||
891 | outlines = [ line.rstrip('\n') +"\n" for line in outlines] | ||
892 | |||
873 | if extravalues: | 893 | if extravalues: |
874 | _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False) | 894 | _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True) |
875 | 895 | ||
876 | if args.extract_to: | 896 | if args.extract_to: |
877 | scriptutils.git_convert_standalone_clone(srctree) | 897 | scriptutils.git_convert_standalone_clone(srctree) |
@@ -887,7 +907,7 @@ def create_recipe(args): | |||
887 | log_info_cond('Source extracted to %s' % args.extract_to, args.devtool) | 907 | log_info_cond('Source extracted to %s' % args.extract_to, args.devtool) |
888 | 908 | ||
889 | if outfile == '-': | 909 | if outfile == '-': |
890 | sys.stdout.write('\n'.join(outlines) + '\n') | 910 | sys.stdout.write(''.join(outlines) + '\n') |
891 | else: | 911 | else: |
892 | with open(outfile, 'w') as f: | 912 | with open(outfile, 'w') as f: |
893 | lastline = None | 913 | lastline = None |
@@ -895,9 +915,14 @@ def create_recipe(args): | |||
895 | if not lastline and not line: | 915 | if not lastline and not line: |
896 | # Skip extra blank lines | 916 | # Skip extra blank lines |
897 | continue | 917 | continue |
898 | f.write('%s\n' % line) | 918 | f.write('%s' % line) |
899 | lastline = line | 919 | lastline = line |
900 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) | 920 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) |
921 | tinfoil.modified_files() | ||
922 | |||
923 | for task in run_tasks: | ||
924 | logger.info("Running task %s" % task) | ||
925 | tinfoil.build_file_sync(outfile, task) | ||
901 | 926 | ||
902 | if tempsrc: | 927 | if tempsrc: |
903 | if args.keep_temp: | 928 | if args.keep_temp: |
@@ -920,23 +945,32 @@ def split_value(value): | |||
920 | else: | 945 | else: |
921 | return value | 946 | return value |
922 | 947 | ||
948 | def fixup_license(value): | ||
949 | # Ensure license expressions containing OR start and end with brackets | ||
950 | if '|' in value: | ||
951 | return '(' + value + ')' | ||
952 | return value | ||
953 | |||
923 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): | 954 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): |
924 | lichandled = [x for x in handled if x[0] == 'license'] | 955 | lichandled = [x for x in handled if x[0] == 'license'] |
925 | if lichandled: | 956 | if lichandled: |
926 | # Someone else has already handled the license vars, just return their value | 957 | # Someone else has already handled the license vars, just return their value |
927 | return lichandled[0][1] | 958 | return lichandled[0][1] |
928 | 959 | ||
929 | licvalues = guess_license(srctree, d) | 960 | licvalues = find_licenses(srctree, d) |
930 | licenses = [] | 961 | licenses = [] |
931 | lic_files_chksum = [] | 962 | lic_files_chksum = [] |
932 | lic_unknown = [] | 963 | lic_unknown = [] |
933 | lines = [] | 964 | lines = [] |
934 | if licvalues: | 965 | if licvalues: |
935 | for licvalue in licvalues: | 966 | for licvalue in licvalues: |
936 | if not licvalue[0] in licenses: | 967 | license = licvalue[0] |
937 | licenses.append(licvalue[0]) | 968 | lics = tidy_licenses(fixup_license(license)) |
969 | lics = [lic for lic in lics if lic not in licenses] | ||
970 | if len(lics): | ||
971 | licenses.extend(lics) | ||
938 | lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2])) | 972 | lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2])) |
939 | if licvalue[0] == 'Unknown': | 973 | if license == 'Unknown': |
940 | lic_unknown.append(licvalue[1]) | 974 | lic_unknown.append(licvalue[1]) |
941 | if lic_unknown: | 975 | if lic_unknown: |
942 | lines.append('#') | 976 | lines.append('#') |
@@ -945,9 +979,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d): | |||
945 | for licfile in lic_unknown: | 979 | for licfile in lic_unknown: |
946 | lines.append('# %s' % licfile) | 980 | lines.append('# %s' % licfile) |
947 | 981 | ||
948 | extra_license = split_value(extravalues.pop('LICENSE', [])) | 982 | extra_license = tidy_licenses(extravalues.pop('LICENSE', '')) |
949 | if '&' in extra_license: | ||
950 | extra_license.remove('&') | ||
951 | if extra_license: | 983 | if extra_license: |
952 | if licenses == ['Unknown']: | 984 | if licenses == ['Unknown']: |
953 | licenses = extra_license | 985 | licenses = extra_license |
@@ -988,7 +1020,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d): | |||
988 | lines.append('# instead of &. If there is any doubt, check the accompanying documentation') | 1020 | lines.append('# instead of &. If there is any doubt, check the accompanying documentation') |
989 | lines.append('# to determine which situation is applicable.') | 1021 | lines.append('# to determine which situation is applicable.') |
990 | 1022 | ||
991 | lines.append('LICENSE = "%s"' % ' & '.join(licenses)) | 1023 | lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold))) |
992 | lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum)) | 1024 | lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum)) |
993 | lines.append('') | 1025 | lines.append('') |
994 | 1026 | ||
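A small sketch of the LICENSE assembly above: fixup_license() parenthesises OR expressions, and the collected entries are then AND-joined after a case-insensitive sort (the tidy_licenses() normalisation step, defined elsewhere in the file, is omitted here):

    def fixup_license(value):
        # As in the hunk above: wrap expressions containing OR so the
        # AND-join below keeps the intended precedence.
        if '|' in value:
            return '(' + value + ')'
        return value

    licenses = [fixup_license('MIT | Apache-2.0'), 'GPL-2.0-only', 'bzip2-1.0.6']
    print('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
    # -> LICENSE = "(MIT | Apache-2.0) & bzip2-1.0.6 & GPL-2.0-only"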
@@ -1005,228 +1037,15 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d): | |||
1005 | handled.append(('license', licvalues)) | 1037 | handled.append(('license', licvalues)) |
1006 | return licvalues | 1038 | return licvalues |
1007 | 1039 | ||
1008 | def get_license_md5sums(d, static_only=False, linenumbers=False): | 1040 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): |
1009 | import bb.utils | ||
1010 | import csv | ||
1011 | md5sums = {} | ||
1012 | if not static_only and not linenumbers: | ||
1013 | # Gather md5sums of license files in common license dir | ||
1014 | commonlicdir = d.getVar('COMMON_LICENSE_DIR') | ||
1015 | for fn in os.listdir(commonlicdir): | ||
1016 | md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn)) | ||
1017 | md5sums[md5value] = fn | ||
1018 | |||
1019 | # The following were extracted from common values in various recipes | ||
1020 | # (double checking the license against the license file itself, not just | ||
1021 | # the LICENSE value in the recipe) | ||
1022 | |||
1023 | # Read license md5sums from csv file | ||
1024 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | ||
1025 | for path in (d.getVar('BBPATH').split(':') | ||
1026 | + [os.path.join(scripts_path, '..', '..')]): | ||
1027 | csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv') | ||
1028 | if os.path.isfile(csv_path): | ||
1029 | with open(csv_path, newline='') as csv_file: | ||
1030 | fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5'] | ||
1031 | reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames) | ||
1032 | for row in reader: | ||
1033 | if linenumbers: | ||
1034 | md5sums[row['md5sum']] = ( | ||
1035 | row['license'], row['beginline'], row['endline'], row['md5']) | ||
1036 | else: | ||
1037 | md5sums[row['md5sum']] = row['license'] | ||
1038 | |||
1039 | return md5sums | ||
1040 | |||
1041 | def crunch_license(licfile): | ||
1042 | ''' | ||
1043 | Remove non-material text from a license file and then check | ||
1044 | its md5sum against a known list. This works well for licenses | ||
1045 | which contain a copyright statement, but is also a useful way | ||
1046 | to handle people's insistence upon reformatting the license text | ||
1047 | slightly (with no material difference to the text of the | ||
1048 | license). | ||
1049 | ''' | ||
1050 | |||
1051 | import oe.utils | ||
1052 | |||
1053 | # Note: these are carefully constructed! | ||
1054 | license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$') | ||
1055 | license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$') | ||
1056 | copyright_re = re.compile('^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$') | ||
1057 | disclaimer_re = re.compile('^ *\*? ?All [Rr]ights [Rr]eserved\.$') | ||
1058 | email_re = re.compile('^.*<[\w\.-]*@[\w\.\-]*>$') | ||
1059 | header_re = re.compile('^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$') | ||
1060 | tag_re = re.compile('^ *@?\(?([Ll]icense|MIT)\)?$') | ||
1061 | url_re = re.compile('^ *[#\*]* *https?:\/\/[\w\.\/\-]+$') | ||
1062 | |||
1063 | crunched_md5sums = {} | ||
1064 | |||
1065 | # common licenses | ||
1066 | crunched_md5sums['89f3bf322f30a1dcfe952e09945842f0'] = 'Apache-2.0' | ||
1067 | crunched_md5sums['13b6fe3075f8f42f2270a748965bf3a1'] = 'BSD-0-Clause' | ||
1068 | crunched_md5sums['ba87a7d7c20719c8df4b8beed9b78c43'] = 'BSD-2-Clause' | ||
1069 | crunched_md5sums['7f8892c03b72de419c27be4ebfa253f8'] = 'BSD-3-Clause' | ||
1070 | crunched_md5sums['21128c0790b23a8a9f9e260d5f6b3619'] = 'BSL-1.0' | ||
1071 | crunched_md5sums['975742a59ae1b8abdea63a97121f49f4'] = 'EDL-1.0' | ||
1072 | crunched_md5sums['5322cee4433d84fb3aafc9e253116447'] = 'EPL-1.0' | ||
1073 | crunched_md5sums['6922352e87de080f42419bed93063754'] = 'EPL-2.0' | ||
1074 | crunched_md5sums['793475baa22295cae1d3d4046a3a0ceb'] = 'GPL-2.0-only' | ||
1075 | crunched_md5sums['ff9047f969b02c20f0559470df5cb433'] = 'GPL-2.0-or-later' | ||
1076 | crunched_md5sums['ea6de5453fcadf534df246e6cdafadcd'] = 'GPL-3.0-only' | ||
1077 | crunched_md5sums['b419257d4d153a6fde92ddf96acf5b67'] = 'GPL-3.0-or-later' | ||
1078 | crunched_md5sums['228737f4c49d3ee75b8fb3706b090b84'] = 'ISC' | ||
1079 | crunched_md5sums['c6a782e826ca4e85bf7f8b89435a677d'] = 'LGPL-2.0-only' | ||
1080 | crunched_md5sums['32d8f758a066752f0db09bd7624b8090'] = 'LGPL-2.0-or-later' | ||
1081 | crunched_md5sums['4820937eb198b4f84c52217ed230be33'] = 'LGPL-2.1-only' | ||
1082 | crunched_md5sums['db13fe9f3a13af7adab2dc7a76f9e44a'] = 'LGPL-2.1-or-later' | ||
1083 | crunched_md5sums['d7a0f2e4e0950e837ac3eabf5bd1d246'] = 'LGPL-3.0-only' | ||
1084 | crunched_md5sums['abbf328e2b434f9153351f06b9f79d02'] = 'LGPL-3.0-or-later' | ||
1085 | crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT' | ||
1086 | crunched_md5sums['b218b0e94290b9b818c4be67c8e1cc82'] = 'MIT-0' | ||
1087 | crunched_md5sums['ddc18131d6748374f0f35a621c245b49'] = 'Unlicense' | ||
1088 | crunched_md5sums['51f9570ff32571fc0a443102285c5e33'] = 'WTFPL' | ||
1089 | |||
1090 | # The following two were gleaned from the "forever" npm package | ||
1091 | crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC' | ||
1092 | # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt | ||
1093 | crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause' | ||
1094 | # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE | ||
1095 | crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPLv2' | ||
1096 | # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt | ||
1097 | crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPLv2' | ||
1098 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1 | ||
1099 | crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPLv2.1' | ||
1100 | # unixODBC-2.3.4 COPYING | ||
1101 | crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPLv2.1' | ||
1102 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3 | ||
1103 | crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPLv3' | ||
1104 | # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10 | ||
1105 | crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0' | ||
1106 | |||
1107 | # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD | ||
1108 | crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause' | ||
1109 | # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE | ||
1110 | crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause' | ||
1111 | # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE | ||
1112 | crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause' | ||
1113 | # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE | ||
1114 | crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause' | ||
1115 | # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE | ||
1116 | crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause' | ||
1117 | # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE | ||
1118 | crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause' | ||
1119 | # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE | ||
1120 | crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause' | ||
1121 | # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE | ||
1122 | crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause' | ||
1123 | # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE | ||
1124 | crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause' | ||
1125 | # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE | ||
1126 | crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT' | ||
1127 | # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE | ||
1128 | crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT' | ||
1129 | # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE | ||
1130 | crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0' | ||
1131 | # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md | ||
1132 | crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0' | ||
1133 | # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE | ||
1134 | crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0' | ||
1135 | # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt | ||
1136 | crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0' | ||
1137 | # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE | ||
1138 | crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0' | ||
1139 | # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE | ||
1140 | crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense' | ||
1141 | # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md | ||
1142 | crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib' | ||
1143 | |||
1144 | lictext = [] | ||
1145 | with open(licfile, 'r', errors='surrogateescape') as f: | ||
1146 | for line in f: | ||
1147 | # Drop opening statements | ||
1148 | if copyright_re.match(line): | ||
1149 | continue | ||
1150 | elif disclaimer_re.match(line): | ||
1151 | continue | ||
1152 | elif email_re.match(line): | ||
1153 | continue | ||
1154 | elif header_re.match(line): | ||
1155 | continue | ||
1156 | elif tag_re.match(line): | ||
1157 | continue | ||
1158 | elif url_re.match(line): | ||
1159 | continue | ||
1160 | elif license_title_re.match(line): | ||
1161 | continue | ||
1162 | elif license_statement_re.match(line): | ||
1163 | continue | ||
1164 | # Strip comment symbols | ||
1165 | line = line.replace('*', '') \ | ||
1166 | .replace('#', '') | ||
1167 | # Unify spelling | ||
1168 | line = line.replace('sub-license', 'sublicense') | ||
1169 | # Squash spaces | ||
1170 | line = oe.utils.squashspaces(line.strip()) | ||
1171 | # Replace smart quotes, double quotes and backticks with single quotes | ||
1172 | line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') | ||
1173 | # Unify brackets | ||
1174 | line = line.replace("{", "[").replace("}", "]") | ||
1175 | if line: | ||
1176 | lictext.append(line) | ||
1177 | |||
1178 | m = hashlib.md5() | ||
1179 | try: | ||
1180 | m.update(' '.join(lictext).encode('utf-8')) | ||
1181 | md5val = m.hexdigest() | ||
1182 | except UnicodeEncodeError: | ||
1183 | md5val = None | ||
1184 | lictext = '' | ||
1185 | license = crunched_md5sums.get(md5val, None) | ||
1186 | return license, md5val, lictext | ||
1187 | |||
1188 | def guess_license(srctree, d): | ||
1189 | import bb | ||
1190 | md5sums = get_license_md5sums(d) | ||
1191 | |||
1192 | licenses = [] | ||
1193 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] | ||
1194 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts") | ||
1195 | licfiles = [] | ||
1196 | for root, dirs, files in os.walk(srctree): | ||
1197 | for fn in files: | ||
1198 | if fn.endswith(skip_extensions): | ||
1199 | continue | ||
1200 | for spec in licspecs: | ||
1201 | if fnmatch.fnmatch(fn, spec): | ||
1202 | fullpath = os.path.join(root, fn) | ||
1203 | if not fullpath in licfiles: | ||
1204 | licfiles.append(fullpath) | ||
1205 | for licfile in licfiles: | ||
1206 | md5value = bb.utils.md5_file(licfile) | ||
1207 | license = md5sums.get(md5value, None) | ||
1208 | if not license: | ||
1209 | license, crunched_md5, lictext = crunch_license(licfile) | ||
1210 | if lictext and not license: | ||
1211 | license = 'Unknown' | ||
1212 | logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \ | ||
1213 | "and replace `Unknown` with the license:\n" \ | ||
1214 | "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value)) | ||
1215 | if license: | ||
1216 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) | ||
1217 | |||
1218 | # FIXME should we grab at least one source file with a license header and add that too? | ||
1219 | |||
1220 | return licenses | ||
1221 | |||
1222 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn='${PN}'): | ||
1223 | """ | 1041 | """ |
1224 | Given a list of (license, path, md5sum) as returned by guess_license(), | 1042 | Given a list of (license, path, md5sum) as returned by match_licenses(), |
1225 | a dict of package name to path mappings, write out a set of | 1043 | a dict of package name to path mappings, write out a set of |
1226 | package-specific LICENSE values. | 1044 | package-specific LICENSE values. |
1227 | """ | 1045 | """ |
1228 | pkglicenses = {pn: []} | 1046 | pkglicenses = {pn: []} |
1229 | for license, licpath, _ in licvalues: | 1047 | for license, licpath, _ in licvalues: |
1048 | license = fixup_license(license) | ||
1230 | for pkgname, pkgpath in packages.items(): | 1049 | for pkgname, pkgpath in packages.items(): |
1231 | if licpath.startswith(pkgpath + '/'): | 1050 | if licpath.startswith(pkgpath + '/'): |
1232 | if pkgname in pkglicenses: | 1051 | if pkgname in pkglicenses: |
@@ -1239,13 +1058,24 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn=' | |||
1239 | pkglicenses[pn].append(license) | 1058 | pkglicenses[pn].append(license) |
1240 | outlicenses = {} | 1059 | outlicenses = {} |
1241 | for pkgname in packages: | 1060 | for pkgname in packages: |
1242 | license = ' '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown' | 1061 | # Assume AND operator between license files |
1243 | if license == 'Unknown' and pkgname in fallback_licenses: | 1062 | license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown' |
1063 | if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses: | ||
1244 | license = fallback_licenses[pkgname] | 1064 | license = fallback_licenses[pkgname] |
1065 | licenses = tidy_licenses(license) | ||
1066 | license = ' & '.join(licenses) | ||
1245 | outlines.append('LICENSE:%s = "%s"' % (pkgname, license)) | 1067 | outlines.append('LICENSE:%s = "%s"' % (pkgname, license)) |
1246 | outlicenses[pkgname] = license.split() | 1068 | outlicenses[pkgname] = licenses |
1247 | return outlicenses | 1069 | return outlicenses |
1248 | 1070 | ||
1071 | def generate_common_licenses_chksums(common_licenses, d): | ||
1072 | lic_files_chksums = [] | ||
1073 | for license in tidy_licenses(common_licenses): | ||
1074 | licfile = '${COMMON_LICENSE_DIR}/' + license | ||
1075 | md5value = bb.utils.md5_file(d.expand(licfile)) | ||
1076 | lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value)) | ||
1077 | return lic_files_chksums | ||
1078 | |||
1249 | def read_pkgconfig_provides(d): | 1079 | def read_pkgconfig_provides(d): |
1250 | pkgdatadir = d.getVar('PKGDATA_DIR') | 1080 | pkgdatadir = d.getVar('PKGDATA_DIR') |
1251 | pkgmap = {} | 1081 | pkgmap = {} |
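A simplified sketch of split_pkg_licenses() as modified above: each detected license file is attributed to the package whose path is a prefix of the file's path, unmatched files fall back to the main package, and a package's licenses are now AND-joined (tidy_licenses() and fallback_licenses handling are left out for brevity):

    licvalues = [('MIT', 'src/liba/COPYING', 'md5-a'),
                 ('BSD-3-Clause', 'src/libb/LICENSE', 'md5-b'),
                 ('Apache-2.0', 'LICENSE', 'md5-c')]
    packages = {'liba': 'src/liba', 'libb': 'src/libb'}
    pn = '${PN}'

    pkglicenses = {pn: []}
    for license, licpath, _ in licvalues:
        for pkgname, pkgpath in packages.items():
            if licpath.startswith(pkgpath + '/'):
                pkglicenses.setdefault(pkgname, []).append(license)
                break
        else:
            # No package path matched, so attribute the license to ${PN}
            pkglicenses[pn].append(license)

    for pkgname in [pn] + list(packages):
        license = ' & '.join(sorted(set(pkglicenses.get(pkgname, [])))) or 'Unknown'
        print('LICENSE:%s = "%s"' % (pkgname, license))
    # -> LICENSE:${PN} = "Apache-2.0"
    #    LICENSE:liba = "MIT"
    #    LICENSE:libb = "BSD-3-Clause"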
@@ -1376,7 +1206,7 @@ def register_commands(subparsers): | |||
1376 | parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)') | 1206 | parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)') |
1377 | parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)') | 1207 | parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)') |
1378 | parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies') | 1208 | parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies') |
1209 | parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class') | ||
1379 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) | 1210 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) |
1380 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') | 1211 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') |
1381 | parser_create.set_defaults(func=create_recipe) | 1212 | parser_create.set_defaults(func=create_recipe) |
1382 | |||
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py index 5015634476..ec9d510e23 100644 --- a/scripts/lib/recipetool/create_buildsys.py +++ b/scripts/lib/recipetool/create_buildsys.py | |||
@@ -5,9 +5,9 @@ | |||
5 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
6 | # | 6 | # |
7 | 7 | ||
8 | import os | ||
8 | import re | 9 | import re |
9 | import logging | 10 | import logging |
10 | import glob | ||
11 | from recipetool.create import RecipeHandler, validate_pv | 11 | from recipetool.create import RecipeHandler, validate_pv |
12 | 12 | ||
13 | logger = logging.getLogger('recipetool') | 13 | logger = logging.getLogger('recipetool') |
@@ -137,15 +137,15 @@ class CmakeRecipeHandler(RecipeHandler): | |||
137 | deps = [] | 137 | deps = [] |
138 | unmappedpkgs = [] | 138 | unmappedpkgs = [] |
139 | 139 | ||
140 | proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE) | 140 | proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE) |
141 | pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE) | 141 | pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE) |
142 | pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE) | 142 | pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE) |
143 | findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE) | 143 | findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE) |
144 | findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*') | 144 | findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*') |
145 | checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE) | 145 | checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE) |
146 | include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE) | 146 | include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE) |
147 | subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE) | 147 | subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE) |
148 | dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?') | 148 | dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?') |
149 | 149 | ||
150 | def find_cmake_package(pkg): | 150 | def find_cmake_package(pkg): |
151 | RecipeHandler.load_devel_filemap(tinfoil.config_data) | 151 | RecipeHandler.load_devel_filemap(tinfoil.config_data) |
@@ -423,16 +423,16 @@ class AutotoolsRecipeHandler(RecipeHandler): | |||
423 | 'makeinfo': 'texinfo', | 423 | 'makeinfo': 'texinfo', |
424 | } | 424 | } |
425 | 425 | ||
426 | pkg_re = re.compile('PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') | 426 | pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') |
427 | pkgce_re = re.compile('PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*') | 427 | pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*') |
428 | lib_re = re.compile('AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*') | 428 | lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*') |
429 | libx_re = re.compile('AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*') | 429 | libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*') |
430 | progs_re = re.compile('_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') | 430 | progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') |
431 | dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?') | 431 | dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?') |
432 | ac_init_re = re.compile('AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*') | 432 | ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*') |
433 | am_init_re = re.compile('AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*') | 433 | am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*') |
434 | define_re = re.compile('\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)') | 434 | define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)') |
435 | version_re = re.compile('([0-9.]+)') | 435 | version_re = re.compile(r'([0-9.]+)') |
436 | 436 | ||
437 | defines = {} | 437 | defines = {} |
438 | def subst_defines(value): | 438 | def subst_defines(value): |
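The r'' prefixes added throughout create_buildsys.py do not change what the patterns match; they only stop Python from treating sequences such as \s as (invalid) string escapes, which newer interpreters (3.12+) flag with a SyntaxWarning. A quick check with the project() pattern from the hunk above:

    import re

    # Same pattern as above, as a raw string: \s reaches the regex engine untouched
    # and no invalid-escape warning is emitted when the module is compiled.
    proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)

    m = proj_re.search('PROJECT(myapp VERSION 1.2)')
    print(m.group(1))   # -> myapp VERSION 1.2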
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py index 0b6b042ed1..a807dafae5 100644 --- a/scripts/lib/recipetool/create_buildsys_python.py +++ b/scripts/lib/recipetool/create_buildsys_python.py | |||
@@ -8,9 +8,9 @@ | |||
8 | import ast | 8 | import ast |
9 | import codecs | 9 | import codecs |
10 | import collections | 10 | import collections |
11 | import distutils.command.build_py | 11 | import setuptools.command.build_py |
12 | import email | 12 | import email |
13 | import imp | 13 | import importlib |
14 | import glob | 14 | import glob |
15 | import itertools | 15 | import itertools |
16 | import logging | 16 | import logging |
@@ -18,7 +18,11 @@ import os | |||
18 | import re | 18 | import re |
19 | import sys | 19 | import sys |
20 | import subprocess | 20 | import subprocess |
21 | import json | ||
22 | import urllib.request | ||
21 | from recipetool.create import RecipeHandler | 23 | from recipetool.create import RecipeHandler |
24 | from urllib.parse import urldefrag | ||
25 | from recipetool.create import determine_from_url | ||
22 | 26 | ||
23 | logger = logging.getLogger('recipetool') | 27 | logger = logging.getLogger('recipetool') |
24 | 28 | ||
@@ -37,7 +41,334 @@ class PythonRecipeHandler(RecipeHandler): | |||
37 | assume_provided = ['builtins', 'os.path'] | 41 | assume_provided = ['builtins', 'os.path'] |
38 | # Assumes that the host python3 builtin_module_names is sane for target too | 42 | # Assumes that the host python3 builtin_module_names is sane for target too |
39 | assume_provided = assume_provided + list(sys.builtin_module_names) | 43 | assume_provided = assume_provided + list(sys.builtin_module_names) |
44 | excluded_fields = [] | ||
40 | 45 | ||
46 | |||
47 | classifier_license_map = { | ||
48 | 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL', | ||
49 | 'License :: OSI Approved :: Apache Software License': 'Apache', | ||
50 | 'License :: OSI Approved :: Apple Public Source License': 'APSL', | ||
51 | 'License :: OSI Approved :: Artistic License': 'Artistic', | ||
52 | 'License :: OSI Approved :: Attribution Assurance License': 'AAL', | ||
53 | 'License :: OSI Approved :: BSD License': 'BSD-3-Clause', | ||
54 | 'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0', | ||
55 | 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1', | ||
56 | 'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0', | ||
57 | 'License :: OSI Approved :: Common Public License': 'CPL', | ||
58 | 'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0', | ||
59 | 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0', | ||
60 | 'License :: OSI Approved :: Eiffel Forum License': 'EFL', | ||
61 | 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0', | ||
62 | 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1', | ||
63 | 'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2', | ||
64 | 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only', | ||
65 | 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later', | ||
66 | 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL', | ||
67 | 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL', | ||
68 | 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only', | ||
69 | 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later', | ||
70 | 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only', | ||
71 | 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later', | ||
72 | 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only', | ||
73 | 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later', | ||
74 | 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only', | ||
75 | 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later', | ||
76 | 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL', | ||
77 | 'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND', | ||
78 | 'License :: OSI Approved :: IBM Public License': 'IPL', | ||
79 | 'License :: OSI Approved :: ISC License (ISCL)': 'ISC', | ||
80 | 'License :: OSI Approved :: Intel Open Source License': 'Intel', | ||
81 | 'License :: OSI Approved :: Jabber Open Source License': 'Jabber', | ||
82 | 'License :: OSI Approved :: MIT License': 'MIT', | ||
83 | 'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0', | ||
84 | 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL', | ||
85 | 'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS', | ||
86 | 'License :: OSI Approved :: Motosoto License': 'Motosoto', | ||
87 | 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0', | ||
88 | 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1', | ||
89 | 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0', | ||
90 | 'License :: OSI Approved :: Nethack General Public License': 'NGPL', | ||
91 | 'License :: OSI Approved :: Nokia Open Source License': 'Nokia', | ||
92 | 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL', | ||
93 | 'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0', | ||
94 | 'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL', | ||
95 | 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python', | ||
96 | 'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0', | ||
97 | 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL', | ||
98 | 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL', | ||
99 | 'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1', | ||
100 | 'License :: OSI Approved :: Sleepycat License': 'Sleepycat', | ||
101 | 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL', | ||
102 | 'License :: OSI Approved :: Sun Public License': 'SPL', | ||
103 | 'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense', | ||
104 | 'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0', | ||
105 | 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA', | ||
106 | 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0', | ||
107 | 'License :: OSI Approved :: W3C License': 'W3C', | ||
108 | 'License :: OSI Approved :: X.Net License': 'Xnet', | ||
109 | 'License :: OSI Approved :: Zope Public License': 'ZPL', | ||
110 | 'License :: OSI Approved :: zlib/libpng License': 'Zlib', | ||
111 | 'License :: Other/Proprietary License': 'Proprietary', | ||
112 | 'License :: Public Domain': 'PD', | ||
113 | } | ||
114 | |||
115 | def __init__(self): | ||
116 | pass | ||
117 | |||
118 | def process_url(self, args, classes, handled, extravalues): | ||
119 | """ | ||
120 | Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/... | ||
121 | which corresponds to the archive location, and add pypi class | ||
122 | """ | ||
123 | |||
124 | if 'url' in handled: | ||
125 | return None | ||
126 | |||
127 | fetch_uri = None | ||
128 | source = args.source | ||
129 | required_version = args.version if args.version else None | ||
130 | match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0]) | ||
131 | if match: | ||
132 | package = match.group(1) | ||
133 | version = match.group(2) if match.group(2) else required_version | ||
134 | |||
135 | json_url = f"https://pypi.org/pypi/%s/json" % package | ||
136 | response = urllib.request.urlopen(json_url) | ||
137 | if response.status == 200: | ||
138 | data = json.loads(response.read()) | ||
139 | if not version: | ||
140 | # grab latest version | ||
141 | version = data["info"]["version"] | ||
142 | pypi_package = data["info"]["name"] | ||
143 | for release in reversed(data["releases"][version]): | ||
144 | if release["packagetype"] == "sdist": | ||
145 | fetch_uri = release["url"] | ||
146 | break | ||
147 | else: | ||
148 | logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url) | ||
149 | return None | ||
150 | else: | ||
151 | match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source) | ||
152 | if match: | ||
153 | fetch_uri = source | ||
154 | pypi_package = match.group(1) | ||
155 | _, version = determine_from_url(fetch_uri) | ||
156 | |||
157 | if match and not args.no_pypi: | ||
158 | if required_version and version != required_version: | ||
159 | raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version)) | ||
160 | # This is optional if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass) | ||
161 | # but at this point we cannot know because the user can specify the output name of the recipe on the command line | ||
162 | extravalues["PYPI_PACKAGE"] = pypi_package | ||
163 | # If the tarball extension is not 'tar.gz' (default value in pypi.bbclass) we should set PYPI_PACKAGE_EXT in the recipe | ||
164 | pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri) | ||
165 | if pypi_package_ext: | ||
166 | pypi_package_ext = pypi_package_ext.group(1) | ||
167 | if pypi_package_ext != "tar.gz": | ||
168 | extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext | ||
169 | |||
170 | # Pypi class will handle S and SRC_URI variables, so remove them | ||
171 | # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to: | ||
172 | # extravalues['SRC_URI(?:\[.*?\])?'] = None | ||
173 | extravalues['S'] = None | ||
174 | extravalues['SRC_URI'] = None | ||
175 | |||
176 | classes.append('pypi') | ||
177 | |||
178 | handled.append('url') | ||
179 | return fetch_uri | ||
180 | |||
181 | def handle_classifier_license(self, classifiers, existing_licenses=""): | ||
182 | |||
183 | licenses = [] | ||
184 | for classifier in classifiers: | ||
185 | if classifier in self.classifier_license_map: | ||
186 | license = self.classifier_license_map[classifier] | ||
187 | if license == 'Apache' and 'Apache-2.0' in existing_licenses: | ||
188 | license = 'Apache-2.0' | ||
189 | elif license == 'GPL': | ||
190 | if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses: | ||
191 | license = 'GPL-2.0' | ||
192 | elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses: | ||
193 | license = 'GPL-3.0' | ||
194 | elif license == 'LGPL': | ||
195 | if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses: | ||
196 | license = 'LGPL-2.1' | ||
197 | elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses: | ||
198 | license = 'LGPL-2.0' | ||
199 | elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses: | ||
200 | license = 'LGPL-3.0' | ||
201 | licenses.append(license) | ||
202 | |||
203 | if licenses: | ||
204 | return ' & '.join(licenses) | ||
205 | |||
206 | return None | ||
207 | |||
208 | def map_info_to_bbvar(self, info, extravalues): | ||
209 | |||
210 | # Map PKG-INFO & setup.py fields to bitbake variables | ||
211 | for field, values in info.items(): | ||
212 | if field in self.excluded_fields: | ||
213 | continue | ||
214 | |||
215 | if field not in self.bbvar_map: | ||
216 | continue | ||
217 | |||
218 | if isinstance(values, str): | ||
219 | value = values | ||
220 | else: | ||
221 | value = ' '.join(str(v) for v in values if v) | ||
222 | |||
223 | bbvar = self.bbvar_map[field] | ||
224 | if bbvar == "PN": | ||
225 | # by convention python recipes start with "python3-" | ||
226 | if not value.startswith('python'): | ||
227 | value = 'python3-' + value | ||
228 | |||
229 | if bbvar not in extravalues and value: | ||
230 | extravalues[bbvar] = value | ||
231 | |||
232 | def apply_info_replacements(self, info): | ||
233 | if not self.replacements: | ||
234 | return | ||
235 | |||
236 | for variable, search, replace in self.replacements: | ||
237 | if variable not in info: | ||
238 | continue | ||
239 | |||
240 | def replace_value(search, replace, value): | ||
241 | if replace is None: | ||
242 | if re.search(search, value): | ||
243 | return None | ||
244 | else: | ||
245 | new_value = re.sub(search, replace, value) | ||
246 | if value != new_value: | ||
247 | return new_value | ||
248 | return value | ||
249 | |||
250 | value = info[variable] | ||
251 | if isinstance(value, str): | ||
252 | new_value = replace_value(search, replace, value) | ||
253 | if new_value is None: | ||
254 | del info[variable] | ||
255 | elif new_value != value: | ||
256 | info[variable] = new_value | ||
257 | elif hasattr(value, 'items'): | ||
258 | for dkey, dvalue in list(value.items()): | ||
259 | new_list = [] | ||
260 | for pos, a_value in enumerate(dvalue): | ||
261 | new_value = replace_value(search, replace, a_value) | ||
262 | if new_value is not None and new_value != value: | ||
263 | new_list.append(new_value) | ||
264 | |||
265 | if value != new_list: | ||
266 | value[dkey] = new_list | ||
267 | else: | ||
268 | new_list = [] | ||
269 | for pos, a_value in enumerate(value): | ||
270 | new_value = replace_value(search, replace, a_value) | ||
271 | if new_value is not None and new_value != value: | ||
272 | new_list.append(new_value) | ||
273 | |||
274 | if value != new_list: | ||
275 | info[variable] = new_list | ||
276 | |||
277 | |||
278 | def scan_python_dependencies(self, paths): | ||
279 | deps = set() | ||
280 | try: | ||
281 | dep_output = self.run_command(['pythondeps', '-d'] + paths) | ||
282 | except (OSError, subprocess.CalledProcessError): | ||
283 | pass | ||
284 | else: | ||
285 | for line in dep_output.splitlines(): | ||
286 | line = line.rstrip() | ||
287 | dep, filename = line.split('\t', 1) | ||
288 | if filename.endswith('/setup.py'): | ||
289 | continue | ||
290 | deps.add(dep) | ||
291 | |||
292 | try: | ||
293 | provides_output = self.run_command(['pythondeps', '-p'] + paths) | ||
294 | except (OSError, subprocess.CalledProcessError): | ||
295 | pass | ||
296 | else: | ||
297 | provides_lines = (l.rstrip() for l in provides_output.splitlines()) | ||
298 | provides = set(l for l in provides_lines if l and l != 'setup') | ||
299 | deps -= provides | ||
300 | |||
301 | return deps | ||
302 | |||
303 | def parse_pkgdata_for_python_packages(self): | ||
304 | pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR') | ||
305 | |||
306 | ldata = tinfoil.config_data.createCopy() | ||
307 | bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True) | ||
308 | python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR') | ||
309 | |||
310 | dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload') | ||
311 | python_dirs = [python_sitedir + os.sep, | ||
312 | os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep, | ||
313 | os.path.dirname(python_sitedir) + os.sep] | ||
314 | packages = {} | ||
315 | for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)): | ||
316 | files_info = None | ||
317 | with open(pkgdatafile, 'r') as f: | ||
318 | for line in f.readlines(): | ||
319 | field, value = line.split(': ', 1) | ||
320 | if field.startswith('FILES_INFO'): | ||
321 | files_info = ast.literal_eval(value) | ||
322 | break | ||
323 | else: | ||
324 | continue | ||
325 | |||
326 | for fn in files_info: | ||
327 | for suffix in importlib.machinery.all_suffixes(): | ||
328 | if fn.endswith(suffix): | ||
329 | break | ||
330 | else: | ||
331 | continue | ||
332 | |||
333 | if fn.startswith(dynload_dir + os.sep): | ||
334 | if '/.debug/' in fn: | ||
335 | continue | ||
336 | base = os.path.basename(fn) | ||
337 | provided = base.split('.', 1)[0] | ||
338 | packages[provided] = os.path.basename(pkgdatafile) | ||
339 | continue | ||
340 | |||
341 | for python_dir in python_dirs: | ||
342 | if fn.startswith(python_dir): | ||
343 | relpath = fn[len(python_dir):] | ||
344 | relstart, _, relremaining = relpath.partition(os.sep) | ||
345 | if relstart.endswith('.egg'): | ||
346 | relpath = relremaining | ||
347 | base, _ = os.path.splitext(relpath) | ||
348 | |||
349 | if '/.debug/' in base: | ||
350 | continue | ||
351 | if os.path.basename(base) == '__init__': | ||
352 | base = os.path.dirname(base) | ||
353 | base = base.replace(os.sep + os.sep, os.sep) | ||
354 | provided = base.replace(os.sep, '.') | ||
355 | packages[provided] = os.path.basename(pkgdatafile) | ||
356 | return packages | ||
357 | |||
358 | @classmethod | ||
359 | def run_command(cls, cmd, **popenargs): | ||
360 | if 'stderr' not in popenargs: | ||
361 | popenargs['stderr'] = subprocess.STDOUT | ||
362 | try: | ||
363 | return subprocess.check_output(cmd, **popenargs).decode('utf-8') | ||
364 | except OSError as exc: | ||
365 | logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc) | ||
366 | raise | ||
367 | except subprocess.CalledProcessError as exc: | ||
368 | logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output) | ||
369 | raise | ||
370 | |||
371 | class PythonSetupPyRecipeHandler(PythonRecipeHandler): | ||
41 | bbvar_map = { | 372 | bbvar_map = { |
42 | 'Name': 'PN', | 373 | 'Name': 'PN', |
43 | 'Version': 'PV', | 374 | 'Version': 'PV', |
@@ -75,6 +406,7 @@ class PythonRecipeHandler(RecipeHandler): | |||
75 | 'Supported-Platform', | 406 | 'Supported-Platform', |
76 | ] | 407 | ] |
77 | setuparg_multi_line_values = ['Description'] | 408 | setuparg_multi_line_values = ['Description'] |
409 | |||
78 | replacements = [ | 410 | replacements = [ |
79 | ('License', r' +$', ''), | 411 | ('License', r' +$', ''), |
80 | ('License', r'^ +', ''), | 412 | ('License', r'^ +', ''), |
@@ -95,71 +427,161 @@ class PythonRecipeHandler(RecipeHandler): | |||
95 | ('Install-requires', r'\[[^\]]+\]$', ''), | 427 | ('Install-requires', r'\[[^\]]+\]$', ''), |
96 | ] | 428 | ] |
97 | 429 | ||
98 | classifier_license_map = { | ||
99 | 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL', | ||
100 | 'License :: OSI Approved :: Apache Software License': 'Apache', | ||
101 | 'License :: OSI Approved :: Apple Public Source License': 'APSL', | ||
102 | 'License :: OSI Approved :: Artistic License': 'Artistic', | ||
103 | 'License :: OSI Approved :: Attribution Assurance License': 'AAL', | ||
104 | 'License :: OSI Approved :: BSD License': 'BSD-3-Clause', | ||
105 | 'License :: OSI Approved :: Common Public License': 'CPL', | ||
106 | 'License :: OSI Approved :: Eiffel Forum License': 'EFL', | ||
107 | 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0', | ||
108 | 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1', | ||
109 | 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0+', | ||
110 | 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0', | ||
111 | 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL', | ||
112 | 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL', | ||
113 | 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0', | ||
114 | 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0+', | ||
115 | 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0', | ||
116 | 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0+', | ||
117 | 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0', | ||
118 | 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0+', | ||
119 | 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0', | ||
120 | 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0+', | ||
121 | 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL', | ||
122 | 'License :: OSI Approved :: IBM Public License': 'IPL', | ||
123 | 'License :: OSI Approved :: ISC License (ISCL)': 'ISC', | ||
124 | 'License :: OSI Approved :: Intel Open Source License': 'Intel', | ||
125 | 'License :: OSI Approved :: Jabber Open Source License': 'Jabber', | ||
126 | 'License :: OSI Approved :: MIT License': 'MIT', | ||
127 | 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL', | ||
128 | 'License :: OSI Approved :: Motosoto License': 'Motosoto', | ||
129 | 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0', | ||
130 | 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1', | ||
131 | 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0', | ||
132 | 'License :: OSI Approved :: Nethack General Public License': 'NGPL', | ||
133 | 'License :: OSI Approved :: Nokia Open Source License': 'Nokia', | ||
134 | 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL', | ||
135 | 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python', | ||
136 | 'License :: OSI Approved :: Python Software Foundation License': 'PSF', | ||
137 | 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL', | ||
138 | 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL', | ||
139 | 'License :: OSI Approved :: Sleepycat License': 'Sleepycat', | ||
140 | 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': '-- Sun Industry Standards Source License (SISSL)', | ||
141 | 'License :: OSI Approved :: Sun Public License': 'SPL', | ||
142 | 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA', | ||
143 | 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0', | ||
144 | 'License :: OSI Approved :: W3C License': 'W3C', | ||
145 | 'License :: OSI Approved :: X.Net License': 'Xnet', | ||
146 | 'License :: OSI Approved :: Zope Public License': 'ZPL', | ||
147 | 'License :: OSI Approved :: zlib/libpng License': 'Zlib', | ||
148 | } | ||
149 | |||
150 | def __init__(self): | 430 | def __init__(self): |
151 | pass | 431 | pass |
152 | 432 | ||
433 | def parse_setup_py(self, setupscript='./setup.py'): | ||
434 | with codecs.open(setupscript) as f: | ||
435 | info, imported_modules, non_literals, extensions = gather_setup_info(f) | ||
436 | |||
437 | def _map(key): | ||
438 | key = key.replace('_', '-') | ||
439 | key = key[0].upper() + key[1:] | ||
440 | if key in self.setup_parse_map: | ||
441 | key = self.setup_parse_map[key] | ||
442 | return key | ||
443 | |||
444 | # Naive mapping of setup() arguments to PKG-INFO field names | ||
445 | for d in [info, non_literals]: | ||
446 | for key, value in list(d.items()): | ||
447 | if key is None: | ||
448 | continue | ||
449 | new_key = _map(key) | ||
450 | if new_key != key: | ||
451 | del d[key] | ||
452 | d[new_key] = value | ||
453 | |||
454 | return info, 'setuptools' in imported_modules, non_literals, extensions | ||
455 | |||
456 | def get_setup_args_info(self, setupscript='./setup.py'): | ||
457 | cmd = ['python3', setupscript] | ||
458 | info = {} | ||
459 | keys = set(self.bbvar_map.keys()) | ||
460 | keys |= set(self.setuparg_list_fields) | ||
461 | keys |= set(self.setuparg_multi_line_values) | ||
462 | grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values)) | ||
463 | for index, keys in grouped_keys: | ||
464 | if index == (True, False): | ||
465 | # Splitlines output for each arg as a list value | ||
466 | for key in keys: | ||
467 | arg = self.setuparg_map.get(key, key.lower()) | ||
468 | try: | ||
469 | arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript)) | ||
470 | except (OSError, subprocess.CalledProcessError): | ||
471 | pass | ||
472 | else: | ||
473 | info[key] = [l.rstrip() for l in arg_info.splitlines()] | ||
474 | elif index == (False, True): | ||
475 | # Entire output for each arg | ||
476 | for key in keys: | ||
477 | arg = self.setuparg_map.get(key, key.lower()) | ||
478 | try: | ||
479 | arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript)) | ||
480 | except (OSError, subprocess.CalledProcessError): | ||
481 | pass | ||
482 | else: | ||
483 | info[key] = arg_info | ||
484 | else: | ||
485 | info.update(self.get_setup_byline(list(keys), setupscript)) | ||
486 | return info | ||
487 | |||
488 | def get_setup_byline(self, fields, setupscript='./setup.py'): | ||
489 | info = {} | ||
490 | |||
491 | cmd = ['python3', setupscript] | ||
492 | cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields) | ||
493 | try: | ||
494 | info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines() | ||
495 | except (OSError, subprocess.CalledProcessError): | ||
496 | pass | ||
497 | else: | ||
498 | if len(fields) != len(info_lines): | ||
499 | logger.error('Mismatch between setup.py output lines and number of fields') | ||
500 | sys.exit(1) | ||
501 | |||
502 | for lineno, line in enumerate(info_lines): | ||
503 | line = line.rstrip() | ||
504 | info[fields[lineno]] = line | ||
505 | return info | ||
506 | |||
507 | def get_pkginfo(self, pkginfo_fn): | ||
508 | msg = email.message_from_file(open(pkginfo_fn, 'r')) | ||
509 | msginfo = {} | ||
510 | for field in msg.keys(): | ||
511 | values = msg.get_all(field) | ||
512 | if len(values) == 1: | ||
513 | msginfo[field] = values[0] | ||
514 | else: | ||
515 | msginfo[field] = values | ||
516 | return msginfo | ||
517 | |||
518 | def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals): | ||
519 | if 'Package-dir' in setup_info: | ||
520 | package_dir = setup_info['Package-dir'] | ||
521 | else: | ||
522 | package_dir = {} | ||
523 | |||
524 | dist = setuptools.Distribution() | ||
525 | |||
526 | class PackageDir(setuptools.command.build_py.build_py): | ||
527 | def __init__(self, package_dir): | ||
528 | self.package_dir = package_dir | ||
529 | self.dist = dist | ||
530 | super().__init__(self.dist) | ||
531 | |||
532 | pd = PackageDir(package_dir) | ||
533 | to_scan = [] | ||
534 | if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']): | ||
535 | if 'Py-modules' in setup_info: | ||
536 | for module in setup_info['Py-modules']: | ||
537 | try: | ||
538 | package, module = module.rsplit('.', 1) | ||
539 | except ValueError: | ||
540 | package, module = '.', module | ||
541 | module_path = os.path.join(pd.get_package_dir(package), module + '.py') | ||
542 | to_scan.append(module_path) | ||
543 | |||
544 | if 'Packages' in setup_info: | ||
545 | for package in setup_info['Packages']: | ||
546 | to_scan.append(pd.get_package_dir(package)) | ||
547 | |||
548 | if 'Scripts' in setup_info: | ||
549 | to_scan.extend(setup_info['Scripts']) | ||
550 | else: | ||
551 | logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.") | ||
552 | |||
553 | if not to_scan: | ||
554 | to_scan = ['.'] | ||
555 | |||
556 | logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan)) | ||
557 | |||
558 | provided_packages = self.parse_pkgdata_for_python_packages() | ||
559 | scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan]) | ||
560 | mapped_deps, unmapped_deps = set(self.base_pkgdeps), set() | ||
561 | for dep in scanned_deps: | ||
562 | mapped = provided_packages.get(dep) | ||
563 | if mapped: | ||
564 | logger.debug('Mapped %s to %s' % (dep, mapped)) | ||
565 | mapped_deps.add(mapped) | ||
566 | else: | ||
567 | logger.debug('Could not map %s' % dep) | ||
568 | unmapped_deps.add(dep) | ||
569 | return mapped_deps, unmapped_deps | ||
570 | |||
153 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | 571 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): |
572 | |||
154 | if 'buildsystem' in handled: | 573 | if 'buildsystem' in handled: |
155 | return False | 574 | return False |
156 | 575 | ||
576 | logger.debug("Trying setup.py parser") | ||
577 | |||
157 | # Check for non-zero size setup.py files | 578 | # Check for non-zero size setup.py files |
158 | setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py']) | 579 | setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py']) |
159 | for fn in setupfiles: | 580 | for fn in setupfiles: |
160 | if os.path.getsize(fn): | 581 | if os.path.getsize(fn): |
161 | break | 582 | break |
162 | else: | 583 | else: |
584 | logger.debug("No setup.py found") | ||
163 | return False | 585 | return False |
164 | 586 | ||
165 | # setup.py is always parsed to get at certain required information, such as | 587 | # setup.py is always parsed to get at certain required information, such as |
@@ -193,6 +615,18 @@ class PythonRecipeHandler(RecipeHandler): | |||
193 | continue | 615 | continue |
194 | 616 | ||
195 | if line.startswith('['): | 617 | if line.startswith('['): |
618 | # PACKAGECONFIG must not contain expressions or whitespace | ||
619 | line = line.replace(" ", "") | ||
620 | line = line.replace(':', "") | ||
621 | line = line.replace('.', "-dot-") | ||
622 | line = line.replace('"', "") | ||
623 | line = line.replace('<', "-smaller-") | ||
624 | line = line.replace('>', "-bigger-") | ||
625 | line = line.replace('_', "-") | ||
626 | line = line.replace('(', "") | ||
627 | line = line.replace(')', "") | ||
628 | line = line.replace('!', "-not-") | ||
629 | line = line.replace('=', "-equals-") | ||
196 | current_feature = line[1:-1] | 630 | current_feature = line[1:-1] |
197 | elif current_feature: | 631 | elif current_feature: |
198 | extras_req[current_feature].append(line) | 632 | extras_req[current_feature].append(line) |
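
The replace chain added above turns extras_require section headers from requires.txt into PACKAGECONFIG-safe feature names. A minimal standalone sketch of that transformation (the sanitize_feature helper and the sample header are illustrative, not part of the patch):

    def sanitize_feature(line):
        # Mirror the replace chain above: drop whitespace, quotes, parentheses
        # and colons, spell out comparison operators, and map '.'/'_' to dashes
        # so the result is a valid PACKAGECONFIG flag name.
        for old, new in [(" ", ""), (":", ""), (".", "-dot-"), ('"', ""),
                         ("<", "-smaller-"), (">", "-bigger-"), ("_", "-"),
                         ("(", ""), (")", ""), ("!", "-not-"), ("=", "-equals-")]:
            line = line.replace(old, new)
        return line[1:-1]

    print(sanitize_feature('[test:python_version < "3.8"]'))
    # -> testpython-version-smaller-3-dot-8
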
@@ -226,51 +660,16 @@ class PythonRecipeHandler(RecipeHandler): | |||
226 | 660 | ||
227 | if license_str: | 661 | if license_str: |
228 | for i, line in enumerate(lines_before): | 662 | for i, line in enumerate(lines_before): |
229 | if line.startswith('LICENSE = '): | 663 | if line.startswith('##LICENSE_PLACEHOLDER##'): |
230 | lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str) | 664 | lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str) |
231 | break | 665 | break |
232 | 666 | ||
233 | if 'Classifier' in info: | 667 | if 'Classifier' in info: |
234 | existing_licenses = info.get('License', '') | 668 | license = self.handle_classifier_license(info['Classifier'], info.get('License', '')) |
235 | licenses = [] | 669 | if license: |
236 | for classifier in info['Classifier']: | 670 | info['License'] = license |
237 | if classifier in self.classifier_license_map: | ||
238 | license = self.classifier_license_map[classifier] | ||
239 | if license == 'Apache' and 'Apache-2.0' in existing_licenses: | ||
240 | license = 'Apache-2.0' | ||
241 | elif license == 'GPL': | ||
242 | if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses: | ||
243 | license = 'GPL-2.0' | ||
244 | elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses: | ||
245 | license = 'GPL-3.0' | ||
246 | elif license == 'LGPL': | ||
247 | if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses: | ||
248 | license = 'LGPL-2.1' | ||
249 | elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses: | ||
250 | license = 'LGPL-2.0' | ||
251 | elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses: | ||
252 | license = 'LGPL-3.0' | ||
253 | licenses.append(license) | ||
254 | |||
255 | if licenses: | ||
256 | info['License'] = ' & '.join(licenses) | ||
257 | 671 | ||
258 | # Map PKG-INFO & setup.py fields to bitbake variables | 672 | self.map_info_to_bbvar(info, extravalues) |
259 | for field, values in info.items(): | ||
260 | if field in self.excluded_fields: | ||
261 | continue | ||
262 | |||
263 | if field not in self.bbvar_map: | ||
264 | continue | ||
265 | |||
266 | if isinstance(values, str): | ||
267 | value = values | ||
268 | else: | ||
269 | value = ' '.join(str(v) for v in values if v) | ||
270 | |||
271 | bbvar = self.bbvar_map[field] | ||
272 | if bbvar not in extravalues and value: | ||
273 | extravalues[bbvar] = value | ||
274 | 673 | ||
275 | mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals) | 674 | mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals) |
276 | 675 | ||
@@ -281,6 +680,7 @@ class PythonRecipeHandler(RecipeHandler): | |||
281 | lines_after.append('# The following configs & dependencies are from setuptools extras_require.') | 680 | lines_after.append('# The following configs & dependencies are from setuptools extras_require.') |
282 | lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.') | 681 | lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.') |
283 | lines_after.append('# The upstream names may not correspond exactly to bitbake package names.') | 682 | lines_after.append('# The upstream names may not correspond exactly to bitbake package names.') |
683 | lines_after.append('# The configs might not be correct, since PACKAGECONFIG does not support expressions as may be used in requires.txt - they are just replaced by text.') | ||
284 | lines_after.append('#') | 684 | lines_after.append('#') |
285 | lines_after.append('# Uncomment this line to enable all the optional features.') | 685 | lines_after.append('# Uncomment this line to enable all the optional features.') |
286 | lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req))) | 686 | lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req))) |
@@ -326,275 +726,283 @@ class PythonRecipeHandler(RecipeHandler): | |||
326 | 726 | ||
327 | handled.append('buildsystem') | 727 | handled.append('buildsystem') |
328 | 728 | ||
329 | def get_pkginfo(self, pkginfo_fn): | 729 | class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler): |
330 | msg = email.message_from_file(open(pkginfo_fn, 'r')) | 730 | """Base class to support PEP517 and PEP518 |
331 | msginfo = {} | 731 | |
332 | for field in msg.keys(): | 732 | PEP517 https://peps.python.org/pep-0517/#source-trees |
333 | values = msg.get_all(field) | 733 | PEP518 https://peps.python.org/pep-0518/#build-system-table |
334 | if len(values) == 1: | 734 | """ |
335 | msginfo[field] = values[0] | 735 | # bitbake currently supports the 4 following backends |
336 | else: | 736 | build_backend_map = { |
337 | msginfo[field] = values | 737 | "setuptools.build_meta": "python_setuptools_build_meta", |
338 | return msginfo | 738 | "poetry.core.masonry.api": "python_poetry_core", |
739 | "flit_core.buildapi": "python_flit_core", | ||
740 | "hatchling.build": "python_hatchling", | ||
741 | "maturin": "python_maturin", | ||
742 | "mesonpy": "python_mesonpy", | ||
743 | } | ||
339 | 744 | ||
340 | def parse_setup_py(self, setupscript='./setup.py'): | 745 | # setuptools.build_meta and flit declare project metadata into the "project" section of pyproject.toml |
341 | with codecs.open(setupscript) as f: | 746 | # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata |
342 | info, imported_modules, non_literals, extensions = gather_setup_info(f) | 747 | # while poetry uses the "tool.poetry" section according to its official documentation: https://python-poetry.org/docs/pyproject/ |
748 | # keys from "project" and "tool.poetry" sections are almost the same except for the HOMEPAGE which is "homepage" for tool.poetry | ||
749 | # and "Homepage" for "project" section. So keep both | ||
750 | bbvar_map = { | ||
751 | "name": "PN", | ||
752 | "version": "PV", | ||
753 | "Homepage": "HOMEPAGE", | ||
754 | "homepage": "HOMEPAGE", | ||
755 | "description": "SUMMARY", | ||
756 | "license": "LICENSE", | ||
757 | "dependencies": "RDEPENDS:${PN}", | ||
758 | "requires": "DEPENDS", | ||
759 | } | ||
343 | 760 | ||
344 | def _map(key): | 761 | replacements = [ |
345 | key = key.replace('_', '-') | 762 | ("license", r" +$", ""), |
346 | key = key[0].upper() + key[1:] | 763 | ("license", r"^ +", ""), |
347 | if key in self.setup_parse_map: | 764 | ("license", r" ", "-"), |
348 | key = self.setup_parse_map[key] | 765 | ("license", r"^GNU-", ""), |
349 | return key | 766 | ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""), |
767 | ("license", r"^UNKNOWN$", ""), | ||
768 | # Remove currently unhandled version numbers from these variables | ||
769 | ("requires", r"\[[^\]]+\]$", ""), | ||
770 | ("requires", r"^([^><= ]+).*", r"\1"), | ||
771 | ("dependencies", r"\[[^\]]+\]$", ""), | ||
772 | ("dependencies", r"^([^><= ]+).*", r"\1"), | ||
773 | ] | ||
350 | 774 | ||
351 | # Naive mapping of setup() arguments to PKG-INFO field names | 775 | excluded_native_pkgdeps = [ |
352 | for d in [info, non_literals]: | 776 | # already provided by python_setuptools_build_meta.bbclass |
353 | for key, value in list(d.items()): | 777 | "python3-setuptools-native", |
354 | if key is None: | 778 | "python3-wheel-native", |
355 | continue | 779 | # already provided by python_poetry_core.bbclass |
356 | new_key = _map(key) | 780 | "python3-poetry-core-native", |
357 | if new_key != key: | 781 | # already provided by python_flit_core.bbclass |
358 | del d[key] | 782 | "python3-flit-core-native", |
359 | d[new_key] = value | 783 | # already provided by python_mesonpy |
784 | "python3-meson-python-native", | ||
785 | ] | ||
360 | 786 | ||
361 | return info, 'setuptools' in imported_modules, non_literals, extensions | 787 | # add here a list of known and often used packages and the corresponding bitbake package |
788 | known_deps_map = { | ||
789 | "setuptools": "python3-setuptools", | ||
790 | "wheel": "python3-wheel", | ||
791 | "poetry-core": "python3-poetry-core", | ||
792 | "flit_core": "python3-flit-core", | ||
793 | "setuptools-scm": "python3-setuptools-scm", | ||
794 | "hatchling": "python3-hatchling", | ||
795 | "hatch-vcs": "python3-hatch-vcs", | ||
796 | "meson-python" : "python3-meson-python", | ||
797 | } | ||
362 | 798 | ||
363 | def get_setup_args_info(self, setupscript='./setup.py'): | 799 | def __init__(self): |
364 | cmd = ['python3', setupscript] | 800 | pass |
365 | info = {} | ||
366 | keys = set(self.bbvar_map.keys()) | ||
367 | keys |= set(self.setuparg_list_fields) | ||
368 | keys |= set(self.setuparg_multi_line_values) | ||
369 | grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values)) | ||
370 | for index, keys in grouped_keys: | ||
371 | if index == (True, False): | ||
372 | # Splitlines output for each arg as a list value | ||
373 | for key in keys: | ||
374 | arg = self.setuparg_map.get(key, key.lower()) | ||
375 | try: | ||
376 | arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript)) | ||
377 | except (OSError, subprocess.CalledProcessError): | ||
378 | pass | ||
379 | else: | ||
380 | info[key] = [l.rstrip() for l in arg_info.splitlines()] | ||
381 | elif index == (False, True): | ||
382 | # Entire output for each arg | ||
383 | for key in keys: | ||
384 | arg = self.setuparg_map.get(key, key.lower()) | ||
385 | try: | ||
386 | arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript)) | ||
387 | except (OSError, subprocess.CalledProcessError): | ||
388 | pass | ||
389 | else: | ||
390 | info[key] = arg_info | ||
391 | else: | ||
392 | info.update(self.get_setup_byline(list(keys), setupscript)) | ||
393 | return info | ||
394 | 801 | ||
395 | def get_setup_byline(self, fields, setupscript='./setup.py'): | 802 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): |
396 | info = {} | 803 | info = {} |
804 | metadata = {} | ||
397 | 805 | ||
398 | cmd = ['python3', setupscript] | 806 | if 'buildsystem' in handled: |
399 | cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields) | 807 | return False |
400 | try: | ||
401 | info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines() | ||
402 | except (OSError, subprocess.CalledProcessError): | ||
403 | pass | ||
404 | else: | ||
405 | if len(fields) != len(info_lines): | ||
406 | logger.error('Mismatch between setup.py output lines and number of fields') | ||
407 | sys.exit(1) | ||
408 | |||
409 | for lineno, line in enumerate(info_lines): | ||
410 | line = line.rstrip() | ||
411 | info[fields[lineno]] = line | ||
412 | return info | ||
413 | |||
414 | def apply_info_replacements(self, info): | ||
415 | for variable, search, replace in self.replacements: | ||
416 | if variable not in info: | ||
417 | continue | ||
418 | |||
419 | def replace_value(search, replace, value): | ||
420 | if replace is None: | ||
421 | if re.search(search, value): | ||
422 | return None | ||
423 | else: | ||
424 | new_value = re.sub(search, replace, value) | ||
425 | if value != new_value: | ||
426 | return new_value | ||
427 | return value | ||
428 | |||
429 | value = info[variable] | ||
430 | if isinstance(value, str): | ||
431 | new_value = replace_value(search, replace, value) | ||
432 | if new_value is None: | ||
433 | del info[variable] | ||
434 | elif new_value != value: | ||
435 | info[variable] = new_value | ||
436 | elif hasattr(value, 'items'): | ||
437 | for dkey, dvalue in list(value.items()): | ||
438 | new_list = [] | ||
439 | for pos, a_value in enumerate(dvalue): | ||
440 | new_value = replace_value(search, replace, a_value) | ||
441 | if new_value is not None and new_value != value: | ||
442 | new_list.append(new_value) | ||
443 | |||
444 | if value != new_list: | ||
445 | value[dkey] = new_list | ||
446 | else: | ||
447 | new_list = [] | ||
448 | for pos, a_value in enumerate(value): | ||
449 | new_value = replace_value(search, replace, a_value) | ||
450 | if new_value is not None and new_value != value: | ||
451 | new_list.append(new_value) | ||
452 | |||
453 | if value != new_list: | ||
454 | info[variable] = new_list | ||
455 | |||
456 | def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals): | ||
457 | if 'Package-dir' in setup_info: | ||
458 | package_dir = setup_info['Package-dir'] | ||
459 | else: | ||
460 | package_dir = {} | ||
461 | |||
462 | class PackageDir(distutils.command.build_py.build_py): | ||
463 | def __init__(self, package_dir): | ||
464 | self.package_dir = package_dir | ||
465 | |||
466 | pd = PackageDir(package_dir) | ||
467 | to_scan = [] | ||
468 | if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']): | ||
469 | if 'Py-modules' in setup_info: | ||
470 | for module in setup_info['Py-modules']: | ||
471 | try: | ||
472 | package, module = module.rsplit('.', 1) | ||
473 | except ValueError: | ||
474 | package, module = '.', module | ||
475 | module_path = os.path.join(pd.get_package_dir(package), module + '.py') | ||
476 | to_scan.append(module_path) | ||
477 | 808 | ||
478 | if 'Packages' in setup_info: | 809 | logger.debug("Trying pyproject.toml parser") |
479 | for package in setup_info['Packages']: | ||
480 | to_scan.append(pd.get_package_dir(package)) | ||
481 | 810 | ||
482 | if 'Scripts' in setup_info: | 811 | # Check for non-zero size setup.py files |
483 | to_scan.extend(setup_info['Scripts']) | 812 | setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"]) |
813 | for fn in setupfiles: | ||
814 | if os.path.getsize(fn): | ||
815 | break | ||
484 | else: | 816 | else: |
485 | logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.") | 817 | logger.debug("No pyproject.toml found") |
486 | 818 | return False | |
487 | if not to_scan: | ||
488 | to_scan = ['.'] | ||
489 | |||
490 | logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan)) | ||
491 | 819 | ||
492 | provided_packages = self.parse_pkgdata_for_python_packages() | 820 | setupscript = os.path.join(srctree, "pyproject.toml") |
493 | scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan]) | ||
494 | mapped_deps, unmapped_deps = set(self.base_pkgdeps), set() | ||
495 | for dep in scanned_deps: | ||
496 | mapped = provided_packages.get(dep) | ||
497 | if mapped: | ||
498 | logger.debug('Mapped %s to %s' % (dep, mapped)) | ||
499 | mapped_deps.add(mapped) | ||
500 | else: | ||
501 | logger.debug('Could not map %s' % dep) | ||
502 | unmapped_deps.add(dep) | ||
503 | return mapped_deps, unmapped_deps | ||
504 | 821 | ||
505 | def scan_python_dependencies(self, paths): | ||
506 | deps = set() | ||
507 | try: | 822 | try: |
508 | dep_output = self.run_command(['pythondeps', '-d'] + paths) | 823 | try: |
509 | except (OSError, subprocess.CalledProcessError): | 824 | import tomllib |
510 | pass | 825 | except ImportError: |
511 | else: | 826 | try: |
512 | for line in dep_output.splitlines(): | 827 | import tomli as tomllib |
513 | line = line.rstrip() | 828 | except ImportError: |
514 | dep, filename = line.split('\t', 1) | 829 | logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.") |
515 | if filename.endswith('/setup.py'): | 830 | return False |
516 | continue | 831 | |
517 | deps.add(dep) | 832 | try: |
833 | with open(setupscript, "rb") as f: | ||
834 | config = tomllib.load(f) | ||
835 | except Exception: | ||
836 | logger.exception("Failed to parse pyproject.toml") | ||
837 | return False | ||
838 | |||
839 | build_backend = config["build-system"]["build-backend"] | ||
840 | if build_backend in self.build_backend_map: | ||
841 | classes.append(self.build_backend_map[build_backend]) | ||
842 | else: | ||
843 | logger.error( | ||
844 | "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py" | ||
845 | % build_backend | ||
846 | ) | ||
847 | return False | ||
518 | 848 | ||
519 | try: | 849 | licfile = "" |
520 | provides_output = self.run_command(['pythondeps', '-p'] + paths) | ||
521 | except (OSError, subprocess.CalledProcessError): | ||
522 | pass | ||
523 | else: | ||
524 | provides_lines = (l.rstrip() for l in provides_output.splitlines()) | ||
525 | provides = set(l for l in provides_lines if l and l != 'setup') | ||
526 | deps -= provides | ||
527 | 850 | ||
528 | return deps | 851 | if build_backend == "poetry.core.masonry.api": |
852 | if "tool" in config and "poetry" in config["tool"]: | ||
853 | metadata = config["tool"]["poetry"] | ||
854 | else: | ||
855 | if "project" in config: | ||
856 | metadata = config["project"] | ||
857 | |||
858 | if metadata: | ||
859 | for field, values in metadata.items(): | ||
860 | if field == "license": | ||
861 | # For setuptools.build_meta and flit, license is a table | ||
862 | # but for poetry, license is a string | ||
863 | # for hatchling, both table (jsonschema) and string (iniconfig) have been used | ||
864 | if build_backend == "poetry.core.masonry.api": | ||
865 | value = values | ||
866 | else: | ||
867 | value = values.get("text", "") | ||
868 | if not value: | ||
869 | licfile = values.get("file", "") | ||
870 | continue | ||
871 | elif field == "dependencies" and build_backend == "poetry.core.masonry.api": | ||
872 | # For poetry backend, "dependencies" section looks like: | ||
873 | # [tool.poetry.dependencies] | ||
874 | # requests = "^2.13.0" | ||
875 | # requests = { version = "^2.13.0", source = "private" } | ||
876 | # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details | ||
877 | # This class doesn't handle versions anyway, so we just take the dependency names here and construct a list | ||
878 | value = [] | ||
879 | for k in values.keys(): | ||
880 | value.append(k) | ||
881 | elif isinstance(values, dict): | ||
882 | for k, v in values.items(): | ||
883 | info[k] = v | ||
884 | continue | ||
885 | else: | ||
886 | value = values | ||
529 | 887 | ||
530 | def parse_pkgdata_for_python_packages(self): | 888 | info[field] = value |
531 | suffixes = [t[0] for t in imp.get_suffixes()] | ||
532 | pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR') | ||
533 | 889 | ||
534 | ldata = tinfoil.config_data.createCopy() | 890 | # Grab the license value before applying replacements |
535 | bb.parse.handle('classes/python3-dir.bbclass', ldata, True) | 891 | license_str = info.get("license", "").strip() |
536 | python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR') | ||
537 | 892 | ||
538 | dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload') | 893 | if license_str: |
539 | python_dirs = [python_sitedir + os.sep, | 894 | for i, line in enumerate(lines_before): |
540 | os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep, | 895 | if line.startswith("##LICENSE_PLACEHOLDER##"): |
541 | os.path.dirname(python_sitedir) + os.sep] | 896 | lines_before.insert( |
542 | packages = {} | 897 | i, "# NOTE: License in pyproject.toml is: %s" % license_str |
543 | for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)): | 898 | ) |
544 | files_info = None | ||
545 | with open(pkgdatafile, 'r') as f: | ||
546 | for line in f.readlines(): | ||
547 | field, value = line.split(': ', 1) | ||
548 | if field.startswith('FILES_INFO'): | ||
549 | files_info = ast.literal_eval(value) | ||
550 | break | 899 | break |
551 | else: | ||
552 | continue | ||
553 | 900 | ||
554 | for fn in files_info: | 901 | info["requires"] = config["build-system"]["requires"] |
555 | for suffix in suffixes: | 902 | |
556 | if fn.endswith(suffix): | 903 | self.apply_info_replacements(info) |
557 | break | 904 | |
558 | else: | 905 | if "classifiers" in info: |
559 | continue | 906 | license = self.handle_classifier_license( |
907 | info["classifiers"], info.get("license", "") | ||
908 | ) | ||
909 | if license: | ||
910 | if licfile: | ||
911 | lines = [] | ||
912 | md5value = bb.utils.md5_file(os.path.join(srctree, licfile)) | ||
913 | lines.append('LICENSE = "%s"' % license) | ||
914 | lines.append( | ||
915 | 'LIC_FILES_CHKSUM = "file://%s;md5=%s"' | ||
916 | % (licfile, md5value) | ||
917 | ) | ||
918 | lines.append("") | ||
919 | |||
920 | # Replace the placeholder so we get the values in the right place in the recipe file | ||
921 | try: | ||
922 | pos = lines_before.index("##LICENSE_PLACEHOLDER##") | ||
923 | except ValueError: | ||
924 | pos = -1 | ||
925 | if pos == -1: | ||
926 | lines_before.extend(lines) | ||
927 | else: | ||
928 | lines_before[pos : pos + 1] = lines | ||
560 | 929 | ||
561 | if fn.startswith(dynload_dir + os.sep): | 930 | handled.append(("license", [license, licfile, md5value])) |
562 | if '/.debug/' in fn: | 931 | else: |
563 | continue | 932 | info["license"] = license |
564 | base = os.path.basename(fn) | ||
565 | provided = base.split('.', 1)[0] | ||
566 | packages[provided] = os.path.basename(pkgdatafile) | ||
567 | continue | ||
568 | 933 | ||
569 | for python_dir in python_dirs: | 934 | provided_packages = self.parse_pkgdata_for_python_packages() |
570 | if fn.startswith(python_dir): | 935 | provided_packages.update(self.known_deps_map) |
571 | relpath = fn[len(python_dir):] | 936 | native_mapped_deps, native_unmapped_deps = set(), set() |
572 | relstart, _, relremaining = relpath.partition(os.sep) | 937 | mapped_deps, unmapped_deps = set(), set() |
573 | if relstart.endswith('.egg'): | ||
574 | relpath = relremaining | ||
575 | base, _ = os.path.splitext(relpath) | ||
576 | 938 | ||
577 | if '/.debug/' in base: | 939 | if "requires" in info: |
578 | continue | 940 | for require in info["requires"]: |
579 | if os.path.basename(base) == '__init__': | 941 | mapped = provided_packages.get(require) |
580 | base = os.path.dirname(base) | ||
581 | base = base.replace(os.sep + os.sep, os.sep) | ||
582 | provided = base.replace(os.sep, '.') | ||
583 | packages[provided] = os.path.basename(pkgdatafile) | ||
584 | return packages | ||
585 | 942 | ||
586 | @classmethod | 943 | if mapped: |
587 | def run_command(cls, cmd, **popenargs): | 944 | logger.debug("Mapped %s to %s" % (require, mapped)) |
588 | if 'stderr' not in popenargs: | 945 | native_mapped_deps.add(mapped) |
589 | popenargs['stderr'] = subprocess.STDOUT | 946 | else: |
590 | try: | 947 | logger.debug("Could not map %s" % require) |
591 | return subprocess.check_output(cmd, **popenargs).decode('utf-8') | 948 | native_unmapped_deps.add(require) |
592 | except OSError as exc: | 949 | |
593 | logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc) | 950 | info.pop("requires") |
594 | raise | 951 | |
595 | except subprocess.CalledProcessError as exc: | 952 | if native_mapped_deps != set(): |
596 | logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output) | 953 | native_mapped_deps = { |
597 | raise | 954 | item + "-native" for item in native_mapped_deps |
955 | } | ||
956 | native_mapped_deps -= set(self.excluded_native_pkgdeps) | ||
957 | if native_mapped_deps != set(): | ||
958 | info["requires"] = " ".join(sorted(native_mapped_deps)) | ||
959 | |||
960 | if native_unmapped_deps: | ||
961 | lines_after.append("") | ||
962 | lines_after.append( | ||
963 | "# WARNING: We were unable to map the following python package/module" | ||
964 | ) | ||
965 | lines_after.append( | ||
966 | "# dependencies to the bitbake packages which include them:" | ||
967 | ) | ||
968 | lines_after.extend( | ||
969 | "# {}".format(d) for d in sorted(native_unmapped_deps) | ||
970 | ) | ||
971 | |||
972 | if "dependencies" in info: | ||
973 | for dependency in info["dependencies"]: | ||
974 | mapped = provided_packages.get(dependency) | ||
975 | if mapped: | ||
976 | logger.debug("Mapped %s to %s" % (dependency, mapped)) | ||
977 | mapped_deps.add(mapped) | ||
978 | else: | ||
979 | logger.debug("Could not map %s" % dependency) | ||
980 | unmapped_deps.add(dependency) | ||
981 | |||
982 | info.pop("dependencies") | ||
983 | |||
984 | if mapped_deps != set(): | ||
985 | if mapped_deps != set(): | ||
986 | info["dependencies"] = " ".join(sorted(mapped_deps)) | ||
987 | |||
988 | if unmapped_deps: | ||
989 | lines_after.append("") | ||
990 | lines_after.append( | ||
991 | "# WARNING: We were unable to map the following python package/module" | ||
992 | ) | ||
993 | lines_after.append( | ||
994 | "# runtime dependencies to the bitbake packages which include them:" | ||
995 | ) | ||
996 | lines_after.extend( | ||
997 | "# {}".format(d) for d in sorted(unmapped_deps) | ||
998 | ) | ||
999 | |||
1000 | self.map_info_to_bbvar(info, extravalues) | ||
1001 | |||
1002 | handled.append("buildsystem") | ||
1003 | except Exception: | ||
1004 | logger.exception("Failed to correctly handle pyproject.toml, falling back to another method") | ||
1005 | return False | ||
598 | 1006 | ||
599 | 1007 | ||
600 | def gather_setup_info(fileobj): | 1008 | def gather_setup_info(fileobj): |
@@ -710,5 +1118,7 @@ def has_non_literals(value): | |||
710 | 1118 | ||
711 | 1119 | ||
712 | def register_recipe_handlers(handlers): | 1120 | def register_recipe_handlers(handlers): |
713 | # We need to make sure this is ahead of the makefile fallback handler | 1121 | # We need to make sure these are ahead of the makefile fallback handler |
714 | handlers.append((PythonRecipeHandler(), 70)) | 1122 | # and the pyproject.toml handler ahead of the setup.py handler |
1123 | handlers.append((PythonPyprojectTomlRecipeHandler(), 75)) | ||
1124 | handlers.append((PythonSetupPyRecipeHandler(), 70)) | ||
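
For orientation, the new PythonPyprojectTomlRecipeHandler registered above expects a pyproject.toml along these lines: build-backend selects the inherited bbclass via build_backend_map, the [project] (or [tool.poetry]) table is mapped through bbvar_map (name to PN, version to PV, license to LICENSE, dependencies to RDEPENDS:${PN}), and build-system.requires becomes native build dependencies. A minimal sketch with a made-up package as input (the project name, version and dependency are illustrative):

    import tomllib  # Python >= 3.11; the handler falls back to tomli on older interpreters

    config = tomllib.loads("""
    [build-system]
    requires = ["setuptools>=61", "wheel"]
    build-backend = "setuptools.build_meta"

    [project]
    name = "example-pkg"
    version = "1.0.0"
    description = "An example package"
    license = { text = "MIT" }
    dependencies = ["requests"]
    """)

    # Subset of the build_backend_map defined in the handler above
    build_backend_map = {"setuptools.build_meta": "python_setuptools_build_meta"}
    print(build_backend_map[config["build-system"]["build-backend"]])
    # -> python_setuptools_build_meta (the class the generated recipe inherits)
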
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py new file mode 100644 index 0000000000..4b1fa39d13 --- /dev/null +++ b/scripts/lib/recipetool/create_go.py | |||
@@ -0,0 +1,174 @@ | |||
1 | # Recipe creation tool - go support plugin | ||
2 | # | ||
3 | # The code is based on golang internals. See the affected | ||
4 | # methods for further reference and information. | ||
5 | # | ||
6 | # Copyright (C) 2023 Weidmueller GmbH & Co KG | ||
7 | # Author: Lukas Funke <lukas.funke@weidmueller.com> | ||
8 | # | ||
9 | # SPDX-License-Identifier: GPL-2.0-only | ||
10 | # | ||
11 | |||
12 | |||
13 | from recipetool.create import RecipeHandler, handle_license_vars | ||
14 | |||
15 | import bb.utils | ||
16 | import json | ||
17 | import logging | ||
18 | import os | ||
19 | import re | ||
20 | import subprocess | ||
21 | import sys | ||
22 | import tempfile | ||
23 | |||
24 | |||
25 | logger = logging.getLogger('recipetool') | ||
26 | |||
27 | tinfoil = None | ||
28 | |||
29 | |||
30 | def tinfoil_init(instance): | ||
31 | global tinfoil | ||
32 | tinfoil = instance | ||
33 | |||
34 | |||
35 | |||
36 | class GoRecipeHandler(RecipeHandler): | ||
37 | """Class to handle the go recipe creation""" | ||
38 | |||
39 | @staticmethod | ||
40 | def __ensure_go(): | ||
41 | """Check if the 'go' command is available in the recipes""" | ||
42 | recipe = "go-native" | ||
43 | if not tinfoil.recipes_parsed: | ||
44 | tinfoil.parse_recipes() | ||
45 | try: | ||
46 | rd = tinfoil.parse_recipe(recipe) | ||
47 | except bb.providers.NoProvider: | ||
48 | bb.error( | ||
49 | "Nothing provides '%s' which is required for the build" % (recipe)) | ||
50 | bb.note( | ||
51 | "You will likely need to add a layer that provides '%s'" % (recipe)) | ||
52 | return None | ||
53 | |||
54 | bindir = rd.getVar('STAGING_BINDIR_NATIVE') | ||
55 | gopath = os.path.join(bindir, 'go') | ||
56 | |||
57 | if not os.path.exists(gopath): | ||
58 | tinfoil.build_targets(recipe, 'addto_recipe_sysroot') | ||
59 | |||
60 | if not os.path.exists(gopath): | ||
61 | logger.error( | ||
62 | '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe)) | ||
63 | return None | ||
64 | |||
65 | return bindir | ||
66 | |||
67 | def process(self, srctree, classes, lines_before, | ||
68 | lines_after, handled, extravalues): | ||
69 | |||
70 | if 'buildsystem' in handled: | ||
71 | return False | ||
72 | |||
73 | files = RecipeHandler.checkfiles(srctree, ['go.mod']) | ||
74 | if not files: | ||
75 | return False | ||
76 | |||
77 | go_bindir = self.__ensure_go() | ||
78 | if not go_bindir: | ||
79 | sys.exit(14) | ||
80 | |||
81 | handled.append('buildsystem') | ||
82 | classes.append("go-mod") | ||
83 | |||
84 | # Use go-mod-update-modules to set the full SRC_URI and LICENSE | ||
85 | classes.append("go-mod-update-modules") | ||
86 | extravalues["run_tasks"] = "update_modules" | ||
87 | |||
88 | with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir: | ||
89 | env = dict(os.environ) | ||
90 | env["PATH"] += f":{go_bindir}" | ||
91 | env['GOMODCACHE'] = tmp_mod_dir | ||
92 | |||
93 | stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True) | ||
94 | go_mod = json.loads(stdout) | ||
95 | go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path']) | ||
96 | |||
97 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') | ||
98 | extravalues.setdefault('extrafiles', {}) | ||
99 | |||
100 | # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files | ||
101 | basename = "{pn}-licenses.inc" | ||
102 | filename = os.path.join(localfilesdir, basename) | ||
103 | with open(filename, "w") as f: | ||
104 | f.write("# FROM RECIPETOOL\n") | ||
105 | extravalues['extrafiles'][f"../{basename}"] = filename | ||
106 | |||
107 | basename = "{pn}-go-mods.inc" | ||
108 | filename = os.path.join(localfilesdir, basename) | ||
109 | with open(filename, "w") as f: | ||
110 | f.write("# FROM RECIPETOOL\n") | ||
111 | extravalues['extrafiles'][f"../{basename}"] = filename | ||
112 | |||
113 | # Do generic license handling | ||
114 | d = bb.data.createCopy(tinfoil.config_data) | ||
115 | handle_license_vars(srctree, lines_before, handled, extravalues, d) | ||
116 | self.__rewrite_lic_vars(lines_before) | ||
117 | |||
118 | self.__rewrite_src_uri(lines_before) | ||
119 | |||
120 | lines_before.append('require ${BPN}-licenses.inc') | ||
121 | lines_before.append('require ${BPN}-go-mods.inc') | ||
122 | lines_before.append(f'GO_IMPORT = "{go_import}"') | ||
123 | |||
124 | def __update_lines_before(self, updated, newlines, lines_before): | ||
125 | if updated: | ||
126 | del lines_before[:] | ||
127 | for line in newlines: | ||
128 | # Hack to avoid newlines that edit_metadata inserts | ||
129 | if line.endswith('\n'): | ||
130 | line = line[:-1] | ||
131 | lines_before.append(line) | ||
132 | return updated | ||
133 | |||
134 | def __rewrite_lic_vars(self, lines_before): | ||
135 | def varfunc(varname, origvalue, op, newlines): | ||
136 | import urllib.parse | ||
137 | if varname == 'LIC_FILES_CHKSUM': | ||
138 | new_licenses = [] | ||
139 | licenses = origvalue.split('\\') | ||
140 | for license in licenses: | ||
141 | if not license: | ||
142 | logger.warning("No license file was detected for the main module!") | ||
143 | # the license list of the main recipe must be empty | ||
144 | # this can happen for example in case of CLOSED license | ||
145 | # Fall through to complete recipe generation | ||
146 | continue | ||
147 | license = license.strip() | ||
148 | uri, chksum = license.split(';', 1) | ||
149 | url = urllib.parse.urlparse(uri) | ||
150 | new_uri = os.path.join( | ||
151 | url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum | ||
152 | new_licenses.append(new_uri) | ||
153 | |||
154 | return new_licenses, None, -1, True | ||
155 | return origvalue, None, 0, True | ||
156 | |||
157 | updated, newlines = bb.utils.edit_metadata( | ||
158 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) | ||
159 | return self.__update_lines_before(updated, newlines, lines_before) | ||
160 | |||
161 | def __rewrite_src_uri(self, lines_before): | ||
162 | |||
163 | def varfunc(varname, origvalue, op, newlines): | ||
164 | if varname == 'SRC_URI': | ||
165 | src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'] | ||
166 | return src_uri, None, -1, True | ||
167 | return origvalue, None, 0, True | ||
168 | |||
169 | updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc) | ||
170 | return self.__update_lines_before(updated, newlines, lines_before) | ||
171 | |||
172 | |||
173 | def register_recipe_handlers(handlers): | ||
174 | handlers.append((GoRecipeHandler(), 60)) | ||
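
To illustrate how the handler above derives GO_IMPORT: it runs 'go mod edit -json' in the source tree and strips a trailing major-version suffix (/v2, /v3, ...) from the module path with the regex shown. A small sketch with a hypothetical module path:

    import json
    import re

    # Abridged shape of the JSON that 'go mod edit -json' prints for a module
    stdout = '{"Module": {"Path": "github.com/example/project/v2"}}'
    go_mod = json.loads(stdout)
    go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path'])
    print(go_import)  # -> github.com/example/project

The rewritten SRC_URI in the generated recipe then refers to this value through ${GO_IMPORT}.
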
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py index 3394a89970..8c4cdd5234 100644 --- a/scripts/lib/recipetool/create_npm.py +++ b/scripts/lib/recipetool/create_npm.py | |||
@@ -13,10 +13,11 @@ import sys | |||
13 | import tempfile | 13 | import tempfile |
14 | import bb | 14 | import bb |
15 | from bb.fetch2.npm import NpmEnvironment | 15 | from bb.fetch2.npm import NpmEnvironment |
16 | from bb.fetch2.npm import npm_package | ||
16 | from bb.fetch2.npmsw import foreach_dependencies | 17 | from bb.fetch2.npmsw import foreach_dependencies |
18 | from oe.license_finder import match_licenses, find_license_files | ||
17 | from recipetool.create import RecipeHandler | 19 | from recipetool.create import RecipeHandler |
18 | from recipetool.create import get_license_md5sums | 20 | from recipetool.create import generate_common_licenses_chksums |
19 | from recipetool.create import guess_license | ||
20 | from recipetool.create import split_pkg_licenses | 21 | from recipetool.create import split_pkg_licenses |
21 | logger = logging.getLogger('recipetool') | 22 | logger = logging.getLogger('recipetool') |
22 | 23 | ||
@@ -31,15 +32,6 @@ class NpmRecipeHandler(RecipeHandler): | |||
31 | """Class to handle the npm recipe creation""" | 32 | """Class to handle the npm recipe creation""" |
32 | 33 | ||
33 | @staticmethod | 34 | @staticmethod |
34 | def _npm_name(name): | ||
35 | """Generate a Yocto friendly npm name""" | ||
36 | name = re.sub("/", "-", name) | ||
37 | name = name.lower() | ||
38 | name = re.sub(r"[^\-a-z0-9]", "", name) | ||
39 | name = name.strip("-") | ||
40 | return name | ||
41 | |||
42 | @staticmethod | ||
43 | def _get_registry(lines): | 35 | def _get_registry(lines): |
44 | """Get the registry value from the 'npm://registry' url""" | 36 | """Get the registry value from the 'npm://registry' url""" |
45 | registry = None | 37 | registry = None |
@@ -120,41 +112,71 @@ class NpmRecipeHandler(RecipeHandler): | |||
120 | """Return the extra license files and the list of packages""" | 112 | """Return the extra license files and the list of packages""" |
121 | licfiles = [] | 113 | licfiles = [] |
122 | packages = {} | 114 | packages = {} |
115 | # Licenses from package.json will point to COMMON_LICENSE_DIR so we need | ||
116 | # to associate them explicitly to packages for split_pkg_licenses() | ||
117 | fallback_licenses = dict() | ||
118 | |||
119 | def _find_package_licenses(destdir): | ||
120 | """Either find license files, or use package.json metadata""" | ||
121 | def _get_licenses_from_package_json(package_json): | ||
122 | with open(os.path.join(srctree, package_json), "r") as f: | ||
123 | data = json.load(f) | ||
124 | if "license" in data: | ||
125 | licenses = data["license"].split(" ") | ||
126 | licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"] | ||
127 | return [], licenses | ||
128 | else: | ||
129 | return [package_json], None | ||
123 | 130 | ||
124 | # Handle the parent package | ||
125 | packages["${PN}"] = "" | ||
126 | |||
127 | def _licfiles_append_fallback_readme_files(destdir): | ||
128 | """Append README files as fallback to license files if a license files is missing""" | ||
129 | |||
130 | fallback = True | ||
131 | readmes = [] | ||
132 | basedir = os.path.join(srctree, destdir) | 131 | basedir = os.path.join(srctree, destdir) |
133 | for fn in os.listdir(basedir): | 132 | licfiles = find_license_files(basedir) |
134 | upper = fn.upper() | 133 | if len(licfiles) > 0: |
135 | if upper.startswith("README"): | 134 | return licfiles, None |
136 | fullpath = os.path.join(basedir, fn) | 135 | else: |
137 | readmes.append(fullpath) | 136 | # A license wasn't found in the package directory, so we'll use the package.json metadata |
138 | if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: | 137 | pkg_json = os.path.join(basedir, "package.json") |
139 | fallback = False | 138 | return _get_licenses_from_package_json(pkg_json) |
140 | if fallback: | 139 | |
141 | for readme in readmes: | 140 | def _get_package_licenses(destdir, package): |
142 | licfiles.append(os.path.relpath(readme, srctree)) | 141 | (package_licfiles, package_licenses) = _find_package_licenses(destdir) |
142 | if package_licfiles: | ||
143 | licfiles.extend(package_licfiles) | ||
144 | else: | ||
145 | fallback_licenses[package] = package_licenses | ||
143 | 146 | ||
144 | # Handle the dependencies | 147 | # Handle the dependencies |
145 | def _handle_dependency(name, params, deptree): | 148 | def _handle_dependency(name, params, destdir): |
146 | suffix = "-".join([self._npm_name(dep) for dep in deptree]) | 149 | deptree = destdir.split('node_modules/') |
147 | destdirs = [os.path.join("node_modules", dep) for dep in deptree] | 150 | suffix = "-".join([npm_package(dep) for dep in deptree]) |
148 | destdir = os.path.join(*destdirs) | 151 | packages["${PN}" + suffix] = destdir |
149 | packages["${PN}-" + suffix] = destdir | 152 | _get_package_licenses(destdir, "${PN}" + suffix) |
150 | _licfiles_append_fallback_readme_files(destdir) | ||
151 | 153 | ||
152 | with open(shrinkwrap_file, "r") as f: | 154 | with open(shrinkwrap_file, "r") as f: |
153 | shrinkwrap = json.load(f) | 155 | shrinkwrap = json.load(f) |
154 | |||
155 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) | 156 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) |
156 | 157 | ||
157 | return licfiles, packages | 158 | # Handle the parent package |
159 | packages["${PN}"] = "" | ||
160 | _get_package_licenses(srctree, "${PN}") | ||
161 | |||
162 | return licfiles, packages, fallback_licenses | ||
163 | |||
164 | # Handle the peer dependencies | ||
165 | def _handle_peer_dependency(self, shrinkwrap_file): | ||
166 | """Check if package has peer dependencies and show warning if it is the case""" | ||
167 | with open(shrinkwrap_file, "r") as f: | ||
168 | shrinkwrap = json.load(f) | ||
169 | |||
170 | packages = shrinkwrap.get("packages", {}) | ||
171 | peer_deps = packages.get("", {}).get("peerDependencies", {}) | ||
172 | |||
173 | for peer_dep in peer_deps: | ||
174 | peer_dep_yocto_name = npm_package(peer_dep) | ||
175 | bb.warn(peer_dep + " is a peer dependency of the current package. " + | ||
176 | "Please add this peer dependency to the RDEPENDS variable as %s and generate its recipe with devtool" | ||
177 | % peer_dep_yocto_name) | ||
178 | |||
179 | |||
158 | 180 | ||
159 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | 181 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): |
160 | """Handle the npm recipe creation""" | 182 | """Handle the npm recipe creation""" |
@@ -173,7 +195,7 @@ class NpmRecipeHandler(RecipeHandler): | |||
173 | if "name" not in data or "version" not in data: | 195 | if "name" not in data or "version" not in data: |
174 | return False | 196 | return False |
175 | 197 | ||
176 | extravalues["PN"] = self._npm_name(data["name"]) | 198 | extravalues["PN"] = npm_package(data["name"]) |
177 | extravalues["PV"] = data["version"] | 199 | extravalues["PV"] = data["version"] |
178 | 200 | ||
179 | if "description" in data: | 201 | if "description" in data: |
@@ -242,7 +264,7 @@ class NpmRecipeHandler(RecipeHandler): | |||
242 | value = origvalue.replace("version=" + data["version"], "version=${PV}") | 264 | value = origvalue.replace("version=" + data["version"], "version=${PV}") |
243 | value = value.replace("version=latest", "version=${PV}") | 265 | value = value.replace("version=latest", "version=${PV}") |
244 | values = [line.strip() for line in value.strip('\n').splitlines()] | 266 | values = [line.strip() for line in value.strip('\n').splitlines()] |
245 | if "dependencies" in shrinkwrap: | 267 | if "dependencies" in shrinkwrap.get("packages", {}).get("", {}): |
246 | values.append(url_recipe) | 268 | values.append(url_recipe) |
247 | return values, None, 4, False | 269 | return values, None, 4, False |
248 | 270 | ||
@@ -258,40 +280,19 @@ class NpmRecipeHandler(RecipeHandler): | |||
258 | fetcher.unpack(srctree) | 280 | fetcher.unpack(srctree) |
259 | 281 | ||
260 | bb.note("Handling licences ...") | 282 | bb.note("Handling licences ...") |
261 | (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) | 283 | (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev) |
262 | 284 | licvalues = match_licenses(licfiles, srctree, d) | |
263 | def _guess_odd_license(licfiles): | 285 | split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses) |
264 | import bb | 286 | fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist] |
265 | 287 | extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d) | |
266 | md5sums = get_license_md5sums(d, linenumbers=True) | 288 | extravalues["LICENSE"] = fallback_licenses_flat |
267 | |||
268 | chksums = [] | ||
269 | licenses = [] | ||
270 | for licfile in licfiles: | ||
271 | f = os.path.join(srctree, licfile) | ||
272 | md5value = bb.utils.md5_file(f) | ||
273 | (license, beginline, endline, md5) = md5sums.get(md5value, | ||
274 | (None, "", "", "")) | ||
275 | if not license: | ||
276 | license = "Unknown" | ||
277 | logger.info("Please add the following line for '%s' to a " | ||
278 | "'lib/recipetool/licenses.csv' and replace `Unknown`, " | ||
279 | "`X`, `Y` and `MD5` with the license, begin line, " | ||
280 | "end line and partial MD5 checksum:\n" \ | ||
281 | "%s,Unknown,X,Y,MD5" % (licfile, md5value)) | ||
282 | chksums.append("file://%s%s%s;md5=%s" % (licfile, | ||
283 | ";beginline=%s" % (beginline) if beginline else "", | ||
284 | ";endline=%s" % (endline) if endline else "", | ||
285 | md5 if md5 else md5value)) | ||
286 | licenses.append((license, licfile, md5value)) | ||
287 | return (licenses, chksums) | ||
288 | |||
289 | (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles) | ||
290 | split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after) | ||
291 | 289 | ||
292 | classes.append("npm") | 290 | classes.append("npm") |
293 | handled.append("buildsystem") | 291 | handled.append("buildsystem") |
294 | 292 | ||
293 | # Check if package has peer dependencies and inform the user | ||
294 | self._handle_peer_dependency(shrinkwrap_file) | ||
295 | |||
295 | return True | 296 | return True |
296 | 297 | ||
297 | def register_recipe_handlers(handlers): | 298 | def register_recipe_handlers(handlers): |
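
To make the new fallback above concrete: when find_license_files() finds nothing in a package directory, the handler reads the 'license' field of that package's package.json and splits the SPDX expression on OR/AND; the resulting names are later resolved against the common licenses via generate_common_licenses_chksums(). A standalone sketch of the parsing step (the expression is illustrative):

    # Mirrors _get_licenses_from_package_json() for a package.json that contains
    #   "license": "(MIT OR Apache-2.0)"
    data = {"license": "(MIT OR Apache-2.0)"}
    licenses = data["license"].split(" ")
    licenses = [lic.strip("()") for lic in licenses if lic not in ("OR", "AND")]
    print(licenses)  # -> ['MIT', 'Apache-2.0']
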
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv deleted file mode 100644 index 0d3fb0607b..0000000000 --- a/scripts/lib/recipetool/licenses.csv +++ /dev/null | |||
@@ -1,37 +0,0 @@ | |||
1 | 0636e73ff0215e8d672dc4c32c317bb3,GPLv2 | ||
2 | 12f884d2ae1ff87c09e5b7ccc2c4ca7e,GPLv2 | ||
3 | 18810669f13b87348459e611d31ab760,GPLv2 | ||
4 | 252890d9eee26aab7b432e8b8a616475,LGPLv2 | ||
5 | 2d5025d4aa3495befef8f17206a5b0a1,LGPLv2.1 | ||
6 | 3214f080875748938ba060314b4f727d,LGPLv2 | ||
7 | 385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0 | ||
8 | 393a5ca445f6965873eca0259a17f833,GPLv2 | ||
9 | 3b83ef96387f14655fc854ddc3c6bd57,Apache-2.0 | ||
10 | 3bf50002aefd002f49e7bb854063f7e7,LGPLv2 | ||
11 | 4325afd396febcb659c36b49533135d4,GPLv2 | ||
12 | 4fbd65380cdd255951079008b364516c,LGPLv2.1 | ||
13 | 54c7042be62e169199200bc6477f04d1,BSD-3-Clause | ||
14 | 55ca817ccb7d5b5b66355690e9abc605,LGPLv2 | ||
15 | 59530bdf33659b29e73d4adb9f9f6552,GPLv2 | ||
16 | 5f30f0716dfdd0d91eb439ebec522ec2,LGPLv2 | ||
17 | 6a6a8e020838b23406c81b19c1d46df6,LGPLv3 | ||
18 | 751419260aa954499f7abaabaa882bbe,GPLv2 | ||
19 | 7fbc338309ac38fefcd64b04bb903e34,LGPLv2.1 | ||
20 | 8ca43cbc842c2336e835926c2166c28b,GPLv2 | ||
21 | 94d55d512a9ba36caa9b7df079bae19f,GPLv2 | ||
22 | 9ac2e7cff1ddaf48b6eab6028f23ef88,GPLv2 | ||
23 | 9f604d8a4f8e74f4f5140845a21b6674,LGPLv2 | ||
24 | a6f89e2100d9b6cdffcea4f398e37343,LGPLv2.1 | ||
25 | b234ee4d69f5fce4486a80fdaf4a4263,GPLv2 | ||
26 | bbb461211a33b134d42ed5ee802b37ff,LGPLv2.1 | ||
27 | bfe1f75d606912a4111c90743d6c7325,MPL-1.1 | ||
28 | c93c0550bd3173f4504b2cbd8991e50b,GPLv2 | ||
29 | d32239bcb673463ab874e80d47fae504,GPLv3 | ||
30 | d7810fab7487fb0aad327b76f1be7cd7,GPLv2 | ||
31 | d8045f3b8f929c1cb29a1e3fd737b499,LGPLv2.1 | ||
32 | db979804f025cf55aabec7129cb671ed,LGPLv2 | ||
33 | eb723b61539feef013de476e68b5c50a,GPLv2 | ||
34 | ebb5c50ab7cab4baeffba14977030c07,GPLv2 | ||
35 | f27defe1e96c2e1ecd4e0c9be8967949,GPLv3 | ||
36 | fad9b3332be894bab9bc501572864b29,LGPLv2.1 | ||
37 | fbc093901857fcd118f065f900982c24,LGPLv2.1 | ||
diff --git a/scripts/lib/recipetool/setvar.py b/scripts/lib/recipetool/setvar.py index f8e2ee75fb..b5ad335cae 100644 --- a/scripts/lib/recipetool/setvar.py +++ b/scripts/lib/recipetool/setvar.py | |||
@@ -49,6 +49,7 @@ def setvar(args): | |||
49 | for patch in patches: | 49 | for patch in patches: |
50 | for line in patch: | 50 | for line in patch: |
51 | sys.stdout.write(line) | 51 | sys.stdout.write(line) |
52 | tinfoil.modified_files() | ||
52 | return 0 | 53 | return 0 |
53 | 54 | ||
54 | 55 | ||
diff --git a/scripts/lib/resulttool/junit.py b/scripts/lib/resulttool/junit.py new file mode 100644 index 0000000000..c7a53dc550 --- /dev/null +++ b/scripts/lib/resulttool/junit.py | |||
@@ -0,0 +1,77 @@ | |||
1 | # resulttool - report test results in JUnit XML format | ||
2 | # | ||
3 | # Copyright (c) 2024, Siemens AG. | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | import os | ||
9 | import re | ||
10 | import xml.etree.ElementTree as ET | ||
11 | import resulttool.resultutils as resultutils | ||
12 | |||
13 | def junit(args, logger): | ||
14 | testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map) | ||
15 | |||
16 | total_time = 0 | ||
17 | skipped = 0 | ||
18 | failures = 0 | ||
19 | errors = 0 | ||
20 | |||
21 | for tests in testresults.values(): | ||
22 | results = tests[next(reversed(tests))].get("result", {}) | ||
23 | |||
24 | for result_id, result in results.items(): | ||
25 | # filter out ptestresult.rawlogs and ptestresult.sections | ||
26 | if re.search(r'\.test_', result_id): | ||
27 | total_time += result.get("duration", 0) | ||
28 | |||
29 | if result['status'] == "FAILED": | ||
30 | failures += 1 | ||
31 | elif result['status'] == "ERROR": | ||
32 | errors += 1 | ||
33 | elif result['status'] == "SKIPPED": | ||
34 | skipped += 1 | ||
35 | |||
36 | testsuites_node = ET.Element("testsuites") | ||
37 | testsuites_node.set("time", "%s" % total_time) | ||
38 | testsuite_node = ET.SubElement(testsuites_node, "testsuite") | ||
39 | testsuite_node.set("name", "Testimage") | ||
40 | testsuite_node.set("time", "%s" % total_time) | ||
41 | testsuite_node.set("tests", "%s" % len(results)) | ||
42 | testsuite_node.set("failures", "%s" % failures) | ||
43 | testsuite_node.set("errors", "%s" % errors) | ||
44 | testsuite_node.set("skipped", "%s" % skipped) | ||
45 | |||
46 | for result_id, result in results.items(): | ||
47 | if re.search(r'\.test_', result_id): | ||
48 | testcase_node = ET.SubElement(testsuite_node, "testcase", { | ||
49 | "name": result_id, | ||
50 | "classname": "Testimage", | ||
51 | "time": str(result['duration']) | ||
52 | }) | ||
53 | if result['status'] == "SKIPPED": | ||
54 | ET.SubElement(testcase_node, "skipped", message=result['log']) | ||
55 | elif result['status'] == "FAILED": | ||
56 | ET.SubElement(testcase_node, "failure", message=result['log']) | ||
57 | elif result['status'] == "ERROR": | ||
58 | ET.SubElement(testcase_node, "error", message=result['log']) | ||
59 | |||
60 | tree = ET.ElementTree(testsuites_node) | ||
61 | |||
62 | if args.junit_xml_path is None: | ||
63 | args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml' | ||
64 | tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True) | ||
65 | |||
66 | logger.info('Saved JUnit XML report as %s' % args.junit_xml_path) | ||
67 | |||
68 | def register_commands(subparsers): | ||
69 | """Register subcommands from this plugin""" | ||
70 | parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format', | ||
71 | description='generate unit test report in JUnit XML format based on the latest test results in the testresults.json.', | ||
72 | group='analysis') | ||
73 | parser_build.set_defaults(func=junit) | ||
74 | parser_build.add_argument('json_file', | ||
75 | help='json file should point to the testresults.json') | ||
76 | parser_build.add_argument('-j', '--junit_xml_path', | ||
77 | help='junit xml path allows setting the path of the generated test report. The default location is <build_dir>/tmp/log/oeqa/junit.xml') | ||
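For orientation, a minimal self-contained sketch (hypothetical test data, not part of the patch) of the JUnit XML structure the new `junit` subcommand builds with xml.etree.ElementTree; invoked as `resulttool junit <testresults.json> [-j <path>]`, the report defaults to the location described in the argument help above:

    import xml.etree.ElementTree as ET

    # Hypothetical results, mimicking one "result" section of testresults.json
    results = {
        "oescripts.OEScriptTests.test_example": {"status": "PASSED", "duration": 1.2, "log": ""},
        "oescripts.OEScriptTests.test_skipped": {"status": "SKIPPED", "duration": 0.0, "log": "n/a"},
    }

    testsuites = ET.Element("testsuites", time="1.2")
    testsuite = ET.SubElement(testsuites, "testsuite", name="Testimage", time="1.2",
                              tests=str(len(results)), failures="0", errors="0", skipped="1")
    for name, result in results.items():
        case = ET.SubElement(testsuite, "testcase", name=name, classname="Testimage",
                             time=str(result["duration"]))
        if result["status"] == "SKIPPED":
            ET.SubElement(case, "skipped", message=result["log"])

    ET.ElementTree(testsuites).write("junit.xml", encoding="UTF-8", xml_declaration=True)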
diff --git a/scripts/lib/resulttool/log.py b/scripts/lib/resulttool/log.py index eb3927ec82..15148ca288 100644 --- a/scripts/lib/resulttool/log.py +++ b/scripts/lib/resulttool/log.py | |||
@@ -28,12 +28,10 @@ def show_reproducible(result, reproducible, logger): | |||
28 | def log(args, logger): | 28 | def log(args, logger): |
29 | results = resultutils.load_resultsdata(args.source) | 29 | results = resultutils.load_resultsdata(args.source) |
30 | 30 | ||
31 | ptest_count = sum(1 for _, _, _, r in resultutils.test_run_results(results) if 'ptestresult.sections' in r) | ||
32 | if ptest_count > 1 and not args.prepend_run: | ||
33 | print("%i ptest sections found. '--prepend-run' is required" % ptest_count) | ||
34 | return 1 | ||
35 | |||
36 | for _, run_name, _, r in resultutils.test_run_results(results): | 31 | for _, run_name, _, r in resultutils.test_run_results(results): |
32 | if args.list_ptest: | ||
33 | print('\n'.join(sorted(r['ptestresult.sections'].keys()))) | ||
34 | |||
37 | if args.dump_ptest: | 35 | if args.dump_ptest: |
38 | for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']: | 36 | for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']: |
39 | if sectname in r: | 37 | if sectname in r: |
@@ -48,6 +46,9 @@ def log(args, logger): | |||
48 | 46 | ||
49 | os.makedirs(dest_dir, exist_ok=True) | 47 | os.makedirs(dest_dir, exist_ok=True) |
50 | dest = os.path.join(dest_dir, '%s.log' % name) | 48 | dest = os.path.join(dest_dir, '%s.log' % name) |
49 | if os.path.exists(dest): | ||
50 | print("Overlapping ptest logs found, skipping %s. The '--prepend-run' option would avoid this" % name) | ||
51 | continue | ||
51 | print(dest) | 52 | print(dest) |
52 | with open(dest, 'w') as f: | 53 | with open(dest, 'w') as f: |
53 | f.write(logdata) | 54 | f.write(logdata) |
@@ -86,6 +87,8 @@ def register_commands(subparsers): | |||
86 | parser.set_defaults(func=log) | 87 | parser.set_defaults(func=log) |
87 | parser.add_argument('source', | 88 | parser.add_argument('source', |
88 | help='the results file/directory/URL to import') | 89 | help='the results file/directory/URL to import') |
90 | parser.add_argument('--list-ptest', action='store_true', | ||
91 | help='list the ptest test names') | ||
89 | parser.add_argument('--ptest', action='append', default=[], | 92 | parser.add_argument('--ptest', action='append', default=[], |
90 | help='show logs for a ptest') | 93 | help='show logs for a ptest') |
91 | parser.add_argument('--dump-ptest', metavar='DIR', | 94 | parser.add_argument('--dump-ptest', metavar='DIR', |
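A minimal sketch of what the new --list-ptest option prints for a run, assuming hypothetical section names; it simply sorts the keys of 'ptestresult.sections' as in the added code above:

    r = {"ptestresult.sections": {"zlib": {"duration": "5"}, "openssl": {"duration": "60"}}}
    print("\n".join(sorted(r["ptestresult.sections"].keys())))
    # openssl
    # zlib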
diff --git a/scripts/lib/resulttool/manualexecution.py b/scripts/lib/resulttool/manualexecution.py index ecb27c5933..ae0861ac6b 100755 --- a/scripts/lib/resulttool/manualexecution.py +++ b/scripts/lib/resulttool/manualexecution.py | |||
@@ -22,7 +22,7 @@ def load_json_file(f): | |||
22 | def write_json_file(f, json_data): | 22 | def write_json_file(f, json_data): |
23 | os.makedirs(os.path.dirname(f), exist_ok=True) | 23 | os.makedirs(os.path.dirname(f), exist_ok=True) |
24 | with open(f, 'w') as filedata: | 24 | with open(f, 'w') as filedata: |
25 | filedata.write(json.dumps(json_data, sort_keys=True, indent=4)) | 25 | filedata.write(json.dumps(json_data, sort_keys=True, indent=1)) |
26 | 26 | ||
27 | class ManualTestRunner(object): | 27 | class ManualTestRunner(object): |
28 | 28 | ||
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py index 9f952951b3..33b3119c54 100644 --- a/scripts/lib/resulttool/regression.py +++ b/scripts/lib/resulttool/regression.py | |||
@@ -7,17 +7,213 @@ | |||
7 | # | 7 | # |
8 | 8 | ||
9 | import resulttool.resultutils as resultutils | 9 | import resulttool.resultutils as resultutils |
10 | import json | ||
11 | 10 | ||
12 | from oeqa.utils.git import GitRepo | 11 | from oeqa.utils.git import GitRepo |
13 | import oeqa.utils.gitarchive as gitarchive | 12 | import oeqa.utils.gitarchive as gitarchive |
14 | 13 | ||
15 | def compare_result(logger, base_name, target_name, base_result, target_result): | 14 | METADATA_MATCH_TABLE = { |
15 | "oeselftest": "OESELFTEST_METADATA" | ||
16 | } | ||
17 | |||
18 | OESELFTEST_METADATA_GUESS_TABLE={ | ||
19 | "trigger-build-posttrigger": { | ||
20 | "run_all_tests": False, | ||
21 | "run_tests":["buildoptions.SourceMirroring.test_yocto_source_mirror"], | ||
22 | "skips": None, | ||
23 | "machine": None, | ||
24 | "select_tags":None, | ||
25 | "exclude_tags": None | ||
26 | }, | ||
27 | "reproducible": { | ||
28 | "run_all_tests": False, | ||
29 | "run_tests":["reproducible"], | ||
30 | "skips": None, | ||
31 | "machine": None, | ||
32 | "select_tags":None, | ||
33 | "exclude_tags": None | ||
34 | }, | ||
35 | "arch-qemu-quick": { | ||
36 | "run_all_tests": True, | ||
37 | "run_tests":None, | ||
38 | "skips": None, | ||
39 | "machine": None, | ||
40 | "select_tags":["machine"], | ||
41 | "exclude_tags": None | ||
42 | }, | ||
43 | "arch-qemu-full-x86-or-x86_64": { | ||
44 | "run_all_tests": True, | ||
45 | "run_tests":None, | ||
46 | "skips": None, | ||
47 | "machine": None, | ||
48 | "select_tags":["machine", "toolchain-system"], | ||
49 | "exclude_tags": None | ||
50 | }, | ||
51 | "arch-qemu-full-others": { | ||
52 | "run_all_tests": True, | ||
53 | "run_tests":None, | ||
54 | "skips": None, | ||
55 | "machine": None, | ||
56 | "select_tags":["machine", "toolchain-user"], | ||
57 | "exclude_tags": None | ||
58 | }, | ||
59 | "selftest": { | ||
60 | "run_all_tests": True, | ||
61 | "run_tests":None, | ||
62 | "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"], | ||
63 | "machine": None, | ||
64 | "select_tags":None, | ||
65 | "exclude_tags": ["machine", "toolchain-system", "toolchain-user"] | ||
66 | }, | ||
67 | "bringup": { | ||
68 | "run_all_tests": True, | ||
69 | "run_tests":None, | ||
70 | "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"], | ||
71 | "machine": None, | ||
72 | "select_tags":None, | ||
73 | "exclude_tags": ["machine", "toolchain-system", "toolchain-user"] | ||
74 | } | ||
75 | } | ||
76 | |||
77 | STATUS_STRINGS = { | ||
78 | "None": "No matching test result" | ||
79 | } | ||
80 | |||
81 | REGRESSIONS_DISPLAY_LIMIT=50 | ||
82 | |||
83 | MISSING_TESTS_BANNER = "-------------------------- Missing tests --------------------------" | ||
84 | ADDITIONAL_DATA_BANNER = "--------------------- Matches and improvements --------------------" | ||
85 | |||
86 | def test_has_at_least_one_matching_tag(test, tag_list): | ||
87 | return "oetags" in test and any(oetag in tag_list for oetag in test["oetags"]) | ||
88 | |||
89 | def all_tests_have_at_least_one_matching_tag(results, tag_list): | ||
90 | return all(test_has_at_least_one_matching_tag(test_result, tag_list) or test_name.startswith("ptestresult") for (test_name, test_result) in results.items()) | ||
91 | |||
92 | def any_test_have_any_matching_tag(results, tag_list): | ||
93 | return any(test_has_at_least_one_matching_tag(test, tag_list) for test in results.values()) | ||
94 | |||
95 | def have_skipped_test(result, test_prefix): | ||
96 | return all( result[test]['status'] == "SKIPPED" for test in result if test.startswith(test_prefix)) | ||
97 | |||
98 | def have_all_tests_skipped(result, test_prefixes_list): | ||
99 | return all(have_skipped_test(result, test_prefix) for test_prefix in test_prefixes_list) | ||
100 | |||
101 | def guess_oeselftest_metadata(results): | ||
102 | """ | ||
103 | When an oeselftest test result is lacking OESELFTEST_METADATA, we can try to guess it based on results content. | ||
104 | Check results for specific values (absence/presence of oetags, number and name of executed tests...), | ||
105 | and if it matches one of known configuration from autobuilder configuration, apply guessed OSELFTEST_METADATA | ||
106 | to it to allow proper test filtering. | ||
107 | This guessing process is tightly coupled to config.json in autobuilder. It should trigger less and less, | ||
108 | as new tests will have OESELFTEST_METADATA properly appended at test reporting time | ||
109 | """ | ||
110 | |||
111 | if len(results) == 1 and "buildoptions.SourceMirroring.test_yocto_source_mirror" in results: | ||
112 | return OESELFTEST_METADATA_GUESS_TABLE['trigger-build-posttrigger'] | ||
113 | elif all(result.startswith("reproducible") for result in results): | ||
114 | return OESELFTEST_METADATA_GUESS_TABLE['reproducible'] | ||
115 | elif all_tests_have_at_least_one_matching_tag(results, ["machine"]): | ||
116 | return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-quick'] | ||
117 | elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-system"]): | ||
118 | return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-x86-or-x86_64'] | ||
119 | elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-user"]): | ||
120 | return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-others'] | ||
121 | elif not any_test_have_any_matching_tag(results, ["machine", "toolchain-user", "toolchain-system"]): | ||
122 | if have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"]): | ||
123 | return OESELFTEST_METADATA_GUESS_TABLE['selftest'] | ||
124 | elif have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"]): | ||
125 | return OESELFTEST_METADATA_GUESS_TABLE['bringup'] | ||
126 | |||
127 | return None | ||
128 | |||
129 | |||
130 | def metadata_matches(base_configuration, target_configuration): | ||
131 | """ | ||
132 | For passed base and target, check test type. If test type matches one of | ||
133 | properties described in METADATA_MATCH_TABLE, compare metadata if it is | ||
134 | present in base. Return true if metadata matches, or if base lacks some | ||
135 | data (either TEST_TYPE or the corresponding metadata) | ||
136 | """ | ||
137 | test_type = base_configuration.get('TEST_TYPE') | ||
138 | if test_type not in METADATA_MATCH_TABLE: | ||
139 | return True | ||
140 | |||
141 | metadata_key = METADATA_MATCH_TABLE.get(test_type) | ||
142 | if target_configuration.get(metadata_key) != base_configuration.get(metadata_key): | ||
143 | return False | ||
144 | |||
145 | return True | ||
146 | |||
147 | |||
148 | def machine_matches(base_configuration, target_configuration): | ||
149 | return base_configuration.get('MACHINE') == target_configuration.get('MACHINE') | ||
150 | |||
151 | |||
152 | def can_be_compared(logger, base, target): | ||
153 | """ | ||
154 | Some tests are not relevant to compare, for example some oeselftest runs | ||
155 | use different test sets or parameters. Return true if tests can be | ||
156 | compared | ||
157 | """ | ||
158 | ret = True | ||
159 | base_configuration = base['configuration'] | ||
160 | target_configuration = target['configuration'] | ||
161 | |||
162 | # Older test results lack proper OESELFTEST_METADATA: if not present, try to guess it based on tests results. | ||
163 | if base_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in base_configuration: | ||
164 | guess = guess_oeselftest_metadata(base['result']) | ||
165 | if guess is None: | ||
166 | logger.error(f"ERROR: did not manage to guess oeselftest metadata for {base_configuration['STARTTIME']}") | ||
167 | else: | ||
168 | logger.debug(f"Enriching {base_configuration['STARTTIME']} with {guess}") | ||
169 | base_configuration['OESELFTEST_METADATA'] = guess | ||
170 | if target_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in target_configuration: | ||
171 | guess = guess_oeselftest_metadata(target['result']) | ||
172 | if guess is None: | ||
173 | logger.error(f"ERROR: did not manage to guess oeselftest metadata for {target_configuration['STARTTIME']}") | ||
174 | else: | ||
175 | logger.debug(f"Enriching {target_configuration['STARTTIME']} with {guess}") | ||
176 | target_configuration['OESELFTEST_METADATA'] = guess | ||
177 | |||
178 | # Test runs with LTP results in them should only be compared with other runs with LTP tests in them | ||
179 | if base_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in base['result']): | ||
180 | ret = target_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in target['result']) | ||
181 | |||
182 | return ret and metadata_matches(base_configuration, target_configuration) \ | ||
183 | and machine_matches(base_configuration, target_configuration) | ||
184 | |||
185 | def get_status_str(raw_status): | ||
186 | raw_status_lower = raw_status.lower() if raw_status else "None" | ||
187 | return STATUS_STRINGS.get(raw_status_lower, raw_status) | ||
188 | |||
189 | def get_additional_info_line(new_pass_count, new_tests): | ||
190 | result=[] | ||
191 | if new_tests: | ||
192 | result.append(f'+{new_tests} test(s) present') | ||
193 | if new_pass_count: | ||
194 | result.append(f'+{new_pass_count} test(s) now passing') | ||
195 | |||
196 | if not result: | ||
197 | return "" | ||
198 | |||
199 | return ' -> ' + ', '.join(result) + '\n' | ||
200 | |||
201 | def compare_result(logger, base_name, target_name, base_result, target_result, display_limit=None): | ||
16 | base_result = base_result.get('result') | 202 | base_result = base_result.get('result') |
17 | target_result = target_result.get('result') | 203 | target_result = target_result.get('result') |
18 | result = {} | 204 | result = {} |
205 | new_tests = 0 | ||
206 | regressions = {} | ||
207 | resultstring = "" | ||
208 | new_tests = 0 | ||
209 | new_pass_count = 0 | ||
210 | |||
211 | display_limit = int(display_limit) if display_limit else REGRESSIONS_DISPLAY_LIMIT | ||
212 | |||
19 | if base_result and target_result: | 213 | if base_result and target_result: |
20 | for k in base_result: | 214 | for k in base_result: |
215 | if k in ['ptestresult.rawlogs', 'ptestresult.sections']: | ||
216 | continue | ||
21 | base_testcase = base_result[k] | 217 | base_testcase = base_result[k] |
22 | base_status = base_testcase.get('status') | 218 | base_status = base_testcase.get('status') |
23 | if base_status: | 219 | if base_status: |
@@ -27,12 +223,47 @@ def compare_result(logger, base_name, target_name, base_result, target_result): | |||
27 | result[k] = {'base': base_status, 'target': target_status} | 223 | result[k] = {'base': base_status, 'target': target_status} |
28 | else: | 224 | else: |
29 | logger.error('Failed to retrieved base test case status: %s' % k) | 225 | logger.error('Failed to retrieved base test case status: %s' % k) |
226 | |||
227 | # Also count new tests that were not present in base results: it | ||
228 | # could be newly added tests, but it could also highlight some test | ||
229 | # renames or fixed faulty ptests | ||
230 | for k in target_result: | ||
231 | if k not in base_result: | ||
232 | new_tests += 1 | ||
30 | if result: | 233 | if result: |
31 | resultstring = "Regression: %s\n %s\n" % (base_name, target_name) | 234 | new_pass_count = sum(test['target'] is not None and test['target'].startswith("PASS") for test in result.values()) |
32 | for k in sorted(result): | 235 | # Print a regression report only if at least one test has a regression status (FAIL, SKIPPED, absent...) |
33 | resultstring += ' %s: %s -> %s\n' % (k, result[k]['base'], result[k]['target']) | 236 | if new_pass_count < len(result): |
237 | resultstring = "Regression: %s\n %s\n" % (base_name, target_name) | ||
238 | for k in sorted(result): | ||
239 | if not result[k]['target'] or not result[k]['target'].startswith("PASS"): | ||
240 | # Differentiate each ptest kind when listing regressions | ||
241 | key_parts = k.split('.') | ||
242 | key = '.'.join(key_parts[:2]) if k.startswith('ptest') else key_parts[0] | ||
243 | # Append new regression to corresponding test family | ||
244 | regressions[key] = regressions.setdefault(key, []) + [' %s: %s -> %s\n' % (k, get_status_str(result[k]['base']), get_status_str(result[k]['target']))] | ||
245 | resultstring += f" Total: {sum([len(regressions[r]) for r in regressions])} new regression(s):\n" | ||
246 | for k in regressions: | ||
247 | resultstring += f" {len(regressions[k])} regression(s) for {k}\n" | ||
248 | count_to_print=min([display_limit, len(regressions[k])]) if display_limit > 0 else len(regressions[k]) | ||
249 | resultstring += ''.join(regressions[k][:count_to_print]) | ||
250 | if count_to_print < len(regressions[k]): | ||
251 | resultstring+=' [...]\n' | ||
252 | if new_pass_count > 0: | ||
253 | resultstring += f' Additionally, {new_pass_count} previously failing test(s) is/are now passing\n' | ||
254 | if new_tests > 0: | ||
255 | resultstring += f' Additionally, {new_tests} new test(s) is/are present\n' | ||
256 | else: | ||
257 | resultstring = "%s\n%s\n" % (base_name, target_name) | ||
258 | result = None | ||
34 | else: | 259 | else: |
35 | resultstring = "Match: %s\n %s" % (base_name, target_name) | 260 | resultstring = "%s\n%s\n" % (base_name, target_name) |
261 | |||
262 | if not result: | ||
263 | additional_info = get_additional_info_line(new_pass_count, new_tests) | ||
264 | if additional_info: | ||
265 | resultstring += additional_info | ||
266 | |||
36 | return result, resultstring | 267 | return result, resultstring |
37 | 268 | ||
38 | def get_results(logger, source): | 269 | def get_results(logger, source): |
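To illustrate the matching rule added above, here is a condensed, self-contained sketch of the metadata_matches() logic (the configurations below are hypothetical, and the function is simplified from the patch rather than imported from it):

    METADATA_MATCH_TABLE = {"oeselftest": "OESELFTEST_METADATA"}

    def metadata_matches(base, target):
        # Only test types listed in METADATA_MATCH_TABLE carry extra metadata to compare
        key = METADATA_MATCH_TABLE.get(base.get("TEST_TYPE"))
        return key is None or base.get(key) == target.get(key)

    base = {"TEST_TYPE": "oeselftest",
            "OESELFTEST_METADATA": {"run_all_tests": True, "select_tags": ["machine"]}}
    same = dict(base)
    other = {"TEST_TYPE": "oeselftest",
             "OESELFTEST_METADATA": {"run_all_tests": False, "run_tests": ["reproducible"]}}

    print(metadata_matches(base, same))   # True  -> comparable runs
    print(metadata_matches(base, other))  # False -> different test selections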
@@ -44,12 +275,38 @@ def regression(args, logger): | |||
44 | 275 | ||
45 | regression_common(args, logger, base_results, target_results) | 276 | regression_common(args, logger, base_results, target_results) |
46 | 277 | ||
278 | # Some test case naming is poor and contains random strings, particularly lttng/babeltrace. | ||
279 | # Truncating the test names works since they contain file and line number identifiers | ||
280 | # which allow us to match them without the random components. | ||
281 | def fixup_ptest_names(results, logger): | ||
282 | for r in results: | ||
283 | for i in results[r]: | ||
284 | tests = list(results[r][i]['result'].keys()) | ||
285 | for test in tests: | ||
286 | new = None | ||
287 | if test.startswith(("ptestresult.lttng-tools.", "ptestresult.babeltrace.", "ptestresult.babeltrace2")) and "_-_" in test: | ||
288 | new = test.split("_-_")[0] | ||
289 | elif test.startswith(("ptestresult.curl.")) and "__" in test: | ||
290 | new = test.split("__")[0] | ||
291 | elif test.startswith(("ptestresult.dbus.")) and "__" in test: | ||
292 | new = test.split("__")[0] | ||
293 | elif test.startswith("ptestresult.binutils") and "build-st-" in test: | ||
294 | new = test.split(" ")[0] | ||
295 | elif test.startswith("ptestresult.gcc") and "/tmp/runtest." in test: | ||
296 | new = ".".join(test.split(".")[:2]) | ||
297 | if new: | ||
298 | results[r][i]['result'][new] = results[r][i]['result'][test] | ||
299 | del results[r][i]['result'][test] | ||
300 | |||
47 | def regression_common(args, logger, base_results, target_results): | 301 | def regression_common(args, logger, base_results, target_results): |
48 | if args.base_result_id: | 302 | if args.base_result_id: |
49 | base_results = resultutils.filter_resultsdata(base_results, args.base_result_id) | 303 | base_results = resultutils.filter_resultsdata(base_results, args.base_result_id) |
50 | if args.target_result_id: | 304 | if args.target_result_id: |
51 | target_results = resultutils.filter_resultsdata(target_results, args.target_result_id) | 305 | target_results = resultutils.filter_resultsdata(target_results, args.target_result_id) |
52 | 306 | ||
307 | fixup_ptest_names(base_results, logger) | ||
308 | fixup_ptest_names(target_results, logger) | ||
309 | |||
53 | matches = [] | 310 | matches = [] |
54 | regressions = [] | 311 | regressions = [] |
55 | notfound = [] | 312 | notfound = [] |
@@ -62,7 +319,9 @@ def regression_common(args, logger, base_results, target_results): | |||
62 | # removing any pairs which match | 319 | # removing any pairs which match |
63 | for c in base.copy(): | 320 | for c in base.copy(): |
64 | for b in target.copy(): | 321 | for b in target.copy(): |
65 | res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b]) | 322 | if not can_be_compared(logger, base_results[a][c], target_results[a][b]): |
323 | continue | ||
324 | res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit) | ||
66 | if not res: | 325 | if not res: |
67 | matches.append(resstr) | 326 | matches.append(resstr) |
68 | base.remove(c) | 327 | base.remove(c) |
@@ -71,15 +330,18 @@ def regression_common(args, logger, base_results, target_results): | |||
71 | # Should only now see regressions, we may not be able to match multiple pairs directly | 330 | # Should only now see regressions, we may not be able to match multiple pairs directly |
72 | for c in base: | 331 | for c in base: |
73 | for b in target: | 332 | for b in target: |
74 | res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b]) | 333 | if not can_be_compared(logger, base_results[a][c], target_results[a][b]): |
334 | continue | ||
335 | res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit) | ||
75 | if res: | 336 | if res: |
76 | regressions.append(resstr) | 337 | regressions.append(resstr) |
77 | else: | 338 | else: |
78 | notfound.append("%s not found in target" % a) | 339 | notfound.append("%s not found in target" % a) |
79 | print("\n".join(sorted(matches))) | ||
80 | print("\n".join(sorted(regressions))) | 340 | print("\n".join(sorted(regressions))) |
341 | print("\n" + MISSING_TESTS_BANNER + "\n") | ||
81 | print("\n".join(sorted(notfound))) | 342 | print("\n".join(sorted(notfound))) |
82 | 343 | print("\n" + ADDITIONAL_DATA_BANNER + "\n") | |
344 | print("\n".join(sorted(matches))) | ||
83 | return 0 | 345 | return 0 |
84 | 346 | ||
85 | def regression_git(args, logger): | 347 | def regression_git(args, logger): |
@@ -162,6 +424,7 @@ def register_commands(subparsers): | |||
162 | help='(optional) filter the base results to this result ID') | 424 | help='(optional) filter the base results to this result ID') |
163 | parser_build.add_argument('-t', '--target-result-id', default='', | 425 | parser_build.add_argument('-t', '--target-result-id', default='', |
164 | help='(optional) filter the target results to this result ID') | 426 | help='(optional) filter the target results to this result ID') |
427 | parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes") | ||
165 | 428 | ||
166 | parser_build = subparsers.add_parser('regression-git', help='regression git analysis', | 429 | parser_build = subparsers.add_parser('regression-git', help='regression git analysis', |
167 | description='regression analysis comparing base result set to target ' | 430 | description='regression analysis comparing base result set to target ' |
@@ -183,4 +446,5 @@ def register_commands(subparsers): | |||
183 | parser_build.add_argument('--commit-number', help="Revision number to search for, redundant if --commit is specified") | 446 | parser_build.add_argument('--commit-number', help="Revision number to search for, redundant if --commit is specified") |
184 | parser_build.add_argument('--commit2', help="Revision to compare with") | 447 | parser_build.add_argument('--commit2', help="Revision to compare with") |
185 | parser_build.add_argument('--commit-number2', help="Revision number to compare with, redundant if --commit2 is specified") | 448 | parser_build.add_argument('--commit-number2', help="Revision number to compare with, redundant if --commit2 is specified") |
449 | parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes") | ||
186 | 450 | ||
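As an illustration of the truncation rules in fixup_ptest_names() above, the random suffixes are stripped so that base and target runs can be matched (the test names below are made up for illustration):

    examples = [
        "ptestresult.lttng-tools.ust/test_event_basic_-_1a2b3c",
        "ptestresult.curl.test_0042__tmpdir-xyz123",
    ]
    for test in examples:
        if test.startswith("ptestresult.lttng-tools.") and "_-_" in test:
            print(test.split("_-_")[0])   # ptestresult.lttng-tools.ust/test_event_basic
        elif test.startswith("ptestresult.curl.") and "__" in test:
            print(test.split("__")[0])    # ptestresult.curl.test_0042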
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py index f0ca50ebe2..1c100b00ab 100644 --- a/scripts/lib/resulttool/report.py +++ b/scripts/lib/resulttool/report.py | |||
@@ -176,7 +176,10 @@ class ResultsTextReport(object): | |||
176 | vals['sort'] = line['testseries'] + "_" + line['result_id'] | 176 | vals['sort'] = line['testseries'] + "_" + line['result_id'] |
177 | vals['failed_testcases'] = line['failed_testcases'] | 177 | vals['failed_testcases'] = line['failed_testcases'] |
178 | for k in cols: | 178 | for k in cols: |
179 | vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f')) | 179 | if total_tested: |
180 | vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f')) | ||
181 | else: | ||
182 | vals[k] = "0 (0%)" | ||
180 | for k in maxlen: | 183 | for k in maxlen: |
181 | if k in vals and len(vals[k]) > maxlen[k]: | 184 | if k in vals and len(vals[k]) > maxlen[k]: |
182 | maxlen[k] = len(vals[k]) | 185 | maxlen[k] = len(vals[k]) |
@@ -253,7 +256,7 @@ class ResultsTextReport(object): | |||
253 | if selected_test_case_only: | 256 | if selected_test_case_only: |
254 | print_selected_testcase_result(raw_results, selected_test_case_only) | 257 | print_selected_testcase_result(raw_results, selected_test_case_only) |
255 | else: | 258 | else: |
256 | print(json.dumps(raw_results, sort_keys=True, indent=4)) | 259 | print(json.dumps(raw_results, sort_keys=True, indent=1)) |
257 | else: | 260 | else: |
258 | print('Could not find raw test result for %s' % raw_test) | 261 | print('Could not find raw test result for %s' % raw_test) |
259 | return 0 | 262 | return 0 |
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py index 8917022d36..b8fc79a6ac 100644 --- a/scripts/lib/resulttool/resultutils.py +++ b/scripts/lib/resulttool/resultutils.py | |||
@@ -14,8 +14,11 @@ import scriptpath | |||
14 | import copy | 14 | import copy |
15 | import urllib.request | 15 | import urllib.request |
16 | import posixpath | 16 | import posixpath |
17 | import logging | ||
17 | scriptpath.add_oe_lib_path() | 18 | scriptpath.add_oe_lib_path() |
18 | 19 | ||
20 | logger = logging.getLogger('resulttool') | ||
21 | |||
19 | flatten_map = { | 22 | flatten_map = { |
20 | "oeselftest": [], | 23 | "oeselftest": [], |
21 | "runtime": [], | 24 | "runtime": [], |
@@ -31,13 +34,19 @@ regression_map = { | |||
31 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] | 34 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] |
32 | } | 35 | } |
33 | store_map = { | 36 | store_map = { |
34 | "oeselftest": ['TEST_TYPE'], | 37 | "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'], |
35 | "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], | 38 | "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], |
36 | "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], | 39 | "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], |
37 | "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], | 40 | "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], |
38 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] | 41 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] |
39 | } | 42 | } |
40 | 43 | ||
44 | rawlog_sections = { | ||
45 | "ptestresult.rawlogs": "ptest", | ||
46 | "ltpresult.rawlogs": "ltp", | ||
47 | "ltpposixresult.rawlogs": "ltpposix" | ||
48 | } | ||
49 | |||
41 | def is_url(p): | 50 | def is_url(p): |
42 | """ | 51 | """ |
43 | Helper for determining if the given path is a URL | 52 | Helper for determining if the given path is a URL |
@@ -58,7 +67,11 @@ def append_resultsdata(results, f, configmap=store_map, configvars=extra_configv | |||
58 | testseries = posixpath.basename(posixpath.dirname(url.path)) | 67 | testseries = posixpath.basename(posixpath.dirname(url.path)) |
59 | else: | 68 | else: |
60 | with open(f, "r") as filedata: | 69 | with open(f, "r") as filedata: |
61 | data = json.load(filedata) | 70 | try: |
71 | data = json.load(filedata) | ||
72 | except json.decoder.JSONDecodeError: | ||
73 | print("Cannot decode {}. Possible corruption. Skipping.".format(f)) | ||
74 | data = "" | ||
62 | testseries = os.path.basename(os.path.dirname(f)) | 75 | testseries = os.path.basename(os.path.dirname(f)) |
63 | else: | 76 | else: |
64 | data = f | 77 | data = f |
@@ -104,21 +117,57 @@ def filter_resultsdata(results, resultid): | |||
104 | newresults[r][i] = results[r][i] | 117 | newresults[r][i] = results[r][i] |
105 | return newresults | 118 | return newresults |
106 | 119 | ||
107 | def strip_ptestresults(results): | 120 | def strip_logs(results): |
108 | newresults = copy.deepcopy(results) | 121 | newresults = copy.deepcopy(results) |
109 | #for a in newresults2: | ||
110 | # newresults = newresults2[a] | ||
111 | for res in newresults: | 122 | for res in newresults: |
112 | if 'result' not in newresults[res]: | 123 | if 'result' not in newresults[res]: |
113 | continue | 124 | continue |
114 | if 'ptestresult.rawlogs' in newresults[res]['result']: | 125 | for logtype in rawlog_sections: |
115 | del newresults[res]['result']['ptestresult.rawlogs'] | 126 | if logtype in newresults[res]['result']: |
127 | del newresults[res]['result'][logtype] | ||
116 | if 'ptestresult.sections' in newresults[res]['result']: | 128 | if 'ptestresult.sections' in newresults[res]['result']: |
117 | for i in newresults[res]['result']['ptestresult.sections']: | 129 | for i in newresults[res]['result']['ptestresult.sections']: |
118 | if 'log' in newresults[res]['result']['ptestresult.sections'][i]: | 130 | if 'log' in newresults[res]['result']['ptestresult.sections'][i]: |
119 | del newresults[res]['result']['ptestresult.sections'][i]['log'] | 131 | del newresults[res]['result']['ptestresult.sections'][i]['log'] |
120 | return newresults | 132 | return newresults |
121 | 133 | ||
134 | # For timing numbers, crazy amounts of precision don't make sense and just confuse | ||
135 | # the logs. For numbers over 1, trim to 3 decimal places, for numbers less than 1, | ||
136 | # trim to 4 significant digits | ||
137 | def trim_durations(results): | ||
138 | for res in results: | ||
139 | if 'result' not in results[res]: | ||
140 | continue | ||
141 | for entry in results[res]['result']: | ||
142 | if 'duration' in results[res]['result'][entry]: | ||
143 | duration = results[res]['result'][entry]['duration'] | ||
144 | if duration > 1: | ||
145 | results[res]['result'][entry]['duration'] = float("%.3f" % duration) | ||
146 | elif duration < 1: | ||
147 | results[res]['result'][entry]['duration'] = float("%.4g" % duration) | ||
148 | return results | ||
149 | |||
150 | def handle_cleanups(results): | ||
151 | # Remove pointless path duplication from old format reproducibility results | ||
152 | for res2 in results: | ||
153 | try: | ||
154 | section = results[res2]['result']['reproducible']['files'] | ||
155 | for pkgtype in section: | ||
156 | for filelist in section[pkgtype].copy(): | ||
157 | if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict: | ||
158 | newlist = [] | ||
159 | for entry in section[pkgtype][filelist]: | ||
160 | newlist.append(entry["reference"].split("/./")[1]) | ||
161 | section[pkgtype][filelist] = newlist | ||
162 | |||
163 | except KeyError: | ||
164 | pass | ||
165 | # Remove pointless duplicate rawlogs data | ||
166 | try: | ||
167 | del results[res2]['result']['reproducible.rawlogs'] | ||
168 | except KeyError: | ||
169 | pass | ||
170 | |||
122 | def decode_log(logdata): | 171 | def decode_log(logdata): |
123 | if isinstance(logdata, str): | 172 | if isinstance(logdata, str): |
124 | return logdata | 173 | return logdata |
@@ -142,7 +191,7 @@ def generic_get_log(sectionname, results, section): | |||
142 | return decode_log(ptest['log']) | 191 | return decode_log(ptest['log']) |
143 | 192 | ||
144 | def ptestresult_get_log(results, section): | 193 | def ptestresult_get_log(results, section): |
145 | return generic_get_log('ptestresuls.sections', results, section) | 194 | return generic_get_log('ptestresult.sections', results, section) |
146 | 195 | ||
147 | def generic_get_rawlogs(sectname, results): | 196 | def generic_get_rawlogs(sectname, results): |
148 | if sectname not in results: | 197 | if sectname not in results: |
@@ -151,9 +200,6 @@ def generic_get_rawlogs(sectname, results): | |||
151 | return None | 200 | return None |
152 | return decode_log(results[sectname]['log']) | 201 | return decode_log(results[sectname]['log']) |
153 | 202 | ||
154 | def ptestresult_get_rawlogs(results): | ||
155 | return generic_get_rawlogs('ptestresult.rawlogs', results) | ||
156 | |||
157 | def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): | 203 | def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): |
158 | for res in results: | 204 | for res in results: |
159 | if res: | 205 | if res: |
@@ -163,16 +209,20 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p | |||
163 | os.makedirs(os.path.dirname(dst), exist_ok=True) | 209 | os.makedirs(os.path.dirname(dst), exist_ok=True) |
164 | resultsout = results[res] | 210 | resultsout = results[res] |
165 | if not ptestjson: | 211 | if not ptestjson: |
166 | resultsout = strip_ptestresults(results[res]) | 212 | resultsout = strip_logs(results[res]) |
213 | trim_durations(resultsout) | ||
214 | handle_cleanups(resultsout) | ||
167 | with open(dst, 'w') as f: | 215 | with open(dst, 'w') as f: |
168 | f.write(json.dumps(resultsout, sort_keys=True, indent=4)) | 216 | f.write(json.dumps(resultsout, sort_keys=True, indent=1)) |
169 | for res2 in results[res]: | 217 | for res2 in results[res]: |
170 | if ptestlogs and 'result' in results[res][res2]: | 218 | if ptestlogs and 'result' in results[res][res2]: |
171 | seriesresults = results[res][res2]['result'] | 219 | seriesresults = results[res][res2]['result'] |
172 | rawlogs = ptestresult_get_rawlogs(seriesresults) | 220 | for logtype in rawlog_sections: |
173 | if rawlogs is not None: | 221 | logdata = generic_get_rawlogs(logtype, seriesresults) |
174 | with open(dst.replace(fn, "ptest-raw.log"), "w+") as f: | 222 | if logdata is not None: |
175 | f.write(rawlogs) | 223 | logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log") |
224 | with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f: | ||
225 | f.write(logdata) | ||
176 | if 'ptestresult.sections' in seriesresults: | 226 | if 'ptestresult.sections' in seriesresults: |
177 | for i in seriesresults['ptestresult.sections']: | 227 | for i in seriesresults['ptestresult.sections']: |
178 | sectionlog = ptestresult_get_log(seriesresults, i) | 228 | sectionlog = ptestresult_get_log(seriesresults, i) |
diff --git a/scripts/lib/resulttool/store.py b/scripts/lib/resulttool/store.py index e0951f0a8f..b143334e69 100644 --- a/scripts/lib/resulttool/store.py +++ b/scripts/lib/resulttool/store.py | |||
@@ -65,18 +65,35 @@ def store(args, logger): | |||
65 | 65 | ||
66 | for r in revisions: | 66 | for r in revisions: |
67 | results = revisions[r] | 67 | results = revisions[r] |
68 | if args.revision and r[0] != args.revision: | ||
69 | logger.info('skipping %s as non-matching' % r[0]) | ||
70 | continue | ||
68 | keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} | 71 | keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} |
69 | subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"]) | 72 | subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"]) |
70 | resultutils.save_resultsdata(results, tempdir, ptestlogs=True) | 73 | resultutils.save_resultsdata(results, tempdir, ptestlogs=True) |
71 | 74 | ||
72 | logger.info('Storing test result into git repository %s' % args.git_dir) | 75 | logger.info('Storing test result into git repository %s' % args.git_dir) |
73 | 76 | ||
74 | gitarchive.gitarchive(tempdir, args.git_dir, False, False, | 77 | excludes = [] |
78 | if args.logfile_archive: | ||
79 | excludes = ['*.log', "*.log.zst"] | ||
80 | |||
81 | tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False, | ||
75 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", | 82 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", |
76 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", | 83 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", |
77 | 'Test run #{tag_number} of {branch}:{commit}', '', | 84 | 'Test run #{tag_number} of {branch}:{commit}', '', |
78 | [], [], False, keywords, logger) | 85 | excludes, [], False, keywords, logger) |
79 | 86 | ||
87 | if args.logfile_archive: | ||
88 | logdir = args.logfile_archive + "/" + tagname | ||
89 | shutil.copytree(tempdir, logdir) | ||
90 | os.chmod(logdir, 0o755) | ||
91 | for root, dirs, files in os.walk(logdir): | ||
92 | for name in files: | ||
93 | if not name.endswith(".log"): | ||
94 | continue | ||
95 | f = os.path.join(root, name) | ||
96 | subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True) | ||
80 | finally: | 97 | finally: |
81 | subprocess.check_call(["rm", "-rf", tempdir]) | 98 | subprocess.check_call(["rm", "-rf", tempdir]) |
82 | 99 | ||
@@ -102,3 +119,7 @@ def register_commands(subparsers): | |||
102 | help='add executed-by configuration to each result file') | 119 | help='add executed-by configuration to each result file') |
103 | parser_build.add_argument('-t', '--extra-test-env', default='', | 120 | parser_build.add_argument('-t', '--extra-test-env', default='', |
104 | help='add extra test environment data to each result file configuration') | 121 | help='add extra test environment data to each result file configuration') |
122 | parser_build.add_argument('-r', '--revision', default='', | ||
123 | help='only store data for the specified revision') | ||
124 | parser_build.add_argument('-l', '--logfile-archive', default='', | ||
125 | help='directory to separately archive log files along with a copy of the results') | ||
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py index 3164171eb2..32e749dbb1 100644 --- a/scripts/lib/scriptutils.py +++ b/scripts/lib/scriptutils.py | |||
@@ -5,7 +5,6 @@ | |||
5 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
6 | # | 6 | # |
7 | 7 | ||
8 | import argparse | ||
9 | import glob | 8 | import glob |
10 | import logging | 9 | import logging |
11 | import os | 10 | import os |
@@ -18,13 +17,14 @@ import sys | |||
18 | import tempfile | 17 | import tempfile |
19 | import threading | 18 | import threading |
20 | import importlib | 19 | import importlib |
21 | from importlib import machinery | 20 | import importlib.machinery |
21 | import importlib.util | ||
22 | 22 | ||
23 | class KeepAliveStreamHandler(logging.StreamHandler): | 23 | class KeepAliveStreamHandler(logging.StreamHandler): |
24 | def __init__(self, keepalive=True, **kwargs): | 24 | def __init__(self, keepalive=True, **kwargs): |
25 | super().__init__(**kwargs) | 25 | super().__init__(**kwargs) |
26 | if keepalive is True: | 26 | if keepalive is True: |
27 | keepalive = 5000 # default timeout | 27 | keepalive = 5000 # default timeout |
28 | self._timeout = threading.Condition() | 28 | self._timeout = threading.Condition() |
29 | self._stop = False | 29 | self._stop = False |
30 | 30 | ||
@@ -35,9 +35,9 @@ class KeepAliveStreamHandler(logging.StreamHandler): | |||
35 | with self._timeout: | 35 | with self._timeout: |
36 | if not self._timeout.wait(keepalive): | 36 | if not self._timeout.wait(keepalive): |
37 | self.emit(logging.LogRecord("keepalive", logging.INFO, | 37 | self.emit(logging.LogRecord("keepalive", logging.INFO, |
38 | None, None, "Keepalive message", None, None)) | 38 | None, None, "Keepalive message", None, None)) |
39 | 39 | ||
40 | self._thread = threading.Thread(target = thread, daemon = True) | 40 | self._thread = threading.Thread(target=thread, daemon=True) |
41 | self._thread.start() | 41 | self._thread.start() |
42 | 42 | ||
43 | def close(self): | 43 | def close(self): |
@@ -71,18 +71,19 @@ def logger_setup_color(logger, color='auto'): | |||
71 | 71 | ||
72 | for handler in logger.handlers: | 72 | for handler in logger.handlers: |
73 | if (isinstance(handler, logging.StreamHandler) and | 73 | if (isinstance(handler, logging.StreamHandler) and |
74 | isinstance(handler.formatter, BBLogFormatter)): | 74 | isinstance(handler.formatter, BBLogFormatter)): |
75 | if color == 'always' or (color == 'auto' and handler.stream.isatty()): | 75 | if color == 'always' or (color == 'auto' and handler.stream.isatty()): |
76 | handler.formatter.enable_color() | 76 | handler.formatter.enable_color() |
77 | 77 | ||
78 | 78 | ||
79 | def load_plugins(logger, plugins, pluginpath): | 79 | def load_plugins(logger, plugins, pluginpath): |
80 | |||
81 | def load_plugin(name): | 80 | def load_plugin(name): |
82 | logger.debug('Loading plugin %s' % name) | 81 | logger.debug('Loading plugin %s' % name) |
83 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) | 82 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath]) |
84 | if spec: | 83 | if spec: |
85 | return spec.loader.load_module() | 84 | mod = importlib.util.module_from_spec(spec) |
85 | spec.loader.exec_module(mod) | ||
86 | return mod | ||
86 | 87 | ||
87 | def plugin_name(filename): | 88 | def plugin_name(filename): |
88 | return os.path.splitext(os.path.basename(filename))[0] | 89 | return os.path.splitext(os.path.basename(filename))[0] |
@@ -176,9 +177,15 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
176 | f.write('BB_STRICT_CHECKSUM = "ignore"\n') | 177 | f.write('BB_STRICT_CHECKSUM = "ignore"\n') |
177 | f.write('SRC_URI = "%s"\n' % srcuri) | 178 | f.write('SRC_URI = "%s"\n' % srcuri) |
178 | f.write('SRCREV = "%s"\n' % srcrev) | 179 | f.write('SRCREV = "%s"\n' % srcrev) |
180 | f.write('PV = "0.0+"\n') | ||
179 | f.write('WORKDIR = "%s"\n' % tmpworkdir) | 181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) |
182 | f.write('UNPACKDIR = "%s"\n' % destdir) | ||
183 | |||
180 | # Set S out of the way so it doesn't get created under the workdir | 184 | # Set S out of the way so it doesn't get created under the workdir |
181 | f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) | 185 | s_dir = os.path.join(tmpdir, 'emptysrc') |
186 | bb.utils.mkdirhier(s_dir) | ||
187 | f.write('S = "%s"\n' % s_dir) | ||
188 | |||
182 | if not mirrors: | 189 | if not mirrors: |
183 | # We do not need PREMIRRORS since we are almost certainly | 190 | # We do not need PREMIRRORS since we are almost certainly |
184 | # fetching new source rather than something that has already | 191 | # fetching new source rather than something that has already |
@@ -230,10 +237,6 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
230 | if e.errno != errno.ENOTEMPTY: | 237 | if e.errno != errno.ENOTEMPTY: |
231 | raise | 238 | raise |
232 | 239 | ||
233 | bb.utils.mkdirhier(destdir) | ||
234 | for fn in os.listdir(tmpworkdir): | ||
235 | shutil.move(os.path.join(tmpworkdir, fn), destdir) | ||
236 | |||
237 | finally: | 240 | finally: |
238 | if not preserve_tmp: | 241 | if not preserve_tmp: |
239 | shutil.rmtree(tmpdir) | 242 | shutil.rmtree(tmpdir) |
@@ -269,12 +272,3 @@ def is_src_url(param): | |||
269 | return True | 272 | return True |
270 | return False | 273 | return False |
271 | 274 | ||
272 | def filter_src_subdirs(pth): | ||
273 | """ | ||
274 | Filter out subdirectories of initial unpacked source trees that we do not care about. | ||
275 | Used by devtool and recipetool. | ||
276 | """ | ||
277 | dirlist = os.listdir(pth) | ||
278 | filterout = ['git.indirectionsymlink', 'source-date-epoch'] | ||
279 | dirlist = [x for x in dirlist if x not in filterout] | ||
280 | return dirlist | ||
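The load_plugins() change above replaces the deprecated spec.loader.load_module() call with the module_from_spec()/exec_module() pattern; a minimal standalone sketch (plugin name and path are hypothetical):

    import importlib.machinery
    import importlib.util

    def load_plugin(name, pluginpath):
        # Locate the plugin module on the given path and execute it
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod

    # plugin = load_plugin("rootfs", "/path/to/scripts/lib/wic/plugins/source")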
diff --git a/scripts/lib/wic/canned-wks/common.wks.inc b/scripts/lib/wic/canned-wks/common.wks.inc index 89880b417b..4a440ddafe 100644 --- a/scripts/lib/wic/canned-wks/common.wks.inc +++ b/scripts/lib/wic/canned-wks/common.wks.inc | |||
@@ -1,3 +1,3 @@ | |||
1 | # This file is included into 3 canned wks files from this directory | 1 | # This file is included into 3 canned wks files from this directory |
2 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 2 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
3 | part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 | 3 | part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 |
diff --git a/scripts/lib/wic/canned-wks/directdisk-gpt.wks b/scripts/lib/wic/canned-wks/directdisk-gpt.wks index 8d7d8de6ea..cb640056f1 100644 --- a/scripts/lib/wic/canned-wks/directdisk-gpt.wks +++ b/scripts/lib/wic/canned-wks/directdisk-gpt.wks | |||
@@ -3,7 +3,7 @@ | |||
3 | # can directly dd to boot media. | 3 | # can directly dd to boot media. |
4 | 4 | ||
5 | 5 | ||
6 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 6 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
8 | 8 | ||
9 | bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" | 9 | bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" |
diff --git a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks index f61d941d6d..4fd1999ffb 100644 --- a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks +++ b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks | |||
@@ -15,7 +15,7 @@ | |||
15 | # | 15 | # |
16 | # - or any combinations of -r and --rootfs command line options | 16 | # - or any combinations of -r and --rootfs command line options |
17 | 17 | ||
18 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 18 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
19 | part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 | 19 | part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 |
20 | part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 | 20 | part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 |
21 | 21 | ||
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in index 7300e65e32..5211972955 100644 --- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in +++ b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in | |||
@@ -1,3 +1,3 @@ | |||
1 | bootloader --ptable gpt | 1 | bootloader --ptable gpt |
2 | part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.0 | 2 | part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.2 |
3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ | 3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ |
diff --git a/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in new file mode 100644 index 0000000000..cac0fa32cd --- /dev/null +++ b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in | |||
@@ -0,0 +1,3 @@ | |||
1 | bootloader --ptable gpt --timeout=5 | ||
2 | part /boot --source bootimg_efi --sourceparams="loader=${EFI_PROVIDER}" --label boot --active --align 1024 --use-uuid --part-name="ESP" --part-type=C12A7328-F81F-11D2-BA4B-00A0C93EC93B --overhead-factor=1 | ||
3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ | ||
diff --git a/scripts/lib/wic/canned-wks/mkefidisk.wks b/scripts/lib/wic/canned-wks/mkefidisk.wks index 9f534fe184..16dfe76dfe 100644 --- a/scripts/lib/wic/canned-wks/mkefidisk.wks +++ b/scripts/lib/wic/canned-wks/mkefidisk.wks | |||
@@ -2,10 +2,10 @@ | |||
2 | # long-description: Creates a partitioned EFI disk image that the user | 2 | # long-description: Creates a partitioned EFI disk image that the user |
3 | # can directly dd to boot media. | 3 | # can directly dd to boot media. |
4 | 4 | ||
5 | part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 | 5 | part /boot --source bootimg_efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 |
6 | 6 | ||
7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
8 | 8 | ||
9 | part swap --ondisk sda --size 44 --label swap1 --fstype=swap | 9 | part swap --ondisk sda --size 44 --label swap1 --fstype=swap |
10 | 10 | ||
11 | bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0" | 11 | bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=${KERNEL_CONSOLE} console=tty0" |
diff --git a/scripts/lib/wic/canned-wks/mkhybridiso.wks b/scripts/lib/wic/canned-wks/mkhybridiso.wks index 48c5ac4791..c3a030e5b4 100644 --- a/scripts/lib/wic/canned-wks/mkhybridiso.wks +++ b/scripts/lib/wic/canned-wks/mkhybridiso.wks | |||
@@ -2,6 +2,6 @@ | |||
2 | # long-description: Creates an EFI and legacy bootable hybrid ISO image | 2 | # long-description: Creates an EFI and legacy bootable hybrid ISO image |
3 | # which can be used on optical media as well as USB media. | 3 | # which can be used on optical media as well as USB media. |
4 | 4 | ||
5 | part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO | 5 | part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO |
6 | 6 | ||
7 | bootloader --timeout=15 --append="" | 7 | bootloader --timeout=15 --append="" |
diff --git a/scripts/lib/wic/canned-wks/qemuloongarch.wks b/scripts/lib/wic/canned-wks/qemuloongarch.wks new file mode 100644 index 0000000000..8465c7a8c0 --- /dev/null +++ b/scripts/lib/wic/canned-wks/qemuloongarch.wks | |||
@@ -0,0 +1,3 @@ | |||
1 | # short-description: Create qcow2 image for LoongArch QEMU machines | ||
2 | |||
3 | part / --source rootfs --fstype=ext4 --label root --align 4096 --size 5G | ||
diff --git a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/scripts/lib/wic/canned-wks/qemux86-directdisk.wks index 22b45217f1..808997611a 100644 --- a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks +++ b/scripts/lib/wic/canned-wks/qemux86-directdisk.wks | |||
@@ -4,5 +4,5 @@ | |||
4 | 4 | ||
5 | include common.wks.inc | 5 | include common.wks.inc |
6 | 6 | ||
7 | bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 " | 7 | bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 console=tty console=ttyS0 " |
8 | 8 | ||
diff --git a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks index 63bc4dab6a..f9f8044f7d 100644 --- a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks +++ b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks | |||
@@ -2,5 +2,5 @@ | |||
2 | # long-description: Creates a partitioned SD card image. Boot files | 2 | # long-description: Creates a partitioned SD card image. Boot files |
3 | # are located in the first vfat partition. | 3 | # are located in the first vfat partition. |
4 | 4 | ||
5 | part /boot --source bootimg-partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 | 5 | part /boot --source bootimg_partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 |
6 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 | 6 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 |
diff --git a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks index 95d7b97a60..3fb2c0e35f 100644 --- a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks +++ b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks | |||
@@ -2,7 +2,7 @@ | |||
2 | # long-description: Creates a partitioned EFI disk image that the user | 2 | # long-description: Creates a partitioned EFI disk image that the user |
3 | # can directly dd to boot media. The selected bootloader is systemd-boot. | 3 | # can directly dd to boot media. The selected bootloader is systemd-boot. |
4 | 4 | ||
5 | part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid | 5 | part /boot --source bootimg_efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid |
6 | 6 | ||
7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
8 | 8 | ||
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py index 674ccfc244..b9e60cbe4e 100644 --- a/scripts/lib/wic/engine.py +++ b/scripts/lib/wic/engine.py | |||
@@ -180,6 +180,8 @@ def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir, | |||
180 | os.makedirs(options.outdir) | 180 | os.makedirs(options.outdir) |
181 | 181 | ||
182 | pname = options.imager | 182 | pname = options.imager |
183 | # Don't support '-' in plugin names | ||
184 | pname = pname.replace("-", "_") | ||
183 | plugin_class = PluginMgr.get_plugins('imager').get(pname) | 185 | plugin_class = PluginMgr.get_plugins('imager').get(pname) |
184 | if not plugin_class: | 186 | if not plugin_class: |
185 | raise WicError('Unknown plugin: %s' % pname) | 187 | raise WicError('Unknown plugin: %s' % pname) |
@@ -232,6 +234,16 @@ class Disk: | |||
232 | self._psector_size = None | 234 | self._psector_size = None |
233 | self._ptable_format = None | 235 | self._ptable_format = None |
234 | 236 | ||
237 | # define sector size | ||
238 | sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE') | ||
239 | if sector_size_str is not None: | ||
240 | try: | ||
241 | self.sector_size = int(sector_size_str) | ||
242 | except ValueError: | ||
243 | self.sector_size = None | ||
244 | else: | ||
245 | self.sector_size = None | ||
246 | |||
235 | # find parted | 247 | # find parted |
236 | # read paths from $PATH environment variable | 248 | # read paths from $PATH environment variable |
237 | # if it fails, use hardcoded paths | 249 | # if it fails, use hardcoded paths |
@@ -258,7 +270,13 @@ class Disk: | |||
258 | def get_partitions(self): | 270 | def get_partitions(self): |
259 | if self._partitions is None: | 271 | if self._partitions is None: |
260 | self._partitions = OrderedDict() | 272 | self._partitions = OrderedDict() |
261 | out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) | 273 | |
274 | if self.sector_size is not None: | ||
275 | out = exec_cmd("export PARTED_SECTOR_SIZE=%d; %s -sm %s unit B print" % \ | ||
276 | (self.sector_size, self.parted, self.imagepath), True) | ||
277 | else: | ||
278 | out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) | ||
279 | |||
262 | parttype = namedtuple("Part", "pnum start end size fstype") | 280 | parttype = namedtuple("Part", "pnum start end size fstype") |
263 | splitted = out.splitlines() | 281 | splitted = out.splitlines() |
264 | # skip over possible errors in exec_cmd output | 282 | # skip over possible errors in exec_cmd output |
@@ -359,7 +377,7 @@ class Disk: | |||
359 | Remove files/dirs and their contents from the partition. | 377 | Remove files/dirs and their contents from the partition. |
360 | This only applies to ext* partition. | 378 | This only applies to ext* partition. |
361 | """ | 379 | """ |
362 | abs_path = re.sub('\/\/+', '/', path) | 380 | abs_path = re.sub(r'\/\/+', '/', path) |
363 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, | 381 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, |
364 | self._get_part_image(pnum), | 382 | self._get_part_image(pnum), |
365 | abs_path) | 383 | abs_path) |
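Note on the engine.py change above: Disk now reads an optional WIC_SECTOR_SIZE bitbake variable and forwards it to parted through the PARTED_SECTOR_SIZE environment variable. A minimal sketch of that parsing pattern, assuming a get_var callable standing in for get_bitbake_var and a hypothetical image name (illustration only, not the wic code path):

    def read_sector_size(get_var, default=None):
        # WIC_SECTOR_SIZE is optional; unset or non-integer values fall back to the default
        value = get_var('WIC_SECTOR_SIZE')
        if value is None:
            return default
        try:
            return int(value)
        except ValueError:
            return default

    # usage sketch: prefix the parted invocation with an explicit sector size
    sector_size = read_sector_size(lambda name: '4096')
    cmd = "parted -sm image.wic unit B print"
    if sector_size is not None:
        cmd = "export PARTED_SECTOR_SIZE=%d; %s" % (sector_size, cmd)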
diff --git a/scripts/lib/wic/filemap.py b/scripts/lib/wic/filemap.py index 4d9da28172..85b39d5d74 100644 --- a/scripts/lib/wic/filemap.py +++ b/scripts/lib/wic/filemap.py | |||
@@ -46,6 +46,13 @@ def get_block_size(file_obj): | |||
46 | bsize = stat.st_blksize | 46 | bsize = stat.st_blksize |
47 | else: | 47 | else: |
48 | raise IOError("Unable to determine block size") | 48 | raise IOError("Unable to determine block size") |
49 | |||
50 | # The logic in this script only supports a maximum of a 4KB | ||
51 | # block size | ||
52 | max_block_size = 4 * 1024 | ||
53 | if bsize > max_block_size: | ||
54 | bsize = max_block_size | ||
55 | |||
49 | return bsize | 56 | return bsize |
50 | 57 | ||
51 | class ErrorNotSupp(Exception): | 58 | class ErrorNotSupp(Exception): |
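The filemap.py hunk above caps the detected block size at 4 KiB because the mapping logic only supports blocks up to that size. A simplified sketch of the clamping; unlike the real helper it takes a path instead of an open file object and skips the surrounding error handling:

    import os

    def capped_block_size(path, max_block_size=4 * 1024):
        bsize = os.stat(path).st_blksize
        # block sizes above 4 KiB are clamped, matching the hunk above
        return min(bsize, max_block_size)

    print(capped_block_size("/tmp"))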
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py index 4ff7470a6a..2e3061f343 100644 --- a/scripts/lib/wic/help.py +++ b/scripts/lib/wic/help.py | |||
@@ -544,18 +544,18 @@ DESCRIPTION | |||
544 | the --source param given to that partition. For example, if the | 544 | the --source param given to that partition. For example, if the |
545 | partition is set up like this: | 545 | partition is set up like this: |
546 | 546 | ||
547 | part /boot --source bootimg-pcbios ... | 547 | part /boot --source bootimg_pcbios ... |
548 | 548 | ||
549 | then the methods defined as class members of the plugin having the | 549 | then the methods defined as class members of the plugin having the |
550 | matching bootimg-pcbios .name class member would be used. | 550 | matching bootimg_pcbios .name class member would be used. |
551 | 551 | ||
552 | To be more concrete, here's the plugin definition that would match | 552 | To be more concrete, here's the plugin definition that would match |
553 | a '--source bootimg-pcbios' usage, along with an example method | 553 | a '--source bootimg_pcbios' usage, along with an example method |
554 | that would be called by the wic implementation when it needed to | 554 | that would be called by the wic implementation when it needed to |
555 | invoke an implementation-specific partition-preparation function: | 555 | invoke an implementation-specific partition-preparation function: |
556 | 556 | ||
557 | class BootimgPcbiosPlugin(SourcePlugin): | 557 | class BootimgPcbiosPlugin(SourcePlugin): |
558 | name = 'bootimg-pcbios' | 558 | name = 'bootimg_pcbios' |
559 | 559 | ||
560 | @classmethod | 560 | @classmethod |
561 | def do_prepare_partition(self, part, ...) | 561 | def do_prepare_partition(self, part, ...) |
@@ -794,7 +794,7 @@ DESCRIPTION | |||
794 | 794 | ||
795 | Here is a content of test.wks: | 795 | Here is a content of test.wks: |
796 | 796 | ||
797 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 797 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
798 | part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 | 798 | part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 |
799 | 799 | ||
800 | bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" | 800 | bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" |
@@ -916,6 +916,10 @@ DESCRIPTION | |||
916 | will create empty partition. --size parameter has | 916 | will create empty partition. --size parameter has |
917 | to be used to specify size of empty partition. | 917 | to be used to specify size of empty partition. |
918 | 918 | ||
919 | --sourceparams: This option is specific to wic. Supply additional | ||
920 | parameters to the source plugin in | ||
921 | key1=value1,key2 format. | ||
922 | |||
919 | --ondisk or --ondrive: Forces the partition to be created on | 923 | --ondisk or --ondrive: Forces the partition to be created on |
920 | a particular disk. | 924 | a particular disk. |
921 | 925 | ||
@@ -932,6 +936,7 @@ DESCRIPTION | |||
932 | squashfs | 936 | squashfs |
933 | erofs | 937 | erofs |
934 | swap | 938 | swap |
939 | none | ||
935 | 940 | ||
936 | --fsoptions: Specifies a free-form string of options to be | 941 | --fsoptions: Specifies a free-form string of options to be |
937 | used when mounting the filesystem. This string | 942 | used when mounting the filesystem. This string |
@@ -940,6 +945,12 @@ DESCRIPTION | |||
940 | quotes. If not specified, the default string is | 945 | quotes. If not specified, the default string is |
941 | "defaults". | 946 | "defaults". |
942 | 947 | ||
948 | --fspassno: Specifies the order in which filesystem checks are done | ||
949 | at boot time by fsck. See fs_passno parameter of | ||
950 | fstab(5). This parameter will be copied into the | ||
951 | /etc/fstab file of the installed system. If not | ||
952 | specified the default value of "0" will be used. | ||
953 | |||
943 | --label label: Specifies the label to give to the filesystem | 954 | --label label: Specifies the label to give to the filesystem |
944 | to be made on the partition. If the given | 955 | to be made on the partition. If the given |
945 | label is already in use by another filesystem, | 956 | label is already in use by another filesystem, |
@@ -959,6 +970,14 @@ DESCRIPTION | |||
959 | to start a partition on an x KBytes | 970 | to start a partition on an x KBytes |
960 | boundary. | 971 | boundary. |
961 | 972 | ||
973 | --offset: This option is specific to wic that says to place a partition | ||
974 | at exactly the specified offset. If the partition cannot be | ||
975 | placed at the specified offset, the image build will fail. | ||
976 | Specify as an integer value optionally followed by one of the | ||
977 | units s/S for 512 byte sector, k/K for kibibyte, M for | ||
978 | mebibyte and G for gibibyte. The default unit if none is | ||
979 | given is k. | ||
980 | |||
962 | --no-table: This option is specific to wic. Space will be | 981 | --no-table: This option is specific to wic. Space will be |
963 | reserved for the partition and it will be | 982 | reserved for the partition and it will be |
964 | populated but it will not be added to the | 983 | populated but it will not be added to the |
@@ -1039,6 +1058,18 @@ DESCRIPTION | |||
1039 | not take effect when --mkfs-extraopts is used. This should be taken into | 1058 | not take effect when --mkfs-extraopts is used. This should be taken into |
1040 | account when using --mkfs-extraopts. | 1059 | account when using --mkfs-extraopts. |
1041 | 1060 | ||
1061 | --type: This option is specific to wic. Valid values are 'primary', | ||
1062 | 'logical'. For msdos partition tables, this option specifies | ||
1063 | the partition type. | ||
1064 | |||
1065 | --hidden: This option is specific to wic. This option sets the | ||
1066 | RequiredPartition bit (bit 0) on GPT partitions. | ||
1067 | |||
1068 | --mbr: This option is specific to wic. This option is used with the | ||
1069 | gpt-hybrid partition type that uses both a GPT partition and | ||
1070 | an MBR header. Partitions with this flag will be included in | ||
1071 | this MBR header. | ||
1072 | |||
1042 | * bootloader | 1073 | * bootloader |
1043 | 1074 | ||
1044 | This command allows the user to specify various bootloader | 1075 | This command allows the user to specify various bootloader |
@@ -1057,6 +1088,13 @@ DESCRIPTION | |||
1057 | file. Using this option will override any other | 1088 | file. Using this option will override any other |
1058 | bootloader option. | 1089 | bootloader option. |
1059 | 1090 | ||
1091 | --ptable: Specifies the partition table format. Valid values are | ||
1092 | 'msdos', 'gpt', 'gpt-hybrid'. | ||
1093 | |||
1094 | --source: Specifies the source plugin. If not specified, the | ||
1095 | --source value will be copied from the partition that has | ||
1096 | /boot as mountpoint. | ||
1097 | |||
1060 | Note that bootloader functionality and boot partitions are | 1098 | Note that bootloader functionality and boot partitions are |
1061 | implemented by the various --source plugins that implement | 1099 | implemented by the various --source plugins that implement |
1062 | bootloader functionality; the bootloader command essentially | 1100 | bootloader functionality; the bootloader command essentially |
@@ -1112,7 +1150,7 @@ COMMAND: | |||
1112 | TOPIC: | 1150 | TOPIC: |
1113 | overview - Presents an overall overview of Wic | 1151 | overview - Presents an overall overview of Wic |
1114 | plugins - Presents an overview and API for Wic plugins | 1152 | plugins - Presents an overview and API for Wic plugins |
1115 | kickstart - Presents a Wic kicstart file reference | 1153 | kickstart - Presents a Wic kickstart file reference |
1116 | 1154 | ||
1117 | 1155 | ||
1118 | Examples: | 1156 | Examples: |
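The new --offset documentation above defines its unit suffixes. A small illustrative parser for that convention (this is not the actual wic implementation, which lives in ksparser.py):

    def parse_offset(value, sector_size=512):
        # integer plus optional suffix: s/S = 512-byte sectors, k/K = KiB,
        # M = MiB, G = GiB; a bare integer defaults to KiB per the help text above
        units = {'s': sector_size, 'S': sector_size,
                 'k': 1024, 'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3}
        if value and value[-1] in units:
            return int(value[:-1]) * units[value[-1]]
        return int(value) * 1024

    assert parse_offset("8M") == 8 * 1024 ** 2
    assert parse_offset("512") == 512 * 1024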
diff --git a/scripts/lib/wic/ksparser.py b/scripts/lib/wic/ksparser.py index 0df9eb0d05..7ef3dc83dd 100644 --- a/scripts/lib/wic/ksparser.py +++ b/scripts/lib/wic/ksparser.py | |||
@@ -155,10 +155,11 @@ class KickStart(): | |||
155 | part.add_argument('--change-directory') | 155 | part.add_argument('--change-directory') |
156 | part.add_argument("--extra-space", type=sizetype("M")) | 156 | part.add_argument("--extra-space", type=sizetype("M")) |
157 | part.add_argument('--fsoptions', dest='fsopts') | 157 | part.add_argument('--fsoptions', dest='fsopts') |
158 | part.add_argument('--fspassno', dest='fspassno') | ||
158 | part.add_argument('--fstype', default='vfat', | 159 | part.add_argument('--fstype', default='vfat', |
159 | choices=('ext2', 'ext3', 'ext4', 'btrfs', | 160 | choices=('ext2', 'ext3', 'ext4', 'btrfs', |
160 | 'squashfs', 'vfat', 'msdos', 'erofs', | 161 | 'squashfs', 'vfat', 'msdos', 'erofs', |
161 | 'swap')) | 162 | 'swap', 'none')) |
162 | part.add_argument('--mkfs-extraopts', default='') | 163 | part.add_argument('--mkfs-extraopts', default='') |
163 | part.add_argument('--label') | 164 | part.add_argument('--label') |
164 | part.add_argument('--use-label', action='store_true') | 165 | part.add_argument('--use-label', action='store_true') |
@@ -170,6 +171,7 @@ class KickStart(): | |||
170 | part.add_argument('--rootfs-dir') | 171 | part.add_argument('--rootfs-dir') |
171 | part.add_argument('--type', default='primary', | 172 | part.add_argument('--type', default='primary', |
172 | choices = ('primary', 'logical')) | 173 | choices = ('primary', 'logical')) |
174 | part.add_argument('--hidden', action='store_true') | ||
173 | 175 | ||
174 | # --size and --fixed-size cannot be specified together; options | 176 | # --size and --fixed-size cannot be specified together; options |
175 | # ----extra-space and --overhead-factor should also raise a parser | 177 | # ----extra-space and --overhead-factor should also raise a parser |
@@ -186,11 +188,12 @@ class KickStart(): | |||
186 | part.add_argument('--uuid') | 188 | part.add_argument('--uuid') |
187 | part.add_argument('--fsuuid') | 189 | part.add_argument('--fsuuid') |
188 | part.add_argument('--no-fstab-update', action='store_true') | 190 | part.add_argument('--no-fstab-update', action='store_true') |
191 | part.add_argument('--mbr', action='store_true') | ||
189 | 192 | ||
190 | bootloader = subparsers.add_parser('bootloader') | 193 | bootloader = subparsers.add_parser('bootloader') |
191 | bootloader.add_argument('--append') | 194 | bootloader.add_argument('--append') |
192 | bootloader.add_argument('--configfile') | 195 | bootloader.add_argument('--configfile') |
193 | bootloader.add_argument('--ptable', choices=('msdos', 'gpt'), | 196 | bootloader.add_argument('--ptable', choices=('msdos', 'gpt', 'gpt-hybrid'), |
194 | default='msdos') | 197 | default='msdos') |
195 | bootloader.add_argument('--timeout', type=int) | 198 | bootloader.add_argument('--timeout', type=int) |
196 | bootloader.add_argument('--source') | 199 | bootloader.add_argument('--source') |
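The ksparser.py additions above (--fspassno, the 'none' fstype, --hidden, --mbr, and the 'gpt-hybrid' ptable) can be exercised in isolation with a stripped-down argparse model; this is a toy reproduction, not the real KickStart class:

    import argparse

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='command')

    part = subparsers.add_parser('part')
    part.add_argument('--fspassno')
    part.add_argument('--fstype', default='vfat',
                      choices=('ext2', 'ext3', 'ext4', 'btrfs', 'squashfs',
                               'vfat', 'msdos', 'erofs', 'swap', 'none'))
    part.add_argument('--hidden', action='store_true')
    part.add_argument('--mbr', action='store_true')

    bootloader = subparsers.add_parser('bootloader')
    bootloader.add_argument('--ptable', choices=('msdos', 'gpt', 'gpt-hybrid'),
                            default='msdos')

    print(parser.parse_args(['bootloader', '--ptable', 'gpt-hybrid']))
    print(parser.parse_args(['part', '--fstype', 'none', '--fspassno', '2']))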
diff --git a/scripts/lib/wic/misc.py b/scripts/lib/wic/misc.py index 3e11822996..1a7c140fa6 100644 --- a/scripts/lib/wic/misc.py +++ b/scripts/lib/wic/misc.py | |||
@@ -25,7 +25,7 @@ from wic import WicError | |||
25 | logger = logging.getLogger('wic') | 25 | logger = logging.getLogger('wic') |
26 | 26 | ||
27 | # executable -> recipe pairs for exec_native_cmd | 27 | # executable -> recipe pairs for exec_native_cmd |
28 | NATIVE_RECIPES = {"bmaptool": "bmap-tools", | 28 | NATIVE_RECIPES = {"bmaptool": "bmaptool", |
29 | "dumpe2fs": "e2fsprogs", | 29 | "dumpe2fs": "e2fsprogs", |
30 | "grub-mkimage": "grub-efi", | 30 | "grub-mkimage": "grub-efi", |
31 | "isohybrid": "syslinux", | 31 | "isohybrid": "syslinux", |
@@ -36,6 +36,7 @@ NATIVE_RECIPES = {"bmaptool": "bmap-tools", | |||
36 | "mkdosfs": "dosfstools", | 36 | "mkdosfs": "dosfstools", |
37 | "mkisofs": "cdrtools", | 37 | "mkisofs": "cdrtools", |
38 | "mkfs.btrfs": "btrfs-tools", | 38 | "mkfs.btrfs": "btrfs-tools", |
39 | "mkfs.erofs": "erofs-utils", | ||
39 | "mkfs.ext2": "e2fsprogs", | 40 | "mkfs.ext2": "e2fsprogs", |
40 | "mkfs.ext3": "e2fsprogs", | 41 | "mkfs.ext3": "e2fsprogs", |
41 | "mkfs.ext4": "e2fsprogs", | 42 | "mkfs.ext4": "e2fsprogs", |
@@ -140,11 +141,12 @@ def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""): | |||
140 | cmd_and_args = pseudo + cmd_and_args | 141 | cmd_and_args = pseudo + cmd_and_args |
141 | 142 | ||
142 | hosttools_dir = get_bitbake_var("HOSTTOOLS_DIR") | 143 | hosttools_dir = get_bitbake_var("HOSTTOOLS_DIR") |
144 | target_sys = get_bitbake_var("TARGET_SYS") | ||
143 | 145 | ||
144 | native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/bin:%s" % \ | 146 | native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/usr/bin/%s:%s/bin:%s" % \ |
145 | (native_sysroot, native_sysroot, | 147 | (native_sysroot, native_sysroot, |
146 | native_sysroot, native_sysroot, | 148 | native_sysroot, native_sysroot, target_sys, |
147 | hosttools_dir) | 149 | native_sysroot, hosttools_dir) |
148 | 150 | ||
149 | native_cmd_and_args = "export PATH=%s:$PATH;%s" % \ | 151 | native_cmd_and_args = "export PATH=%s:$PATH;%s" % \ |
150 | (native_paths, cmd_and_args) | 152 | (native_paths, cmd_and_args) |
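The misc.py change above adds usr/bin/<TARGET_SYS> to the native search PATH (and maps bmaptool to its renamed recipe). The PATH construction written out as a hypothetical standalone helper, with example arguments that are placeholders:

    def native_search_path(native_sysroot, target_sys, hosttools_dir):
        # mirrors the order used by exec_native_cmd(): sbin, usr/sbin, usr/bin,
        # usr/bin/<TARGET_SYS>, bin, then HOSTTOOLS_DIR
        return ":".join([
            "%s/sbin" % native_sysroot,
            "%s/usr/sbin" % native_sysroot,
            "%s/usr/bin" % native_sysroot,
            "%s/usr/bin/%s" % (native_sysroot, target_sys),
            "%s/bin" % native_sysroot,
            hosttools_dir,
        ])

    print(native_search_path("/sysroot", "x86_64-poky-linux", "/hosttools"))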
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py index a25834048e..b34691d313 100644 --- a/scripts/lib/wic/partition.py +++ b/scripts/lib/wic/partition.py | |||
@@ -33,6 +33,7 @@ class Partition(): | |||
33 | self.include_path = args.include_path | 33 | self.include_path = args.include_path |
34 | self.change_directory = args.change_directory | 34 | self.change_directory = args.change_directory |
35 | self.fsopts = args.fsopts | 35 | self.fsopts = args.fsopts |
36 | self.fspassno = args.fspassno | ||
36 | self.fstype = args.fstype | 37 | self.fstype = args.fstype |
37 | self.label = args.label | 38 | self.label = args.label |
38 | self.use_label = args.use_label | 39 | self.use_label = args.use_label |
@@ -58,6 +59,8 @@ class Partition(): | |||
58 | self.updated_fstab_path = None | 59 | self.updated_fstab_path = None |
59 | self.has_fstab = False | 60 | self.has_fstab = False |
60 | self.update_fstab_in_rootfs = False | 61 | self.update_fstab_in_rootfs = False |
62 | self.hidden = args.hidden | ||
63 | self.mbr = args.mbr | ||
61 | 64 | ||
62 | self.lineno = lineno | 65 | self.lineno = lineno |
63 | self.source_file = "" | 66 | self.source_file = "" |
@@ -132,6 +135,8 @@ class Partition(): | |||
132 | self.update_fstab_in_rootfs = True | 135 | self.update_fstab_in_rootfs = True |
133 | 136 | ||
134 | if not self.source: | 137 | if not self.source: |
138 | if self.fstype == "none" or self.no_table: | ||
139 | return | ||
135 | if not self.size and not self.fixed_size: | 140 | if not self.size and not self.fixed_size: |
136 | raise WicError("The %s partition has a size of zero. Please " | 141 | raise WicError("The %s partition has a size of zero. Please " |
137 | "specify a non-zero --size/--fixed-size for that " | 142 | "specify a non-zero --size/--fixed-size for that " |
@@ -159,6 +164,9 @@ class Partition(): | |||
159 | 164 | ||
160 | plugins = PluginMgr.get_plugins('source') | 165 | plugins = PluginMgr.get_plugins('source') |
161 | 166 | ||
167 | # Don't support '-' in plugin names | ||
168 | self.source = self.source.replace("-", "_") | ||
169 | |||
162 | if self.source not in plugins: | 170 | if self.source not in plugins: |
163 | raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" | 171 | raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" |
164 | "See 'wic list source-plugins' for a list of available" | 172 | "See 'wic list source-plugins' for a list of available" |
@@ -171,9 +179,9 @@ class Partition(): | |||
171 | # Split sourceparams string of the form key1=val1[,key2=val2,...] | 179 | # Split sourceparams string of the form key1=val1[,key2=val2,...] |
172 | # into a dict. Also accepts valueless keys i.e. without = | 180 | # into a dict. Also accepts valueless keys i.e. without = |
173 | splitted = self.sourceparams.split(',') | 181 | splitted = self.sourceparams.split(',') |
174 | srcparams_dict = dict(par.split('=', 1) for par in splitted if par) | 182 | srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par) |
175 | 183 | ||
176 | plugin = PluginMgr.get_plugins('source')[self.source] | 184 | plugin = plugins[self.source] |
177 | plugin.do_configure_partition(self, srcparams_dict, creator, | 185 | plugin.do_configure_partition(self, srcparams_dict, creator, |
178 | cr_workdir, oe_builddir, bootimg_dir, | 186 | cr_workdir, oe_builddir, bootimg_dir, |
179 | kernel_dir, native_sysroot) | 187 | kernel_dir, native_sysroot) |
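The rewritten sourceparams parsing above pads each key=value pair so that valueless keys map to None instead of raising. A toy reproduction with a made-up parameter string:

    sourceparams = "loader=systemd-boot,install-kernel-into-boot-dir"
    splitted = sourceparams.split(',')
    srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par)
    # -> {'loader': 'systemd-boot', 'install-kernel-into-boot-dir': None}
    print(srcparams_dict)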
@@ -217,19 +225,19 @@ class Partition(): | |||
217 | if (pseudo_dir): | 225 | if (pseudo_dir): |
218 | # Canonicalize the ignore paths. This corresponds to | 226 | # Canonicalize the ignore paths. This corresponds to |
219 | # calling oe.path.canonicalize(), which is used in bitbake.conf. | 227 | # calling oe.path.canonicalize(), which is used in bitbake.conf. |
220 | ignore_paths = [rootfs] + (get_bitbake_var("PSEUDO_IGNORE_PATHS") or "").split(",") | 228 | include_paths = [rootfs_dir] + (get_bitbake_var("PSEUDO_INCLUDE_PATHS") or "").split(",") |
221 | canonical_paths = [] | 229 | canonical_paths = [] |
222 | for path in ignore_paths: | 230 | for path in include_paths: |
223 | if "$" not in path: | 231 | if "$" not in path: |
224 | trailing_slash = path.endswith("/") and "/" or "" | 232 | trailing_slash = path.endswith("/") and "/" or "" |
225 | canonical_paths.append(os.path.realpath(path) + trailing_slash) | 233 | canonical_paths.append(os.path.realpath(path) + trailing_slash) |
226 | ignore_paths = ",".join(canonical_paths) | 234 | include_paths = ",".join(canonical_paths) |
227 | 235 | ||
228 | pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix | 236 | pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix |
229 | pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir | 237 | pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir |
230 | pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir | 238 | pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir |
231 | pseudo += "export PSEUDO_NOSYMLINKEXP=1;" | 239 | pseudo += "export PSEUDO_NOSYMLINKEXP=1;" |
232 | pseudo += "export PSEUDO_IGNORE_PATHS=%s;" % ignore_paths | 240 | pseudo += "export PSEUDO_INCLUDE_PATHS=%s;" % include_paths |
233 | pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") | 241 | pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") |
234 | else: | 242 | else: |
235 | pseudo = None | 243 | pseudo = None |
@@ -239,7 +247,7 @@ class Partition(): | |||
239 | # from bitbake variable | 247 | # from bitbake variable |
240 | rsize_bb = get_bitbake_var('ROOTFS_SIZE') | 248 | rsize_bb = get_bitbake_var('ROOTFS_SIZE') |
241 | rdir = get_bitbake_var('IMAGE_ROOTFS') | 249 | rdir = get_bitbake_var('IMAGE_ROOTFS') |
242 | if rsize_bb and rdir == rootfs_dir: | 250 | if rsize_bb and (rdir == rootfs_dir or (rootfs_dir.split('/')[-2] == "tmp-wic" and rootfs_dir.split('/')[-1][:6] == "rootfs")): |
243 | # Bitbake variable ROOTFS_SIZE is calculated in | 251 | # Bitbake variable ROOTFS_SIZE is calculated in |
244 | # Image._get_rootfs_size method from meta/lib/oe/image.py | 252 | # Image._get_rootfs_size method from meta/lib/oe/image.py |
245 | # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, | 253 | # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, |
@@ -279,6 +287,9 @@ class Partition(): | |||
279 | 287 | ||
280 | extraopts = self.mkfs_extraopts or "-F -i 8192" | 288 | extraopts = self.mkfs_extraopts or "-F -i 8192" |
281 | 289 | ||
290 | # use hash_seed to generate reproducible ext4 images | ||
291 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, pseudo) | ||
292 | |||
282 | label_str = "" | 293 | label_str = "" |
283 | if self.label: | 294 | if self.label: |
284 | label_str = "-L %s" % self.label | 295 | label_str = "-L %s" % self.label |
@@ -299,8 +310,49 @@ class Partition(): | |||
299 | mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs) | 310 | mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs) |
300 | exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) | 311 | exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) |
301 | 312 | ||
313 | if os.getenv('SOURCE_DATE_EPOCH'): | ||
314 | sde_time = hex(int(os.getenv('SOURCE_DATE_EPOCH'))) | ||
315 | debugfs_script_path = os.path.join(cr_workdir, "debugfs_script") | ||
316 | files = [] | ||
317 | for root, dirs, others in os.walk(rootfs_dir): | ||
318 | base = root.replace(rootfs_dir, "").rstrip(os.sep) | ||
319 | files += [ "/" if base == "" else base ] | ||
320 | files += [ base + "/" + n for n in dirs + others ] | ||
321 | with open(debugfs_script_path, "w") as f: | ||
322 | f.write("set_current_time %s\n" % (sde_time)) | ||
323 | if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update: | ||
324 | f.write("set_inode_field /etc/fstab mtime %s\n" % (sde_time)) | ||
325 | f.write("set_inode_field /etc/fstab mtime_extra 0\n") | ||
326 | for file in set(files): | ||
327 | for time in ["atime", "ctime", "crtime"]: | ||
328 | f.write("set_inode_field \"%s\" %s %s\n" % (file, time, sde_time)) | ||
329 | f.write("set_inode_field \"%s\" %s_extra 0\n" % (file, time)) | ||
330 | for time in ["wtime", "mkfs_time", "lastcheck"]: | ||
331 | f.write("set_super_value %s %s\n" % (time, sde_time)) | ||
332 | for time in ["mtime", "first_error_time", "last_error_time"]: | ||
333 | f.write("set_super_value %s 0\n" % (time)) | ||
334 | debugfs_cmd = "debugfs -w -f %s %s" % (debugfs_script_path, rootfs) | ||
335 | exec_native_cmd(debugfs_cmd, native_sysroot) | ||
336 | |||
302 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 337 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
303 | 338 | ||
339 | def get_hash_seed_ext4(self, extraopts, pseudo): | ||
340 | if os.getenv('SOURCE_DATE_EPOCH'): | ||
341 | sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) | ||
342 | if pseudo: | ||
343 | pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo) | ||
344 | else: | ||
345 | pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time | ||
346 | |||
347 | # Set hash_seed to generate deterministic directory indexes | ||
348 | namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460") | ||
349 | if self.fsuuid: | ||
350 | namespace = uuid.UUID(self.fsuuid) | ||
351 | hash_seed = str(uuid.uuid5(namespace, str(sde_time))) | ||
352 | extraopts += " -E hash_seed=%s" % hash_seed | ||
353 | |||
354 | return (extraopts, pseudo) | ||
355 | |||
304 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, | 356 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, |
305 | native_sysroot, pseudo): | 357 | native_sysroot, pseudo): |
306 | """ | 358 | """ |
@@ -352,7 +404,7 @@ class Partition(): | |||
352 | exec_native_cmd(mcopy_cmd, native_sysroot) | 404 | exec_native_cmd(mcopy_cmd, native_sysroot) |
353 | 405 | ||
354 | if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update: | 406 | if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update: |
355 | mcopy_cmd = "mcopy -i %s %s ::/etc/fstab" % (rootfs, self.updated_fstab_path) | 407 | mcopy_cmd = "mcopy -m -i %s %s ::/etc/fstab" % (rootfs, self.updated_fstab_path) |
356 | exec_native_cmd(mcopy_cmd, native_sysroot) | 408 | exec_native_cmd(mcopy_cmd, native_sysroot) |
357 | 409 | ||
358 | chmod_cmd = "chmod 644 %s" % rootfs | 410 | chmod_cmd = "chmod 644 %s" % rootfs |
@@ -380,6 +432,9 @@ class Partition(): | |||
380 | (extraopts, self.fsuuid, rootfs, rootfs_dir) | 432 | (extraopts, self.fsuuid, rootfs, rootfs_dir) |
381 | exec_native_cmd(erofs_cmd, native_sysroot, pseudo=pseudo) | 433 | exec_native_cmd(erofs_cmd, native_sysroot, pseudo=pseudo) |
382 | 434 | ||
435 | def prepare_empty_partition_none(self, rootfs, oe_builddir, native_sysroot): | ||
436 | pass | ||
437 | |||
383 | def prepare_empty_partition_ext(self, rootfs, oe_builddir, | 438 | def prepare_empty_partition_ext(self, rootfs, oe_builddir, |
384 | native_sysroot): | 439 | native_sysroot): |
385 | """ | 440 | """ |
@@ -391,13 +446,16 @@ class Partition(): | |||
391 | 446 | ||
392 | extraopts = self.mkfs_extraopts or "-i 8192" | 447 | extraopts = self.mkfs_extraopts or "-i 8192" |
393 | 448 | ||
449 | # use hash_seed to generate reproducible ext4 images | ||
450 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, None) | ||
451 | |||
394 | label_str = "" | 452 | label_str = "" |
395 | if self.label: | 453 | if self.label: |
396 | label_str = "-L %s" % self.label | 454 | label_str = "-L %s" % self.label |
397 | 455 | ||
398 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ | 456 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ |
399 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) | 457 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) |
400 | exec_native_cmd(mkfs_cmd, native_sysroot) | 458 | exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) |
401 | 459 | ||
402 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 460 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
403 | 461 | ||
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py index d9b4e57747..640da292d3 100644 --- a/scripts/lib/wic/pluginbase.py +++ b/scripts/lib/wic/pluginbase.py | |||
@@ -9,9 +9,11 @@ __all__ = ['ImagerPlugin', 'SourcePlugin'] | |||
9 | 9 | ||
10 | import os | 10 | import os |
11 | import logging | 11 | import logging |
12 | import types | ||
12 | 13 | ||
13 | from collections import defaultdict | 14 | from collections import defaultdict |
14 | from importlib.machinery import SourceFileLoader | 15 | import importlib |
16 | import importlib.util | ||
15 | 17 | ||
16 | from wic import WicError | 18 | from wic import WicError |
17 | from wic.misc import get_bitbake_var | 19 | from wic.misc import get_bitbake_var |
@@ -42,7 +44,7 @@ class PluginMgr: | |||
42 | path = os.path.join(layer_path, script_plugin_dir) | 44 | path = os.path.join(layer_path, script_plugin_dir) |
43 | path = os.path.abspath(os.path.expanduser(path)) | 45 | path = os.path.abspath(os.path.expanduser(path)) |
44 | if path not in cls._plugin_dirs and os.path.isdir(path): | 46 | if path not in cls._plugin_dirs and os.path.isdir(path): |
45 | cls._plugin_dirs.insert(0, path) | 47 | cls._plugin_dirs.append(path) |
46 | 48 | ||
47 | if ptype not in PLUGINS: | 49 | if ptype not in PLUGINS: |
48 | # load all ptype plugins | 50 | # load all ptype plugins |
@@ -54,7 +56,9 @@ class PluginMgr: | |||
54 | mname = fname[:-3] | 56 | mname = fname[:-3] |
55 | mpath = os.path.join(ppath, fname) | 57 | mpath = os.path.join(ppath, fname) |
56 | logger.debug("loading plugin module %s", mpath) | 58 | logger.debug("loading plugin module %s", mpath) |
57 | SourceFileLoader(mname, mpath).load_module() | 59 | spec = importlib.util.spec_from_file_location(mname, mpath) |
60 | module = importlib.util.module_from_spec(spec) | ||
61 | spec.loader.exec_module(module) | ||
58 | 62 | ||
59 | return PLUGINS.get(ptype) | 63 | return PLUGINS.get(ptype) |
60 | 64 | ||
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py index 35fff7c102..6e1f1c8cba 100644 --- a/scripts/lib/wic/plugins/imager/direct.py +++ b/scripts/lib/wic/plugins/imager/direct.py | |||
@@ -117,7 +117,7 @@ class DirectPlugin(ImagerPlugin): | |||
117 | updated = False | 117 | updated = False |
118 | for part in self.parts: | 118 | for part in self.parts: |
119 | if not part.realnum or not part.mountpoint \ | 119 | if not part.realnum or not part.mountpoint \ |
120 | or part.mountpoint == "/" or not part.mountpoint.startswith('/'): | 120 | or part.mountpoint == "/" or not (part.mountpoint.startswith('/') or part.mountpoint == "swap"): |
121 | continue | 121 | continue |
122 | 122 | ||
123 | if part.use_uuid: | 123 | if part.use_uuid: |
@@ -138,8 +138,9 @@ class DirectPlugin(ImagerPlugin): | |||
138 | device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum) | 138 | device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum) |
139 | 139 | ||
140 | opts = part.fsopts if part.fsopts else "defaults" | 140 | opts = part.fsopts if part.fsopts else "defaults" |
141 | passno = part.fspassno if part.fspassno else "0" | ||
141 | line = "\t".join([device_name, part.mountpoint, part.fstype, | 142 | line = "\t".join([device_name, part.mountpoint, part.fstype, |
142 | opts, "0", "0"]) + "\n" | 143 | opts, "0", passno]) + "\n" |
143 | 144 | ||
144 | fstab_lines.append(line) | 145 | fstab_lines.append(line) |
145 | updated = True | 146 | updated = True |
@@ -148,6 +149,9 @@ class DirectPlugin(ImagerPlugin): | |||
148 | self.updated_fstab_path = os.path.join(self.workdir, "fstab") | 149 | self.updated_fstab_path = os.path.join(self.workdir, "fstab") |
149 | with open(self.updated_fstab_path, "w") as f: | 150 | with open(self.updated_fstab_path, "w") as f: |
150 | f.writelines(fstab_lines) | 151 | f.writelines(fstab_lines) |
152 | if os.getenv('SOURCE_DATE_EPOCH'): | ||
153 | fstab_time = int(os.getenv('SOURCE_DATE_EPOCH')) | ||
154 | os.utime(self.updated_fstab_path, (fstab_time, fstab_time)) | ||
151 | 155 | ||
152 | def _full_path(self, path, name, extention): | 156 | def _full_path(self, path, name, extention): |
153 | """ Construct full file path to a file we generate. """ | 157 | """ Construct full file path to a file we generate. """ |
@@ -199,6 +203,8 @@ class DirectPlugin(ImagerPlugin): | |||
199 | source_plugin = self.ks.bootloader.source | 203 | source_plugin = self.ks.bootloader.source |
200 | disk_name = self.parts[0].disk | 204 | disk_name = self.parts[0].disk |
201 | if source_plugin: | 205 | if source_plugin: |
206 | # Don't support '-' in plugin names | ||
207 | source_plugin = source_plugin.replace("-", "_") | ||
202 | plugin = PluginMgr.get_plugins('source')[source_plugin] | 208 | plugin = PluginMgr.get_plugins('source')[source_plugin] |
203 | plugin.do_install_disk(self._image, disk_name, self, self.workdir, | 209 | plugin.do_install_disk(self._image, disk_name, self, self.workdir, |
204 | self.oe_builddir, self.bootimg_dir, | 210 | self.oe_builddir, self.bootimg_dir, |
@@ -259,7 +265,7 @@ class DirectPlugin(ImagerPlugin): | |||
259 | if part.mountpoint == "/": | 265 | if part.mountpoint == "/": |
260 | if part.uuid: | 266 | if part.uuid: |
261 | return "PARTUUID=%s" % part.uuid | 267 | return "PARTUUID=%s" % part.uuid |
262 | elif part.label: | 268 | elif part.label and self.ptable_format != 'msdos': |
263 | return "PARTLABEL=%s" % part.label | 269 | return "PARTLABEL=%s" % part.label |
264 | else: | 270 | else: |
265 | suffix = 'p' if part.disk.startswith('mmcblk') else '' | 271 | suffix = 'p' if part.disk.startswith('mmcblk') else '' |
@@ -309,12 +315,23 @@ class PartitionedImage(): | |||
309 | # all partitions (in bytes) | 315 | # all partitions (in bytes) |
310 | self.ptable_format = ptable_format # Partition table format | 316 | self.ptable_format = ptable_format # Partition table format |
311 | # Disk system identifier | 317 | # Disk system identifier |
312 | self.identifier = random.SystemRandom().randint(1, 0xffffffff) | 318 | if os.getenv('SOURCE_DATE_EPOCH'): |
319 | self.identifier = random.Random(int(os.getenv('SOURCE_DATE_EPOCH'))).randint(1, 0xffffffff) | ||
320 | else: | ||
321 | self.identifier = random.SystemRandom().randint(1, 0xffffffff) | ||
313 | 322 | ||
314 | self.partitions = partitions | 323 | self.partitions = partitions |
315 | self.partimages = [] | 324 | self.partimages = [] |
316 | # Size of a sector used in calculations | 325 | # Size of a sector used in calculations |
317 | self.sector_size = SECTOR_SIZE | 326 | sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE') |
327 | if sector_size_str is not None: | ||
328 | try: | ||
329 | self.sector_size = int(sector_size_str) | ||
330 | except ValueError: | ||
331 | self.sector_size = SECTOR_SIZE | ||
332 | else: | ||
333 | self.sector_size = SECTOR_SIZE | ||
334 | |||
318 | self.native_sysroot = native_sysroot | 335 | self.native_sysroot = native_sysroot |
319 | num_real_partitions = len([p for p in self.partitions if not p.no_table]) | 336 | num_real_partitions = len([p for p in self.partitions if not p.no_table]) |
320 | self.extra_space = extra_space | 337 | self.extra_space = extra_space |
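The PartitionedImage constructor above now seeds the MBR disk identifier from SOURCE_DATE_EPOCH when it is set and reads the sector size from WIC_SECTOR_SIZE with a fall-back to the built-in default. The identifier logic in isolation:

    import os
    import random

    sde = os.getenv('SOURCE_DATE_EPOCH')
    if sde:
        # a seeded PRNG yields the same identifier for the same epoch,
        # making the image reproducible
        identifier = random.Random(int(sde)).randint(1, 0xffffffff)
    else:
        identifier = random.SystemRandom().randint(1, 0xffffffff)
    print("%08x" % identifier)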
@@ -335,7 +352,7 @@ class PartitionedImage(): | |||
335 | # generate parition and filesystem UUIDs | 352 | # generate parition and filesystem UUIDs |
336 | for part in self.partitions: | 353 | for part in self.partitions: |
337 | if not part.uuid and part.use_uuid: | 354 | if not part.uuid and part.use_uuid: |
338 | if self.ptable_format == 'gpt': | 355 | if self.ptable_format in ('gpt', 'gpt-hybrid'): |
339 | part.uuid = str(uuid.uuid4()) | 356 | part.uuid = str(uuid.uuid4()) |
340 | else: # msdos partition table | 357 | else: # msdos partition table |
341 | part.uuid = '%08x-%02d' % (self.identifier, part.realnum) | 358 | part.uuid = '%08x-%02d' % (self.identifier, part.realnum) |
@@ -391,6 +408,10 @@ class PartitionedImage(): | |||
391 | raise WicError("setting custom partition type is not " \ | 408 | raise WicError("setting custom partition type is not " \ |
392 | "implemented for msdos partitions") | 409 | "implemented for msdos partitions") |
393 | 410 | ||
411 | if part.mbr and self.ptable_format != 'gpt-hybrid': | ||
412 | raise WicError("Partition may only be included in MBR with " \ | ||
413 | "a gpt-hybrid partition table") | ||
414 | |||
394 | # Get the disk where the partition is located | 415 | # Get the disk where the partition is located |
395 | self.numpart += 1 | 416 | self.numpart += 1 |
396 | if not part.no_table: | 417 | if not part.no_table: |
@@ -399,7 +420,7 @@ class PartitionedImage(): | |||
399 | if self.numpart == 1: | 420 | if self.numpart == 1: |
400 | if self.ptable_format == "msdos": | 421 | if self.ptable_format == "msdos": |
401 | overhead = MBR_OVERHEAD | 422 | overhead = MBR_OVERHEAD |
402 | elif self.ptable_format == "gpt": | 423 | elif self.ptable_format in ("gpt", "gpt-hybrid"): |
403 | overhead = GPT_OVERHEAD | 424 | overhead = GPT_OVERHEAD |
404 | 425 | ||
405 | # Skip one sector required for the partitioning scheme overhead | 426 | # Skip one sector required for the partitioning scheme overhead |
@@ -483,7 +504,7 @@ class PartitionedImage(): | |||
483 | # Once all the partitions have been layed out, we can calculate the | 504 | # Once all the partitions have been layed out, we can calculate the |
484 | # minumim disk size | 505 | # minumim disk size |
485 | self.min_size = self.offset | 506 | self.min_size = self.offset |
486 | if self.ptable_format == "gpt": | 507 | if self.ptable_format in ("gpt", "gpt-hybrid"): |
487 | self.min_size += GPT_OVERHEAD | 508 | self.min_size += GPT_OVERHEAD |
488 | 509 | ||
489 | self.min_size *= self.sector_size | 510 | self.min_size *= self.sector_size |
@@ -497,29 +518,58 @@ class PartitionedImage(): | |||
497 | logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", | 518 | logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", |
498 | parttype, start, end, size) | 519 | parttype, start, end, size) |
499 | 520 | ||
500 | cmd = "parted -s %s unit s mkpart %s" % (device, parttype) | 521 | cmd = "export PARTED_SECTOR_SIZE=%d; parted -s %s unit s mkpart %s" % \ |
522 | (self.sector_size, device, parttype) | ||
501 | if fstype: | 523 | if fstype: |
502 | cmd += " %s" % fstype | 524 | cmd += " %s" % fstype |
503 | cmd += " %d %d" % (start, end) | 525 | cmd += " %d %d" % (start, end) |
504 | 526 | ||
505 | return exec_native_cmd(cmd, self.native_sysroot) | 527 | return exec_native_cmd(cmd, self.native_sysroot) |
506 | 528 | ||
529 | def _write_identifier(self, device, identifier): | ||
530 | logger.debug("Set disk identifier %x", identifier) | ||
531 | with open(device, 'r+b') as img: | ||
532 | img.seek(0x1B8) | ||
533 | img.write(identifier.to_bytes(4, 'little')) | ||
534 | |||
535 | def _make_disk(self, device, ptable_format, min_size): | ||
536 | logger.debug("Creating sparse file %s", device) | ||
537 | with open(device, 'w') as sparse: | ||
538 | os.ftruncate(sparse.fileno(), min_size) | ||
539 | |||
540 | logger.debug("Initializing partition table for %s", device) | ||
541 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s mklabel %s" % | ||
542 | (self.sector_size, device, ptable_format), self.native_sysroot) | ||
543 | |||
544 | def _write_disk_guid(self): | ||
545 | if self.ptable_format in ('gpt', 'gpt-hybrid'): | ||
546 | if os.getenv('SOURCE_DATE_EPOCH'): | ||
547 | self.disk_guid = uuid.UUID(int=int(os.getenv('SOURCE_DATE_EPOCH'))) | ||
548 | else: | ||
549 | self.disk_guid = uuid.uuid4() | ||
550 | |||
551 | logger.debug("Set disk guid %s", self.disk_guid) | ||
552 | sfdisk_cmd = "sfdisk --sector-size %s --disk-id %s %s" % \ | ||
553 | (self.sector_size, self.path, self.disk_guid) | ||
554 | exec_native_cmd(sfdisk_cmd, self.native_sysroot) | ||
555 | |||
507 | def create(self): | 556 | def create(self): |
508 | logger.debug("Creating sparse file %s", self.path) | 557 | self._make_disk(self.path, |
509 | with open(self.path, 'w') as sparse: | 558 | "gpt" if self.ptable_format == "gpt-hybrid" else self.ptable_format, |
510 | os.ftruncate(sparse.fileno(), self.min_size) | 559 | self.min_size) |
511 | 560 | ||
512 | logger.debug("Initializing partition table for %s", self.path) | 561 | self._write_identifier(self.path, self.identifier) |
513 | exec_native_cmd("parted -s %s mklabel %s" % | 562 | self._write_disk_guid() |
514 | (self.path, self.ptable_format), self.native_sysroot) | ||
515 | 563 | ||
516 | logger.debug("Set disk identifier %x", self.identifier) | 564 | if self.ptable_format == "gpt-hybrid": |
517 | with open(self.path, 'r+b') as img: | 565 | mbr_path = self.path + ".mbr" |
518 | img.seek(0x1B8) | 566 | self._make_disk(mbr_path, "msdos", self.min_size) |
519 | img.write(self.identifier.to_bytes(4, 'little')) | 567 | self._write_identifier(mbr_path, self.identifier) |
520 | 568 | ||
521 | logger.debug("Creating partitions") | 569 | logger.debug("Creating partitions") |
522 | 570 | ||
571 | hybrid_mbr_part_num = 0 | ||
572 | |||
523 | for part in self.partitions: | 573 | for part in self.partitions: |
524 | if part.num == 0: | 574 | if part.num == 0: |
525 | continue | 575 | continue |
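_write_disk_guid() above makes the GPT disk GUID deterministic under SOURCE_DATE_EPOCH and stamps it into the image with sfdisk --disk-id. The GUID derivation on its own:

    import os
    import uuid

    sde = os.getenv('SOURCE_DATE_EPOCH')
    # an integer-seeded UUID is stable across builds; otherwise stay random
    disk_guid = uuid.UUID(int=int(sde)) if sde else uuid.uuid4()
    print(disk_guid)
    # the plugin then runs: sfdisk --sector-size <n> --disk-id <guid> <image>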
@@ -564,46 +614,77 @@ class PartitionedImage(): | |||
564 | self._create_partition(self.path, part.type, | 614 | self._create_partition(self.path, part.type, |
565 | parted_fs_type, part.start, part.size_sec) | 615 | parted_fs_type, part.start, part.size_sec) |
566 | 616 | ||
567 | if part.part_name: | 617 | if self.ptable_format == "gpt-hybrid" and part.mbr: |
568 | logger.debug("partition %d: set name to %s", | 618 | hybrid_mbr_part_num += 1 |
569 | part.num, part.part_name) | 619 | if hybrid_mbr_part_num > 4: |
570 | exec_native_cmd("sgdisk --change-name=%d:%s %s" % \ | 620 | raise WicError("Extended MBR partitions are not supported in hybrid MBR") |
571 | (part.num, part.part_name, | 621 | self._create_partition(mbr_path, "primary", |
572 | self.path), self.native_sysroot) | 622 | parted_fs_type, part.start, part.size_sec) |
573 | 623 | ||
624 | if self.ptable_format in ("gpt", "gpt-hybrid") and (part.part_name or part.label): | ||
625 | partition_label = part.part_name if part.part_name else part.label | ||
626 | logger.debug("partition %d: set name to %s", | ||
627 | part.num, partition_label) | ||
628 | exec_native_cmd("sfdisk --sector-size %s --part-label %s %d %s" % \ | ||
629 | (self.sector_size, self.path, part.num, | ||
630 | partition_label), self.native_sysroot) | ||
574 | if part.part_type: | 631 | if part.part_type: |
575 | logger.debug("partition %d: set type UID to %s", | 632 | logger.debug("partition %d: set type UID to %s", |
576 | part.num, part.part_type) | 633 | part.num, part.part_type) |
577 | exec_native_cmd("sgdisk --typecode=%d:%s %s" % \ | 634 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %d %s" % \ |
578 | (part.num, part.part_type, | 635 | (self.sector_size, self.path, part.num, |
579 | self.path), self.native_sysroot) | 636 | part.part_type), self.native_sysroot) |
580 | 637 | ||
581 | if part.uuid and self.ptable_format == "gpt": | 638 | if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"): |
582 | logger.debug("partition %d: set UUID to %s", | 639 | logger.debug("partition %d: set UUID to %s", |
583 | part.num, part.uuid) | 640 | part.num, part.uuid) |
584 | exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \ | 641 | exec_native_cmd("sfdisk --sector-size %s --part-uuid %s %d %s" % \ |
585 | (part.num, part.uuid, self.path), | 642 | (self.sector_size, self.path, part.num, part.uuid), |
586 | self.native_sysroot) | ||
587 | |||
588 | if part.label and self.ptable_format == "gpt": | ||
589 | logger.debug("partition %d: set name to %s", | ||
590 | part.num, part.label) | ||
591 | exec_native_cmd("parted -s %s name %d %s" % \ | ||
592 | (self.path, part.num, part.label), | ||
593 | self.native_sysroot) | 643 | self.native_sysroot) |
594 | 644 | ||
595 | if part.active: | 645 | if part.active: |
596 | flag_name = "legacy_boot" if self.ptable_format == 'gpt' else "boot" | 646 | flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot" |
597 | logger.debug("Set '%s' flag for partition '%s' on disk '%s'", | 647 | logger.debug("Set '%s' flag for partition '%s' on disk '%s'", |
598 | flag_name, part.num, self.path) | 648 | flag_name, part.num, self.path) |
599 | exec_native_cmd("parted -s %s set %d %s on" % \ | 649 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \ |
600 | (self.path, part.num, flag_name), | 650 | (self.sector_size, self.path, part.num, flag_name), |
601 | self.native_sysroot) | 651 | self.native_sysroot) |
652 | if self.ptable_format == 'gpt-hybrid' and part.mbr: | ||
653 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \ | ||
654 | (self.sector_size, mbr_path, hybrid_mbr_part_num, "boot"), | ||
655 | self.native_sysroot) | ||
602 | if part.system_id: | 656 | if part.system_id: |
603 | exec_native_cmd("sfdisk --part-type %s %s %s" % \ | 657 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %s %s" % \ |
604 | (self.path, part.num, part.system_id), | 658 | (self.sector_size, self.path, part.num, part.system_id), |
605 | self.native_sysroot) | 659 | self.native_sysroot) |
606 | 660 | ||
661 | if part.hidden and self.ptable_format == "gpt": | ||
662 | logger.debug("Set hidden attribute for partition '%s' on disk '%s'", | ||
663 | part.num, self.path) | ||
664 | exec_native_cmd("sfdisk --sector-size %s --part-attrs %s %s RequiredPartition" % \ | ||
665 | (self.sector_size, self.path, part.num), | ||
666 | self.native_sysroot) | ||
667 | |||
668 | if self.ptable_format == "gpt-hybrid": | ||
669 | # Write a protective GPT partition | ||
670 | hybrid_mbr_part_num += 1 | ||
671 | if hybrid_mbr_part_num > 4: | ||
672 | raise WicError("Extended MBR partitions are not supported in hybrid MBR") | ||
673 | |||
674 | # parted cannot directly create a protective GPT partition, so | ||
675 | # create with an arbitrary type, then change it to the correct type | ||
676 | # with sfdisk | ||
677 | self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD) | ||
678 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %d 0xee" % \ | ||
679 | (self.sector_size, mbr_path, hybrid_mbr_part_num), | ||
680 | self.native_sysroot) | ||
681 | |||
682 | # Copy hybrid MBR | ||
683 | with open(mbr_path, "rb") as mbr_file: | ||
684 | with open(self.path, "r+b") as image_file: | ||
685 | mbr = mbr_file.read(512) | ||
686 | image_file.write(mbr) | ||
687 | |||
607 | def cleanup(self): | 688 | def cleanup(self): |
608 | pass | 689 | pass |
609 | 690 | ||
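For gpt-hybrid tables, create() above builds a separate msdos-labelled scratch image and then splices its first sector over the protective MBR of the GPT image. The copy step as a standalone sketch (file names are hypothetical):

    with open("image.wic.mbr", "rb") as mbr_file, \
         open("image.wic", "r+b") as image_file:
        # overwrite only the first 512 bytes (the protective MBR); the GPT
        # header and partition entries that follow remain untouched
        image_file.write(mbr_file.read(512))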
diff --git a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py index 5bd7390680..4279ddded8 100644 --- a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py +++ b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py | |||
@@ -13,7 +13,7 @@ | |||
13 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | 13 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
14 | # | 14 | # |
15 | # DESCRIPTION | 15 | # DESCRIPTION |
16 | # This implements the 'bootimg-biosplusefi' source plugin class for 'wic' | 16 | # This implements the 'bootimg_biosplusefi' source plugin class for 'wic' |
17 | # | 17 | # |
18 | # AUTHORS | 18 | # AUTHORS |
19 | # William Bourque <wbourque [at) gmail.com> | 19 | # William Bourque <wbourque [at) gmail.com> |
@@ -34,7 +34,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
34 | 34 | ||
35 | Note it is possible to create an image that can boot from both | 35 | Note it is possible to create an image that can boot from both |
36 | legacy BIOS and EFI by defining two partitions : one with arg | 36 | legacy BIOS and EFI by defining two partitions : one with arg |
37 | --source bootimg-efi and another one with --source bootimg-pcbios. | 37 | --source bootimg_efi and another one with --source bootimg_pcbios. |
38 | However, this method has the obvious downside that it requires TWO | 38 | However, this method has the obvious downside that it requires TWO |
39 | partitions to be created on the storage device. | 39 | partitions to be created on the storage device. |
40 | Both partitions will also be marked as "bootable" which does not work on | 40 | Both partitions will also be marked as "bootable" which does not work on |
@@ -45,7 +45,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
45 | the first partition will be duplicated into the second, even though it | 45 | the first partition will be duplicated into the second, even though it |
46 | will not be used at all. | 46 | will not be used at all. |
47 | 47 | ||
48 | Also, unlike "isoimage-isohybrid" that also does BIOS and EFI, this plugin | 48 | Also, unlike "isoimage_isohybrid" that also does BIOS and EFI, this plugin |
49 | allows you to have more than only a single rootfs partitions and does | 49 | allows you to have more than only a single rootfs partitions and does |
50 | not turn the rootfs into an initramfs RAM image. | 50 | not turn the rootfs into an initramfs RAM image. |
51 | 51 | ||
@@ -53,32 +53,32 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
53 | does not have the limitations listed above. | 53 | does not have the limitations listed above. |
54 | 54 | ||
55 | The plugin is made so it does tries not to reimplement what's already | 55 | The plugin is made so it does tries not to reimplement what's already |
56 | been done in other plugins; as such it imports "bootimg-pcbios" | 56 | been done in other plugins; as such it imports "bootimg_pcbios" |
57 | and "bootimg-efi". | 57 | and "bootimg_efi". |
58 | Plugin "bootimg-pcbios" is used to generate legacy BIOS boot. | 58 | Plugin "bootimg_pcbios" is used to generate legacy BIOS boot. |
59 | Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it | 59 | Plugin "bootimg_efi" is used to generate the UEFI boot. Note that it |
60 | requires a --sourceparams argument to know which loader to use; refer | 60 | requires a --sourceparams argument to know which loader to use; refer |
61 | to "bootimg-efi" code/documentation for the list of loader. | 61 | to "bootimg_efi" code/documentation for the list of loader. |
62 | 62 | ||
63 | Imports are handled with "SourceFileLoader" from importlib as it is | 63 | Imports are handled with "SourceFileLoader" from importlib as it is |
64 | otherwise very difficult to import module that has hyphen "-" in their | 64 | otherwise very difficult to import module that has hyphen "-" in their |
65 | filename. | 65 | filename. |
66 | The SourcePlugin() methods used in the plugins (do_install_disk, | 66 | The SourcePlugin() methods used in the plugins (do_install_disk, |
67 | do_configure_partition, do_prepare_partition) are then called on both, | 67 | do_configure_partition, do_prepare_partition) are then called on both, |
68 | beginning by "bootimg-efi". | 68 | beginning by "bootimg_efi". |
69 | 69 | ||
70 | Plugin options, such as "--sourceparams" can still be passed to a | 70 | Plugin options, such as "--sourceparams" can still be passed to a |
71 | plugin, as long they does not cause issue in the other plugin. | 71 | plugin, as long they does not cause issue in the other plugin. |
72 | 72 | ||
73 | Example wic configuration: | 73 | Example wic configuration: |
74 | part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\ | 74 | part /boot --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\\ |
75 | --ondisk sda --label os_boot --active --align 1024 --use-uuid | 75 | --ondisk sda --label os_boot --active --align 1024 --use-uuid |
76 | """ | 76 | """ |
77 | 77 | ||
78 | name = 'bootimg-biosplusefi' | 78 | name = 'bootimg_biosplusefi' |
79 | 79 | ||
80 | __PCBIOS_MODULE_NAME = "bootimg-pcbios" | 80 | __PCBIOS_MODULE_NAME = "bootimg_pcbios" |
81 | __EFI_MODULE_NAME = "bootimg-efi" | 81 | __EFI_MODULE_NAME = "bootimg_efi" |
82 | 82 | ||
83 | __imgEFIObj = None | 83 | __imgEFIObj = None |
84 | __imgBiosObj = None | 84 | __imgBiosObj = None |
@@ -100,7 +100,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
100 | 100 | ||
101 | """ | 101 | """ |
102 | 102 | ||
103 | # Import bootimg-pcbios (class name "BootimgPcbiosPlugin") | 103 | # Import bootimg_pcbios (class name "BootimgPcbiosPlugin") |
104 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), | 104 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), |
105 | cls.__PCBIOS_MODULE_NAME + ".py") | 105 | cls.__PCBIOS_MODULE_NAME + ".py") |
106 | loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) | 106 | loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) |
@@ -108,7 +108,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
108 | loader.exec_module(mod) | 108 | loader.exec_module(mod) |
109 | cls.__imgBiosObj = mod.BootimgPcbiosPlugin() | 109 | cls.__imgBiosObj = mod.BootimgPcbiosPlugin() |
110 | 110 | ||
111 | # Import bootimg-efi (class name "BootimgEFIPlugin") | 111 | # Import bootimg_efi (class name "BootimgEFIPlugin") |
112 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), | 112 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), |
113 | cls.__EFI_MODULE_NAME + ".py") | 113 | cls.__EFI_MODULE_NAME + ".py") |
114 | loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) | 114 | loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) |
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg_efi.py index 0391aebdc8..cf16705a28 100644 --- a/scripts/lib/wic/plugins/source/bootimg-efi.py +++ b/scripts/lib/wic/plugins/source/bootimg_efi.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'bootimg-efi' source plugin class for 'wic' | 7 | # This implements the 'bootimg_efi' source plugin class for 'wic' |
8 | # | 8 | # |
9 | # AUTHORS | 9 | # AUTHORS |
10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> | 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> |
@@ -32,7 +32,29 @@ class BootimgEFIPlugin(SourcePlugin): | |||
32 | This plugin supports GRUB 2 and systemd-boot bootloaders. | 32 | This plugin supports GRUB 2 and systemd-boot bootloaders. |
33 | """ | 33 | """ |
34 | 34 | ||
35 | name = 'bootimg-efi' | 35 | name = 'bootimg_efi' |
36 | |||
37 | @classmethod | ||
38 | def _copy_additional_files(cls, hdddir, initrd, dtb): | ||
39 | bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") | ||
40 | if not bootimg_dir: | ||
41 | raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") | ||
42 | |||
43 | if initrd: | ||
44 | initrds = initrd.split(';') | ||
45 | for rd in initrds: | ||
46 | cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, rd, hdddir) | ||
47 | out = exec_cmd(cp_cmd, True) | ||
48 | logger.debug("initrd files:\n%s" % (out)) | ||
49 | else: | ||
50 | logger.debug("Ignoring missing initrd") | ||
51 | |||
52 | if dtb: | ||
53 | if ';' in dtb: | ||
54 | raise WicError("Only one DTB supported, exiting") | ||
55 | cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, dtb, hdddir) | ||
56 | out = exec_cmd(cp_cmd, True) | ||
57 | logger.debug("dtb files:\n%s" % (out)) | ||
36 | 58 | ||
37 | @classmethod | 59 | @classmethod |
38 | def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): | 60 | def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): |
@@ -53,18 +75,9 @@ class BootimgEFIPlugin(SourcePlugin): | |||
53 | "get it from %s." % configfile) | 75 | "get it from %s." % configfile) |
54 | 76 | ||
55 | initrd = source_params.get('initrd') | 77 | initrd = source_params.get('initrd') |
78 | dtb = source_params.get('dtb') | ||
56 | 79 | ||
57 | if initrd: | 80 | cls._copy_additional_files(hdddir, initrd, dtb) |
58 | bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") | ||
59 | if not bootimg_dir: | ||
60 | raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") | ||
61 | |||
62 | initrds = initrd.split(';') | ||
63 | for rd in initrds: | ||
64 | cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir) | ||
65 | exec_cmd(cp_cmd, True) | ||
66 | else: | ||
67 | logger.debug("Ignoring missing initrd") | ||
68 | 81 | ||
69 | if not custom_cfg: | 82 | if not custom_cfg: |
70 | # Create grub configuration using parameters from wks file | 83 | # Create grub configuration using parameters from wks file |
@@ -98,6 +111,9 @@ class BootimgEFIPlugin(SourcePlugin): | |||
98 | grubefi_conf += " /%s" % rd | 111 | grubefi_conf += " /%s" % rd |
99 | grubefi_conf += "\n" | 112 | grubefi_conf += "\n" |
100 | 113 | ||
114 | if dtb: | ||
115 | grubefi_conf += "devicetree /%s\n" % dtb | ||
116 | |||
101 | grubefi_conf += "}\n" | 117 | grubefi_conf += "}\n" |
102 | 118 | ||
103 | logger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg", | 119 | logger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg", |
@@ -109,8 +125,16 @@ class BootimgEFIPlugin(SourcePlugin): | |||
109 | @classmethod | 125 | @classmethod |
110 | def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): | 126 | def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): |
111 | """ | 127 | """ |
112 | Create loader-specific systemd-boot/gummiboot config | 128 | Create loader-specific systemd-boot/gummiboot config. Unified Kernel Image (uki) |
129 | support is handled in the image recipe via uki.bbclass; only the systemd-boot loader | ||
130 | config and the ESP partition structure are created here. | ||
113 | """ | 131 | """ |
132 | # detect uki.bbclass usage | ||
133 | image_classes = get_bitbake_var("IMAGE_CLASSES").split() | ||
134 | unified_image = False | ||
135 | if "uki" in image_classes: | ||
136 | unified_image = True | ||
137 | |||
114 | install_cmd = "install -d %s/loader" % hdddir | 138 | install_cmd = "install -d %s/loader" % hdddir |
115 | exec_cmd(install_cmd) | 139 | exec_cmd(install_cmd) |
116 | 140 | ||
@@ -118,35 +142,26 @@ class BootimgEFIPlugin(SourcePlugin): | |||
118 | exec_cmd(install_cmd) | 142 | exec_cmd(install_cmd) |
119 | 143 | ||
120 | bootloader = creator.ks.bootloader | 144 | bootloader = creator.ks.bootloader |
121 | |||
122 | loader_conf = "" | 145 | loader_conf = "" |
123 | if source_params.get('create-unified-kernel-image') != "true": | ||
124 | loader_conf += "default boot\n" | ||
125 | loader_conf += "timeout %d\n" % bootloader.timeout | ||
126 | 146 | ||
127 | initrd = source_params.get('initrd') | 147 | # 5 seconds is a sensible default timeout |
128 | 148 | loader_conf += "timeout %d\n" % (bootloader.timeout or 5) | |
129 | if initrd and source_params.get('create-unified-kernel-image') != "true": | ||
130 | # obviously we need to have a common common deploy var | ||
131 | bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") | ||
132 | if not bootimg_dir: | ||
133 | raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") | ||
134 | |||
135 | initrds = initrd.split(';') | ||
136 | for rd in initrds: | ||
137 | cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir) | ||
138 | exec_cmd(cp_cmd, True) | ||
139 | else: | ||
140 | logger.debug("Ignoring missing initrd") | ||
141 | 149 | ||
142 | logger.debug("Writing systemd-boot config " | 150 | logger.debug("Writing systemd-boot config " |
143 | "%s/hdd/boot/loader/loader.conf", cr_workdir) | 151 | "%s/hdd/boot/loader/loader.conf", cr_workdir) |
144 | cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") | 152 | cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") |
145 | cfg.write(loader_conf) | 153 | cfg.write(loader_conf) |
154 | logger.debug("loader.conf:\n%s" % (loader_conf)) | ||
146 | cfg.close() | 155 | cfg.close() |
147 | 156 | ||
157 | initrd = source_params.get('initrd') | ||
158 | dtb = source_params.get('dtb') | ||
159 | if not unified_image: | ||
160 | cls._copy_additional_files(hdddir, initrd, dtb) | ||
161 | |||
148 | configfile = creator.ks.bootloader.configfile | 162 | configfile = creator.ks.bootloader.configfile |
149 | custom_cfg = None | 163 | custom_cfg = None |
164 | boot_conf = "" | ||
150 | if configfile: | 165 | if configfile: |
151 | custom_cfg = get_custom_config(configfile) | 166 | custom_cfg = get_custom_config(configfile) |
152 | if custom_cfg: | 167 | if custom_cfg: |
@@ -157,8 +172,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
157 | else: | 172 | else: |
158 | raise WicError("configfile is specified but failed to " | 173 | raise WicError("configfile is specified but failed to " |
159 | "get it from %s.", configfile) | 174 | "get it from %s.", configfile) |
160 | 175 | else: | |
161 | if not custom_cfg: | ||
162 | # Create systemd-boot configuration using parameters from wks file | 176 | # Create systemd-boot configuration using parameters from wks file |
163 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | 177 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") |
164 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": | 178 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": |
@@ -168,7 +182,6 @@ class BootimgEFIPlugin(SourcePlugin): | |||
168 | 182 | ||
169 | title = source_params.get('title') | 183 | title = source_params.get('title') |
170 | 184 | ||
171 | boot_conf = "" | ||
172 | boot_conf += "title %s\n" % (title if title else "boot") | 185 | boot_conf += "title %s\n" % (title if title else "boot") |
173 | boot_conf += "linux /%s\n" % kernel | 186 | boot_conf += "linux /%s\n" % kernel |
174 | 187 | ||
@@ -185,11 +198,15 @@ class BootimgEFIPlugin(SourcePlugin): | |||
185 | for rd in initrds: | 198 | for rd in initrds: |
186 | boot_conf += "initrd /%s\n" % rd | 199 | boot_conf += "initrd /%s\n" % rd |
187 | 200 | ||
188 | if source_params.get('create-unified-kernel-image') != "true": | 201 | if dtb: |
202 | boot_conf += "devicetree /%s\n" % dtb | ||
203 | |||
204 | if not unified_image: | ||
189 | logger.debug("Writing systemd-boot config " | 205 | logger.debug("Writing systemd-boot config " |
190 | "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) | 206 | "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) |
191 | cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") | 207 | cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") |
192 | cfg.write(boot_conf) | 208 | cfg.write(boot_conf) |
209 | logger.debug("boot.conf:\n%s" % (boot_conf)) | ||
193 | cfg.close() | 210 | cfg.close() |
194 | 211 | ||
195 | 212 | ||
@@ -210,10 +227,12 @@ class BootimgEFIPlugin(SourcePlugin): | |||
210 | cls.do_configure_grubefi(hdddir, creator, cr_workdir, source_params) | 227 | cls.do_configure_grubefi(hdddir, creator, cr_workdir, source_params) |
211 | elif source_params['loader'] == 'systemd-boot': | 228 | elif source_params['loader'] == 'systemd-boot': |
212 | cls.do_configure_systemdboot(hdddir, creator, cr_workdir, source_params) | 229 | cls.do_configure_systemdboot(hdddir, creator, cr_workdir, source_params) |
230 | elif source_params['loader'] == 'uefi-kernel': | ||
231 | pass | ||
213 | else: | 232 | else: |
214 | raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader']) | 233 | raise WicError("unrecognized bootimg_efi loader: %s" % source_params['loader']) |
215 | except KeyError: | 234 | except KeyError: |
216 | raise WicError("bootimg-efi requires a loader, none specified") | 235 | raise WicError("bootimg_efi requires a loader, none specified") |
217 | 236 | ||
218 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: | 237 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: |
219 | logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') | 238 | logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') |
@@ -233,7 +252,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
233 | 252 | ||
234 | # list of tuples (src_name, dst_name) | 253 | # list of tuples (src_name, dst_name) |
235 | deploy_files = [] | 254 | deploy_files = [] |
236 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | 255 | for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files): |
237 | if ';' in src_entry: | 256 | if ';' in src_entry: |
238 | dst_entry = tuple(src_entry.split(';')) | 257 | dst_entry = tuple(src_entry.split(';')) |
239 | if not dst_entry[0] or not dst_entry[1]: | 258 | if not dst_entry[0] or not dst_entry[1]: |
@@ -292,90 +311,83 @@ class BootimgEFIPlugin(SourcePlugin): | |||
292 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) | 311 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) |
293 | 312 | ||
294 | if source_params.get('create-unified-kernel-image') == "true": | 313 | if source_params.get('create-unified-kernel-image') == "true": |
295 | initrd = source_params.get('initrd') | 314 | raise WicError("create-unified-kernel-image is no longer supported. Please use uki.bbclass.") |
296 | if not initrd: | ||
297 | raise WicError("initrd= must be specified when create-unified-kernel-image=true, exiting") | ||
298 | |||
299 | deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") | ||
300 | efi_stub = glob("%s/%s" % (deploy_dir, "linux*.efi.stub")) | ||
301 | if len(efi_stub) == 0: | ||
302 | raise WicError("Unified Kernel Image EFI stub not found, exiting") | ||
303 | efi_stub = efi_stub[0] | ||
304 | |||
305 | with tempfile.TemporaryDirectory() as tmp_dir: | ||
306 | label = source_params.get('label') | ||
307 | label_conf = "root=%s" % creator.rootdev | ||
308 | if label: | ||
309 | label_conf = "LABEL=%s" % label | ||
310 | |||
311 | bootloader = creator.ks.bootloader | ||
312 | cmdline = open("%s/cmdline" % tmp_dir, "w") | ||
313 | cmdline.write("%s %s" % (label_conf, bootloader.append)) | ||
314 | cmdline.close() | ||
315 | 315 | ||
316 | initrds = initrd.split(';') | 316 | if source_params.get('install-kernel-into-boot-dir') != 'false': |
317 | initrd = open("%s/initrd" % tmp_dir, "wb") | 317 | install_cmd = "install -v -p -m 0644 %s/%s %s/%s" % \ |
318 | for f in initrds: | ||
319 | with open("%s/%s" % (deploy_dir, f), 'rb') as in_file: | ||
320 | shutil.copyfileobj(in_file, initrd) | ||
321 | initrd.close() | ||
322 | |||
323 | # Searched by systemd-boot: | ||
324 | # https://systemd.io/BOOT_LOADER_SPECIFICATION/#type-2-efi-unified-kernel-images | ||
325 | install_cmd = "install -d %s/EFI/Linux" % hdddir | ||
326 | exec_cmd(install_cmd) | ||
327 | |||
328 | staging_dir_host = get_bitbake_var("STAGING_DIR_HOST") | ||
329 | |||
330 | # https://www.freedesktop.org/software/systemd/man/systemd-stub.html | ||
331 | objcopy_cmd = "objcopy \ | ||
332 | --add-section .osrel=%s --change-section-vma .osrel=0x20000 \ | ||
333 | --add-section .cmdline=%s --change-section-vma .cmdline=0x30000 \ | ||
334 | --add-section .linux=%s --change-section-vma .linux=0x2000000 \ | ||
335 | --add-section .initrd=%s --change-section-vma .initrd=0x3000000 \ | ||
336 | %s %s" % \ | ||
337 | ("%s/usr/lib/os-release" % staging_dir_host, | ||
338 | cmdline.name, | ||
339 | "%s/%s" % (staging_kernel_dir, kernel), | ||
340 | initrd.name, | ||
341 | efi_stub, | ||
342 | "%s/EFI/Linux/linux.efi" % hdddir) | ||
343 | exec_cmd(objcopy_cmd) | ||
344 | else: | ||
345 | install_cmd = "install -m 0644 %s/%s %s/%s" % \ | ||
346 | (staging_kernel_dir, kernel, hdddir, kernel) | 318 | (staging_kernel_dir, kernel, hdddir, kernel) |
347 | exec_cmd(install_cmd) | 319 | out = exec_cmd(install_cmd) |
320 | logger.debug("Installed kernel files:\n%s" % out) | ||
348 | 321 | ||
349 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): | 322 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): |
350 | for src_path, dst_path in cls.install_task: | 323 | for src_path, dst_path in cls.install_task: |
351 | install_cmd = "install -m 0644 -D %s %s" \ | 324 | install_cmd = "install -v -p -m 0644 -D %s %s" \ |
352 | % (os.path.join(kernel_dir, src_path), | 325 | % (os.path.join(kernel_dir, src_path), |
353 | os.path.join(hdddir, dst_path)) | 326 | os.path.join(hdddir, dst_path)) |
354 | exec_cmd(install_cmd) | 327 | out = exec_cmd(install_cmd) |
328 | logger.debug("Installed IMAGE_EFI_BOOT_FILES:\n%s" % out) | ||
355 | 329 | ||
356 | try: | 330 | try: |
357 | if source_params['loader'] == 'grub-efi': | 331 | if source_params['loader'] == 'grub-efi': |
358 | shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, | 332 | shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, |
359 | "%s/grub.cfg" % cr_workdir) | 333 | "%s/grub.cfg" % cr_workdir) |
360 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: | 334 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: |
361 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) | 335 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) |
362 | exec_cmd(cp_cmd, True) | 336 | exec_cmd(cp_cmd, True) |
363 | shutil.move("%s/grub.cfg" % cr_workdir, | 337 | shutil.move("%s/grub.cfg" % cr_workdir, |
364 | "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) | 338 | "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) |
365 | elif source_params['loader'] == 'systemd-boot': | 339 | elif source_params['loader'] == 'systemd-boot': |
366 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: | 340 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: |
367 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) | 341 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) |
368 | exec_cmd(cp_cmd, True) | 342 | out = exec_cmd(cp_cmd, True) |
343 | logger.debug("systemd-boot files:\n%s" % out) | ||
344 | elif source_params['loader'] == 'uefi-kernel': | ||
345 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | ||
346 | if not kernel: | ||
347 | raise WicError("Empty KERNEL_IMAGETYPE") | ||
348 | target = get_bitbake_var("TARGET_SYS") | ||
349 | if not target: | ||
350 | raise WicError("Empty TARGET_SYS") | ||
351 | |||
352 | if re.match("x86_64", target): | ||
353 | kernel_efi_image = "bootx64.efi" | ||
354 | elif re.match('i.86', target): | ||
355 | kernel_efi_image = "bootia32.efi" | ||
356 | elif re.match('aarch64', target): | ||
357 | kernel_efi_image = "bootaa64.efi" | ||
358 | elif re.match('arm', target): | ||
359 | kernel_efi_image = "bootarm.efi" | ||
360 | else: | ||
361 | raise WicError("UEFI stub kernel is incompatible with target %s" % target) | ||
362 | |||
363 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]: | ||
364 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image) | ||
365 | out = exec_cmd(cp_cmd, True) | ||
366 | logger.debug("uefi-kernel files:\n%s" % out) | ||
369 | else: | 367 | else: |
370 | raise WicError("unrecognized bootimg-efi loader: %s" % | 368 | raise WicError("unrecognized bootimg_efi loader: %s" % |
371 | source_params['loader']) | 369 | source_params['loader']) |
370 | |||
371 | # must have installed at least one EFI bootloader | ||
372 | out = glob(os.path.join(hdddir, 'EFI', 'BOOT', 'boot*.efi')) | ||
373 | logger.debug("Installed EFI loader files:\n%s" % out) | ||
374 | if not out: | ||
375 | raise WicError("No EFI loaders installed to ESP partition. Check that grub-efi, systemd-boot or similar is installed.") | ||
376 | |||
372 | except KeyError: | 377 | except KeyError: |
373 | raise WicError("bootimg-efi requires a loader, none specified") | 378 | raise WicError("bootimg_efi requires a loader, none specified") |
374 | 379 | ||
375 | startup = os.path.join(kernel_dir, "startup.nsh") | 380 | startup = os.path.join(kernel_dir, "startup.nsh") |
376 | if os.path.exists(startup): | 381 | if os.path.exists(startup): |
377 | cp_cmd = "cp %s %s/" % (startup, hdddir) | 382 | cp_cmd = "cp -v -p %s %s/" % (startup, hdddir) |
378 | exec_cmd(cp_cmd, True) | 383 | out = exec_cmd(cp_cmd, True) |
384 | logger.debug("startup files:\n%s" % out) | ||
385 | |||
386 | for paths in part.include_path or []: | ||
387 | for path in paths: | ||
388 | cp_cmd = "cp -v -p -r %s %s/" % (path, hdddir) | ||
389 | out = exec_cmd(cp_cmd, True) | ||
390 | logger.debug("include_path files:\n%s" % out) | ||
379 | 391 | ||
380 | du_cmd = "du -bks %s" % hdddir | 392 | du_cmd = "du -bks %s" % hdddir |
381 | out = exec_cmd(du_cmd) | 393 | out = exec_cmd(du_cmd) |
@@ -391,17 +403,26 @@ class BootimgEFIPlugin(SourcePlugin): | |||
391 | logger.debug("Added %d extra blocks to %s to get to %d total blocks", | 403 | logger.debug("Added %d extra blocks to %s to get to %d total blocks", |
392 | extra_blocks, part.mountpoint, blocks) | 404 | extra_blocks, part.mountpoint, blocks) |
393 | 405 | ||
406 | # required for compatibility with certain devices expecting file system | ||
407 | # block count to be equal to partition block count | ||
408 | if blocks < part.fixed_size: | ||
409 | blocks = part.fixed_size | ||
410 | logger.debug("Overriding %s to %d total blocks for compatibility", | ||
411 | part.mountpoint, blocks) | ||
412 | |||
394 | # dosfs image, created by mkdosfs | 413 | # dosfs image, created by mkdosfs |
395 | bootimg = "%s/boot.img" % cr_workdir | 414 | bootimg = "%s/boot.img" % cr_workdir |
396 | 415 | ||
397 | label = part.label if part.label else "ESP" | 416 | label = part.label if part.label else "ESP" |
398 | 417 | ||
399 | dosfs_cmd = "mkdosfs -n %s -i %s -C %s %d" % \ | 418 | dosfs_cmd = "mkdosfs -v -n %s -i %s -C %s %d" % \ |
400 | (label, part.fsuuid, bootimg, blocks) | 419 | (label, part.fsuuid, bootimg, blocks) |
401 | exec_native_cmd(dosfs_cmd, native_sysroot) | 420 | exec_native_cmd(dosfs_cmd, native_sysroot) |
421 | logger.debug("mkdosfs:\n%s" % (str(out))) | ||
402 | 422 | ||
403 | mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) | 423 | mcopy_cmd = "mcopy -v -p -i %s -s %s/* ::/" % (bootimg, hdddir) |
404 | exec_native_cmd(mcopy_cmd, native_sysroot) | 424 | out = exec_native_cmd(mcopy_cmd, native_sysroot) |
425 | logger.debug("mcopy:\n%s" % (str(out))) | ||
405 | 426 | ||
406 | chmod_cmd = "chmod 644 %s" % bootimg | 427 | chmod_cmd = "chmod 644 %s" % bootimg |
407 | exec_cmd(chmod_cmd) | 428 | exec_cmd(chmod_cmd) |
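The new 'uefi-kernel' loader branch above installs an EFI-stub-capable kernel directly as the
architecture's removable-media loader, picking the file name by matching the TARGET_SYS triple.
A minimal standalone sketch of that mapping, not part of the patch (the example triples below
are assumptions for illustration):

    import re

    def efi_fallback_name(target_sys):
        """Return the removable-media EFI loader name for a target triple."""
        # Checked in the same order as the plugin; re.match anchors at the
        # start of the string, so "x86_64" never falls through to "i.86".
        arch_map = [
            ("x86_64", "bootx64.efi"),
            (r"i.86", "bootia32.efi"),
            ("aarch64", "bootaa64.efi"),
            ("arm", "bootarm.efi"),
        ]
        for pattern, name in arch_map:
            if re.match(pattern, target_sys):
                return name
        raise ValueError("UEFI stub kernel is incompatible with target %s" % target_sys)

    print(efi_fallback_name("x86_64-poky-linux"))   # bootx64.efi
    print(efi_fallback_name("aarch64-poky-linux"))  # bootaa64.efi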
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg_partition.py index 5dbe2558d2..cc121a78f0 100644 --- a/scripts/lib/wic/plugins/source/bootimg-partition.py +++ b/scripts/lib/wic/plugins/source/bootimg_partition.py | |||
@@ -1,8 +1,10 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | # DESCRIPTION | 6 | # DESCRIPTION |
5 | # This implements the 'bootimg-partition' source plugin class for | 7 | # This implements the 'bootimg_partition' source plugin class for |
6 | # 'wic'. The plugin creates an image of boot partition, copying over | 8 | # 'wic'. The plugin creates an image of boot partition, copying over |
7 | # files listed in IMAGE_BOOT_FILES bitbake variable. | 9 | # files listed in IMAGE_BOOT_FILES bitbake variable. |
8 | # | 10 | # |
@@ -14,7 +16,7 @@ import logging | |||
14 | import os | 16 | import os |
15 | import re | 17 | import re |
16 | 18 | ||
17 | from glob import glob | 19 | from oe.bootfiles import get_boot_files |
18 | 20 | ||
19 | from wic import WicError | 21 | from wic import WicError |
20 | from wic.engine import get_custom_config | 22 | from wic.engine import get_custom_config |
@@ -29,7 +31,8 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
29 | listed in IMAGE_BOOT_FILES bitbake variable. | 31 | listed in IMAGE_BOOT_FILES bitbake variable. |
30 | """ | 32 | """ |
31 | 33 | ||
32 | name = 'bootimg-partition' | 34 | name = 'bootimg_partition' |
35 | image_boot_files_var_name = 'IMAGE_BOOT_FILES' | ||
33 | 36 | ||
34 | @classmethod | 37 | @classmethod |
35 | def do_configure_partition(cls, part, source_params, cr, cr_workdir, | 38 | def do_configure_partition(cls, part, source_params, cr, cr_workdir, |
@@ -54,51 +57,16 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
54 | else: | 57 | else: |
55 | var = "" | 58 | var = "" |
56 | 59 | ||
57 | boot_files = get_bitbake_var("IMAGE_BOOT_FILES" + var) | 60 | boot_files = get_bitbake_var(cls.image_boot_files_var_name + var) |
58 | if boot_files is not None: | 61 | if boot_files is not None: |
59 | break | 62 | break |
60 | 63 | ||
61 | if boot_files is None: | 64 | if boot_files is None: |
62 | raise WicError('No boot files defined, IMAGE_BOOT_FILES unset for entry #%d' % part.lineno) | 65 | raise WicError('No boot files defined, %s unset for entry #%d' % (cls.image_boot_files_var_name, part.lineno)) |
63 | 66 | ||
64 | logger.debug('Boot files: %s', boot_files) | 67 | logger.debug('Boot files: %s', boot_files) |
65 | 68 | ||
66 | # list of tuples (src_name, dst_name) | 69 | cls.install_task = get_boot_files(kernel_dir, boot_files) |
67 | deploy_files = [] | ||
68 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | ||
69 | if ';' in src_entry: | ||
70 | dst_entry = tuple(src_entry.split(';')) | ||
71 | if not dst_entry[0] or not dst_entry[1]: | ||
72 | raise WicError('Malformed boot file entry: %s' % src_entry) | ||
73 | else: | ||
74 | dst_entry = (src_entry, src_entry) | ||
75 | |||
76 | logger.debug('Destination entry: %r', dst_entry) | ||
77 | deploy_files.append(dst_entry) | ||
78 | |||
79 | cls.install_task = []; | ||
80 | for deploy_entry in deploy_files: | ||
81 | src, dst = deploy_entry | ||
82 | if '*' in src: | ||
83 | # by default install files under their basename | ||
84 | entry_name_fn = os.path.basename | ||
85 | if dst != src: | ||
86 | # unless a target name was given, then treat name | ||
87 | # as a directory and append a basename | ||
88 | entry_name_fn = lambda name: \ | ||
89 | os.path.join(dst, | ||
90 | os.path.basename(name)) | ||
91 | |||
92 | srcs = glob(os.path.join(kernel_dir, src)) | ||
93 | |||
94 | logger.debug('Globbed sources: %s', ', '.join(srcs)) | ||
95 | for entry in srcs: | ||
96 | src = os.path.relpath(entry, kernel_dir) | ||
97 | entry_dst_name = entry_name_fn(entry) | ||
98 | cls.install_task.append((src, entry_dst_name)) | ||
99 | else: | ||
100 | cls.install_task.append((src, dst)) | ||
101 | |||
102 | if source_params.get('loader') != "u-boot": | 70 | if source_params.get('loader') != "u-boot": |
103 | return | 71 | return |
104 | 72 | ||
@@ -110,7 +78,7 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
110 | # Use a custom configuration for extlinux.conf | 78 | # Use a custom configuration for extlinux.conf |
111 | extlinux_conf = custom_cfg | 79 | extlinux_conf = custom_cfg |
112 | logger.debug("Using custom configuration file " | 80 | logger.debug("Using custom configuration file " |
113 | "%s for extlinux.cfg", configfile) | 81 | "%s for extlinux.conf", configfile) |
114 | else: | 82 | else: |
115 | raise WicError("configfile is specified but failed to " | 83 | raise WicError("configfile is specified but failed to " |
116 | "get it from %s." % configfile) | 84 | "get it from %s." % configfile) |
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg_pcbios.py index 32e47f1831..21f41e00bb 100644 --- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py +++ b/scripts/lib/wic/plugins/source/bootimg_pcbios.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'bootimg-pcbios' source plugin class for 'wic' | 7 | # This implements the 'bootimg_pcbios' source plugin class for 'wic' |
8 | # | 8 | # |
9 | # AUTHORS | 9 | # AUTHORS |
10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> | 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> |
@@ -27,7 +27,7 @@ class BootimgPcbiosPlugin(SourcePlugin): | |||
27 | Create MBR boot partition and install syslinux on it. | 27 | Create MBR boot partition and install syslinux on it. |
28 | """ | 28 | """ |
29 | 29 | ||
30 | name = 'bootimg-pcbios' | 30 | name = 'bootimg_pcbios' |
31 | 31 | ||
32 | @classmethod | 32 | @classmethod |
33 | def _get_bootimg_dir(cls, bootimg_dir, dirname): | 33 | def _get_bootimg_dir(cls, bootimg_dir, dirname): |
@@ -122,7 +122,7 @@ class BootimgPcbiosPlugin(SourcePlugin): | |||
122 | syslinux_conf += "DEFAULT boot\n" | 122 | syslinux_conf += "DEFAULT boot\n" |
123 | syslinux_conf += "LABEL boot\n" | 123 | syslinux_conf += "LABEL boot\n" |
124 | 124 | ||
125 | kernel = "/vmlinuz" | 125 | kernel = "/" + get_bitbake_var("KERNEL_IMAGETYPE") |
126 | syslinux_conf += "KERNEL " + kernel + "\n" | 126 | syslinux_conf += "KERNEL " + kernel + "\n" |
127 | 127 | ||
128 | syslinux_conf += "APPEND label=boot root=%s %s\n" % \ | 128 | syslinux_conf += "APPEND label=boot root=%s %s\n" % \ |
@@ -155,8 +155,8 @@ class BootimgPcbiosPlugin(SourcePlugin): | |||
155 | kernel = "%s-%s.bin" % \ | 155 | kernel = "%s-%s.bin" % \ |
156 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) | 156 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) |
157 | 157 | ||
158 | cmds = ("install -m 0644 %s/%s %s/vmlinuz" % | 158 | cmds = ("install -m 0644 %s/%s %s/%s" % |
159 | (staging_kernel_dir, kernel, hdddir), | 159 | (staging_kernel_dir, kernel, hdddir, get_bitbake_var("KERNEL_IMAGETYPE")), |
160 | "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" % | 160 | "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" % |
161 | (bootimg_dir, hdddir), | 161 | (bootimg_dir, hdddir), |
162 | "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" % | 162 | "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" % |
diff --git a/scripts/lib/wic/plugins/source/empty.py b/scripts/lib/wic/plugins/source/empty.py index 041617d648..4178912377 100644 --- a/scripts/lib/wic/plugins/source/empty.py +++ b/scripts/lib/wic/plugins/source/empty.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -7,9 +9,19 @@ | |||
7 | # To use it you must pass "empty" as argument for the "--source" parameter in | 9 | # To use it you must pass "empty" as argument for the "--source" parameter in |
8 | # the wks file. For example: | 10 | # the wks file. For example: |
9 | # part foo --source empty --ondisk sda --size="1024" --align 1024 | 11 | # part foo --source empty --ondisk sda --size="1024" --align 1024 |
12 | # | ||
13 | # The plugin supports writing zeros to the start of the | ||
14 | # partition. This is useful to overwrite old content like | ||
15 | # filesystem signatures which may be re-recognized otherwise. | ||
16 | # This feature can be enabled with | ||
17 | # '--sourceparams="[fill|size=<N>[S|s|K|k|M|G]][,][bs=<N>[S|s|K|k|M|G]]"' | ||
18 | # Conflicting or missing options throw errors. | ||
10 | 19 | ||
11 | import logging | 20 | import logging |
21 | import os | ||
12 | 22 | ||
23 | from wic import WicError | ||
24 | from wic.ksparser import sizetype | ||
13 | from wic.pluginbase import SourcePlugin | 25 | from wic.pluginbase import SourcePlugin |
14 | 26 | ||
15 | logger = logging.getLogger('wic') | 27 | logger = logging.getLogger('wic') |
@@ -17,6 +29,16 @@ logger = logging.getLogger('wic') | |||
17 | class EmptyPartitionPlugin(SourcePlugin): | 29 | class EmptyPartitionPlugin(SourcePlugin): |
18 | """ | 30 | """ |
19 | Populate unformatted empty partition. | 31 | Populate unformatted empty partition. |
32 | |||
33 | The following sourceparams are supported: | ||
34 | - fill | ||
35 | Fill the entire partition with zeros. Requires '--fixed-size' option | ||
36 | to be set. | ||
37 | - size=<N>[S|s|K|k|M|G] | ||
38 | Set the first N bytes of the partition to zero. Default unit is 'K'. | ||
39 | - bs=<N>[S|s|K|k|M|G] | ||
40 | Write at most N bytes at a time during source file creation. | ||
41 | Defaults to '1M'. Default unit is 'K'. | ||
20 | """ | 42 | """ |
21 | 43 | ||
22 | name = 'empty' | 44 | name = 'empty' |
@@ -29,4 +51,39 @@ class EmptyPartitionPlugin(SourcePlugin): | |||
29 | Called to do the actual content population for a partition i.e. it | 51 | Called to do the actual content population for a partition i.e. it |
30 | 'prepares' the partition to be incorporated into the image. | 52 | 'prepares' the partition to be incorporated into the image. |
31 | """ | 53 | """ |
32 | return | 54 | get_byte_count = sizetype('K', True) |
55 | size = 0 | ||
56 | |||
57 | if 'fill' in source_params and 'size' in source_params: | ||
58 | raise WicError("Conflicting source parameters 'fill' and 'size' specified, exiting.") | ||
59 | |||
60 | # Set the size of the zeros to be written to the partition | ||
61 | if 'fill' in source_params: | ||
62 | if part.fixed_size == 0: | ||
63 | raise WicError("Source parameter 'fill' only works with the '--fixed-size' option, exiting.") | ||
64 | size = get_byte_count(part.fixed_size) | ||
65 | elif 'size' in source_params: | ||
66 | size = get_byte_count(source_params['size']) | ||
67 | |||
68 | if size == 0: | ||
69 | # Nothing to do, create empty partition | ||
70 | return | ||
71 | |||
72 | if 'bs' in source_params: | ||
73 | bs = get_byte_count(source_params['bs']) | ||
74 | else: | ||
75 | bs = get_byte_count('1M') | ||
76 | |||
77 | # Create a binary file of the requested size filled with zeros | ||
78 | source_file = os.path.join(cr_workdir, 'empty-plugin-zeros%s.bin' % part.lineno) | ||
79 | if not os.path.exists(os.path.dirname(source_file)): | ||
80 | os.makedirs(os.path.dirname(source_file)) | ||
81 | |||
82 | quotient, remainder = divmod(size, bs) | ||
83 | with open(source_file, 'wb') as file: | ||
84 | for _ in range(quotient): | ||
85 | file.write(bytearray(bs)) | ||
86 | file.write(bytearray(remainder)) | ||
87 | |||
88 | part.size = (size + 1024 - 1) // 1024 # size in KB rounded up | ||
89 | part.source_file = source_file | ||
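The zero-fill support added to the 'empty' plugin writes the requested number of zero bytes in
blocks of at most 'bs' bytes rather than allocating one large buffer. A standalone sketch of that
write loop (the path and sizes below are illustrative assumptions):

    def write_zeros(path, size, bs=1024 * 1024):
        """Write 'size' zero bytes to 'path' in chunks of at most 'bs' bytes."""
        quotient, remainder = divmod(size, bs)
        with open(path, 'wb') as f:
            for _ in range(quotient):
                f.write(bytearray(bs))   # bytearray(n) is n zero bytes
            f.write(bytearray(remainder))

    # e.g. a 3 MiB + 512 byte zero area, written 1 MiB at a time
    write_zeros('/tmp/empty-plugin-zeros.bin', 3 * 1024 * 1024 + 512)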
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py index afc9ea0f8f..5d42eb5d3e 100644 --- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py +++ b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py | |||
@@ -1,8 +1,10 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | # DESCRIPTION | 6 | # DESCRIPTION |
5 | # This implements the 'isoimage-isohybrid' source plugin class for 'wic' | 7 | # This implements the 'isoimage_isohybrid' source plugin class for 'wic' |
6 | # | 8 | # |
7 | # AUTHORS | 9 | # AUTHORS |
8 | # Mihaly Varga <mihaly.varga (at] ni.com> | 10 | # Mihaly Varga <mihaly.varga (at] ni.com> |
@@ -33,7 +35,7 @@ class IsoImagePlugin(SourcePlugin): | |||
33 | bootloader files. | 35 | bootloader files. |
34 | 36 | ||
35 | Example kickstart file: | 37 | Example kickstart file: |
36 | part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi, \\ | 38 | part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi, \\ |
37 | image_name= IsoImage" --ondisk cd --label LIVECD | 39 | image_name= IsoImage" --ondisk cd --label LIVECD |
38 | bootloader --timeout=10 --append=" " | 40 | bootloader --timeout=10 --append=" " |
39 | 41 | ||
@@ -43,7 +45,7 @@ class IsoImagePlugin(SourcePlugin): | |||
43 | extension added by direct imager plugin) and a file named IsoImage-cd.iso | 45 | extension added by direct imager plugin) and a file named IsoImage-cd.iso |
44 | """ | 46 | """ |
45 | 47 | ||
46 | name = 'isoimage-isohybrid' | 48 | name = 'isoimage_isohybrid' |
47 | 49 | ||
48 | @classmethod | 50 | @classmethod |
49 | def do_configure_syslinux(cls, creator, cr_workdir): | 51 | def do_configure_syslinux(cls, creator, cr_workdir): |
@@ -338,10 +340,10 @@ class IsoImagePlugin(SourcePlugin): | |||
338 | cls.do_configure_grubefi(part, creator, target_dir) | 340 | cls.do_configure_grubefi(part, creator, target_dir) |
339 | 341 | ||
340 | else: | 342 | else: |
341 | raise WicError("unrecognized bootimg-efi loader: %s" % | 343 | raise WicError("unrecognized bootimg_efi loader: %s" % |
342 | source_params['loader']) | 344 | source_params['loader']) |
343 | except KeyError: | 345 | except KeyError: |
344 | raise WicError("bootimg-efi requires a loader, none specified") | 346 | raise WicError("bootimg_efi requires a loader, none specified") |
345 | 347 | ||
346 | # Create efi.img that contains bootloader files for EFI booting | 348 | # Create efi.img that contains bootloader files for EFI booting |
347 | # if ISODIR didn't exist or didn't contain it | 349 | # if ISODIR didn't exist or didn't contain it |
diff --git a/scripts/lib/wic/plugins/source/rawcopy.py b/scripts/lib/wic/plugins/source/rawcopy.py index fa7b1eb8ac..21903c2f23 100644 --- a/scripts/lib/wic/plugins/source/rawcopy.py +++ b/scripts/lib/wic/plugins/source/rawcopy.py | |||
@@ -1,9 +1,13 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 5 | # |
4 | 6 | ||
5 | import logging | 7 | import logging |
6 | import os | 8 | import os |
9 | import signal | ||
10 | import subprocess | ||
7 | 11 | ||
8 | from wic import WicError | 12 | from wic import WicError |
9 | from wic.pluginbase import SourcePlugin | 13 | from wic.pluginbase import SourcePlugin |
@@ -21,6 +25,10 @@ class RawCopyPlugin(SourcePlugin): | |||
21 | 25 | ||
22 | @staticmethod | 26 | @staticmethod |
23 | def do_image_label(fstype, dst, label): | 27 | def do_image_label(fstype, dst, label): |
28 | # don't create label when fstype is none | ||
29 | if fstype == 'none': | ||
30 | return | ||
31 | |||
24 | if fstype.startswith('ext'): | 32 | if fstype.startswith('ext'): |
25 | cmd = 'tune2fs -L %s %s' % (label, dst) | 33 | cmd = 'tune2fs -L %s %s' % (label, dst) |
26 | elif fstype in ('msdos', 'vfat'): | 34 | elif fstype in ('msdos', 'vfat'): |
@@ -38,6 +46,26 @@ class RawCopyPlugin(SourcePlugin): | |||
38 | 46 | ||
39 | exec_cmd(cmd) | 47 | exec_cmd(cmd) |
40 | 48 | ||
49 | @staticmethod | ||
50 | def do_image_uncompression(src, dst, workdir): | ||
51 | def subprocess_setup(): | ||
52 | # Python installs a SIGPIPE handler by default. This is usually not what | ||
53 | # non-Python subprocesses expect. | ||
54 | # SIGPIPE errors are known issues with gzip/bash | ||
55 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) | ||
56 | |||
57 | extension = os.path.splitext(src)[1] | ||
58 | decompressor = { | ||
59 | ".bz2": "bzip2", | ||
60 | ".gz": "gzip", | ||
61 | ".xz": "xz", | ||
62 | ".zst": "zstd -f", | ||
63 | }.get(extension) | ||
64 | if not decompressor: | ||
65 | raise WicError("Not supported compressor filename extension: %s" % extension) | ||
66 | cmd = "%s -dc %s > %s" % (decompressor, src, dst) | ||
67 | subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=workdir) | ||
68 | |||
41 | @classmethod | 69 | @classmethod |
42 | def do_prepare_partition(cls, part, source_params, cr, cr_workdir, | 70 | def do_prepare_partition(cls, part, source_params, cr, cr_workdir, |
43 | oe_builddir, bootimg_dir, kernel_dir, | 71 | oe_builddir, bootimg_dir, kernel_dir, |
@@ -56,7 +84,13 @@ class RawCopyPlugin(SourcePlugin): | |||
56 | if 'file' not in source_params: | 84 | if 'file' not in source_params: |
57 | raise WicError("No file specified") | 85 | raise WicError("No file specified") |
58 | 86 | ||
59 | src = os.path.join(kernel_dir, source_params['file']) | 87 | if 'unpack' in source_params: |
88 | img = os.path.join(kernel_dir, source_params['file']) | ||
89 | src = os.path.join(cr_workdir, os.path.splitext(source_params['file'])[0]) | ||
90 | RawCopyPlugin.do_image_uncompression(img, src, cr_workdir) | ||
91 | else: | ||
92 | src = os.path.join(kernel_dir, source_params['file']) | ||
93 | |||
60 | dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno)) | 94 | dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno)) |
61 | 95 | ||
62 | if not os.path.exists(os.path.dirname(dst)): | 96 | if not os.path.exists(os.path.dirname(dst)): |
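The new 'unpack' source parameter lets rawcopy decompress the source image into the work
directory before it is copied into the partition, choosing the tool from the file extension.
A standalone sketch of that dispatch (subprocess.check_call is used here so failures raise,
which the plugin's subprocess.call does not do):

    import os
    import signal
    import subprocess

    def uncompress(src, dst, workdir):
        """Decompress 'src' into 'dst' using the tool matching its extension."""
        def subprocess_setup():
            # Restore default SIGPIPE handling for the shell pipeline.
            signal.signal(signal.SIGPIPE, signal.SIG_DFL)

        decompressor = {
            ".bz2": "bzip2",
            ".gz": "gzip",
            ".xz": "xz",
            ".zst": "zstd -f",
        }.get(os.path.splitext(src)[1])
        if not decompressor:
            raise ValueError("Unsupported compressed file extension: %s" % src)
        subprocess.check_call("%s -dc %s > %s" % (decompressor, src, dst),
                              preexec_fn=subprocess_setup, shell=True, cwd=workdir)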
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py index 2e34e715ca..06fce06bb1 100644 --- a/scripts/lib/wic/plugins/source/rootfs.py +++ b/scripts/lib/wic/plugins/source/rootfs.py | |||
@@ -35,22 +35,22 @@ class RootfsPlugin(SourcePlugin): | |||
35 | @staticmethod | 35 | @staticmethod |
36 | def __validate_path(cmd, rootfs_dir, path): | 36 | def __validate_path(cmd, rootfs_dir, path): |
37 | if os.path.isabs(path): | 37 | if os.path.isabs(path): |
38 | logger.error("%s: Must be relative: %s" % (cmd, orig_path)) | 38 | logger.error("%s: Must be relative: %s" % (cmd, path)) |
39 | sys.exit(1) | 39 | sys.exit(1) |
40 | 40 | ||
41 | # Disallow climbing outside of parent directory using '..', | 41 | # Disallow climbing outside of parent directory using '..', |
42 | # because doing so could be quite disastrous (we will delete the | 42 | # because doing so could be quite disastrous (we will delete the |
43 | # directory, or modify a directory outside OpenEmbedded). | 43 | # directory, or modify a directory outside OpenEmbedded). |
44 | full_path = os.path.realpath(os.path.join(rootfs_dir, path)) | 44 | full_path = os.path.abspath(os.path.join(rootfs_dir, path)) |
45 | if not full_path.startswith(os.path.realpath(rootfs_dir)): | 45 | if not full_path.startswith(os.path.realpath(rootfs_dir)): |
46 | logger.error("%s: Must point inside the rootfs:" % (cmd, path)) | 46 | logger.error("%s: Must point inside the rootfs: %s" % (cmd, path)) |
47 | sys.exit(1) | 47 | sys.exit(1) |
48 | 48 | ||
49 | return full_path | 49 | return full_path |
50 | 50 | ||
51 | @staticmethod | 51 | @staticmethod |
52 | def __get_rootfs_dir(rootfs_dir): | 52 | def __get_rootfs_dir(rootfs_dir): |
53 | if os.path.isdir(rootfs_dir): | 53 | if rootfs_dir and os.path.isdir(rootfs_dir): |
54 | return os.path.realpath(rootfs_dir) | 54 | return os.path.realpath(rootfs_dir) |
55 | 55 | ||
56 | image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir) | 56 | image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir) |
@@ -97,6 +97,9 @@ class RootfsPlugin(SourcePlugin): | |||
97 | part.has_fstab = os.path.exists(os.path.join(part.rootfs_dir, "etc/fstab")) | 97 | part.has_fstab = os.path.exists(os.path.join(part.rootfs_dir, "etc/fstab")) |
98 | pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo") | 98 | pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo") |
99 | if not os.path.lexists(pseudo_dir): | 99 | if not os.path.lexists(pseudo_dir): |
100 | pseudo_dir = os.path.join(cls.__get_rootfs_dir(None), '../pseudo') | ||
101 | |||
102 | if not os.path.lexists(pseudo_dir): | ||
100 | logger.warn("%s folder does not exist. " | 103 | logger.warn("%s folder does not exist. " |
101 | "Usernames and permissions will be invalid " % pseudo_dir) | 104 | "Usernames and permissions will be invalid " % pseudo_dir) |
102 | pseudo_dir = None | 105 | pseudo_dir = None |
@@ -221,7 +224,7 @@ class RootfsPlugin(SourcePlugin): | |||
221 | if part.update_fstab_in_rootfs and part.has_fstab and not part.no_fstab_update: | 224 | if part.update_fstab_in_rootfs and part.has_fstab and not part.no_fstab_update: |
222 | fstab_path = os.path.join(new_rootfs, "etc/fstab") | 225 | fstab_path = os.path.join(new_rootfs, "etc/fstab") |
223 | # Assume that fstab should always be owned by root with fixed permissions | 226 | # Assume that fstab should always be owned by root with fixed permissions |
224 | install_cmd = "install -m 0644 %s %s" % (part.updated_fstab_path, fstab_path) | 227 | install_cmd = "install -m 0644 -p %s %s" % (part.updated_fstab_path, fstab_path) |
225 | if new_pseudo: | 228 | if new_pseudo: |
226 | pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo) | 229 | pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo) |
227 | else: | 230 | else: |
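The __validate_path change above normalises the candidate path with os.path.abspath, so symlinks
inside the rootfs are no longer resolved before the containment check, while the rootfs root
itself is still canonicalised. A minimal sketch of the resulting check (names are illustrative
and ValueError stands in for the plugin's logger.error/sys.exit):

    import os

    def validate_rootfs_path(rootfs_dir, path):
        """Return the absolute form of 'path', refusing paths that escape the rootfs."""
        if os.path.isabs(path):
            raise ValueError("Must be relative: %s" % path)
        full_path = os.path.abspath(os.path.join(rootfs_dir, path))
        if not full_path.startswith(os.path.realpath(rootfs_dir)):
            raise ValueError("Must point inside the rootfs: %s" % path)
        return full_path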