diff options
Diffstat (limited to 'scripts/lib')
-rw-r--r-- | scripts/lib/build_perf/html/measurement_chart.html | 140 | ||||
-rw-r--r-- | scripts/lib/build_perf/html/report.html | 124 | ||||
-rw-r--r-- | scripts/lib/build_perf/report.py | 5 | ||||
-rw-r--r-- | scripts/lib/devtool/standard.py | 206 | ||||
-rw-r--r-- | scripts/lib/devtool/upgrade.py | 59 | ||||
-rw-r--r-- | scripts/lib/recipetool/create.py | 44 | ||||
-rw-r--r-- | scripts/lib/recipetool/create_go.py | 4 | ||||
-rw-r--r-- | scripts/lib/recipetool/create_npm.py | 95 | ||||
-rw-r--r-- | scripts/lib/resulttool/junit.py | 77 | ||||
-rw-r--r-- | scripts/lib/scriptutils.py | 15 | ||||
-rw-r--r-- | scripts/lib/wic/engine.py | 2 | ||||
-rw-r--r-- | scripts/lib/wic/partition.py | 37 | ||||
-rw-r--r-- | scripts/lib/wic/plugins/source/bootimg-efi.py | 4 | ||||
-rw-r--r-- | scripts/lib/wic/plugins/source/bootimg-partition.py | 39 |
14 files changed, 474 insertions, 377 deletions
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html index 65f1a227ad..05bd84e6ce 100644 --- a/scripts/lib/build_perf/html/measurement_chart.html +++ b/scripts/lib/build_perf/html/measurement_chart.html | |||
@@ -1,50 +1,100 @@ | |||
1 | <script type="text/javascript"> | 1 | <script type="module"> |
2 | chartsDrawing += 1; | 2 | // Get raw data |
3 | google.charts.setOnLoadCallback(drawChart_{{ chart_elem_id }}); | 3 | const rawData = [ |
4 | function drawChart_{{ chart_elem_id }}() { | 4 | {% for sample in measurement.samples %} |
5 | var data = new google.visualization.DataTable(); | 5 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'], |
6 | {% endfor %} | ||
7 | ]; | ||
6 | 8 | ||
7 | // Chart options | 9 | const convertToMinute = (time) => { |
8 | var options = { | 10 | return time[0]*60 + time[1] + time[2]/60 + time[3]/3600; |
9 | theme : 'material', | 11 | } |
10 | legend: 'none', | ||
11 | hAxis: { format: '', title: 'Commit number', | ||
12 | minValue: {{ chart_opts.haxis.min }}, | ||
13 | maxValue: {{ chart_opts.haxis.max }} }, | ||
14 | {% if measurement.type == 'time' %} | ||
15 | vAxis: { format: 'h:mm:ss' }, | ||
16 | {% else %} | ||
17 | vAxis: { format: '' }, | ||
18 | {% endif %} | ||
19 | pointSize: 5, | ||
20 | chartArea: { left: 80, right: 15 }, | ||
21 | }; | ||
22 | 12 | ||
23 | // Define data columns | 13 | // Update value format to either minutes or leave as size value |
24 | data.addColumn('number', 'Commit'); | 14 | const updateValue = (value) => { |
25 | data.addColumn('{{ measurement.value_type.gv_data_type }}', | 15 | // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds] |
26 | '{{ measurement.value_type.quantity }}'); | 16 | return Array.isArray(value) ? convertToMinute(value) : value |
27 | // Add data rows | 17 | } |
28 | data.addRows([ | ||
29 | {% for sample in measurement.samples %} | ||
30 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}], | ||
31 | {% endfor %} | ||
32 | ]); | ||
33 | 18 | ||
34 | // Finally, draw the chart | 19 | // Convert raw data to the format: [time, value] |
35 | chart_div = document.getElementById('{{ chart_elem_id }}'); | 20 | const data = rawData.map(([commit, value, time]) => { |
36 | var chart = new google.visualization.LineChart(chart_div); | 21 | return [ |
37 | google.visualization.events.addListener(chart, 'ready', function () { | 22 | // The Date object takes values in milliseconds rather than seconds. So to use a Unix timestamp we have to multiply it by 1000. |
38 | //chart_div = document.getElementById('{{ chart_elem_id }}'); | 23 | new Date(time * 1000).getTime(), |
39 | //chart_div.innerHTML = '<img src="' + chart.getImageURI() + '">'; | 24 | // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds] |
40 | png_div = document.getElementById('{{ chart_elem_id }}_png'); | 25 | updateValue(value) |
41 | png_div.outerHTML = '<a id="{{ chart_elem_id }}_png" href="' + chart.getImageURI() + '">PNG</a>'; | 26 | ] |
42 | console.log("CHART READY: {{ chart_elem_id }}"); | 27 | }); |
43 | chartsDrawing -= 1; | 28 | |
44 | if (chartsDrawing == 0) | 29 | // Set chart options |
45 | console.log("ALL CHARTS READY"); | 30 | const option = { |
31 | tooltip: { | ||
32 | trigger: 'axis', | ||
33 | enterable: true, | ||
34 | position: function (point, params, dom, rect, size) { | ||
35 | return [point[0]-150, '10%']; | ||
36 | }, | ||
37 | formatter: function (param) { | ||
38 | const value = param[0].value[1] | ||
39 | const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value) | ||
40 | // Add commit hash to the tooltip as a link | ||
41 | const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}` | ||
42 | if ('{{ measurement.value_type.quantity }}' == 'time') { | ||
43 | const hours = Math.floor(value/60) | ||
44 | const minutes = Math.floor(value % 60) | ||
45 | const seconds = Math.floor((value * 60) % 60) | ||
46 | return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <br/> <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>` | ||
47 | } | ||
48 | return `<strong>Size:</strong> ${value.toFixed(2)} MB, <br/> <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>` | ||
49 | ;} | ||
50 | }, | ||
51 | xAxis: { | ||
52 | type: 'time', | ||
53 | }, | ||
54 | yAxis: { | ||
55 | name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB', | ||
56 | type: 'value', | ||
57 | min: function(value) { | ||
58 | return Math.round(value.min - 0.5); | ||
59 | }, | ||
60 | max: function(value) { | ||
61 | return Math.round(value.max + 0.5); | ||
62 | } | ||
63 | }, | ||
64 | dataZoom: [ | ||
65 | { | ||
66 | type: 'slider', | ||
67 | xAxisIndex: 0, | ||
68 | filterMode: 'none' | ||
69 | }, | ||
70 | ], | ||
71 | series: [ | ||
72 | { | ||
73 | name: '{{ measurement.value_type.quantity }}', | ||
74 | type: 'line', | ||
75 | step: 'start', | ||
76 | symbol: 'none', | ||
77 | data: data | ||
78 | } | ||
79 | ] | ||
80 | }; | ||
81 | |||
82 | // Draw chart | ||
83 | const chart_div = document.getElementById('{{ chart_elem_id }}'); | ||
84 | // Set dark mode | ||
85 | let measurement_chart | ||
86 | if (window.matchMedia('(prefers-color-scheme: dark)').matches) { | ||
87 | measurement_chart= echarts.init(chart_div, 'dark', { | ||
88 | height: 320 | ||
46 | }); | 89 | }); |
47 | chart.draw(data, options); | 90 | } else { |
48 | } | 91 | measurement_chart= echarts.init(chart_div, null, { |
92 | height: 320 | ||
93 | }); | ||
94 | } | ||
95 | // Change chart size with browser resize | ||
96 | window.addEventListener('resize', function() { | ||
97 | measurement_chart.resize(); | ||
98 | }); | ||
99 | measurement_chart.setOption(option); | ||
49 | </script> | 100 | </script> |
50 | |||
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html index d1ba6f2578..537ed3ee52 100644 --- a/scripts/lib/build_perf/html/report.html +++ b/scripts/lib/build_perf/html/report.html | |||
@@ -3,11 +3,7 @@ | |||
3 | <head> | 3 | <head> |
4 | {# Scripts, for visualization#} | 4 | {# Scripts, for visualization#} |
5 | <!--START-OF-SCRIPTS--> | 5 | <!--START-OF-SCRIPTS--> |
6 | <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script> | 6 | <script src="https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js"></script> |
7 | <script type="text/javascript"> | ||
8 | google.charts.load('current', {'packages':['corechart']}); | ||
9 | var chartsDrawing = 0; | ||
10 | </script> | ||
11 | 7 | ||
12 | {# Render measurement result charts #} | 8 | {# Render measurement result charts #} |
13 | {% for test in test_data %} | 9 | {% for test in test_data %} |
@@ -23,28 +19,29 @@ var chartsDrawing = 0; | |||
23 | 19 | ||
24 | {# Styles #} | 20 | {# Styles #} |
25 | <style> | 21 | <style> |
22 | :root { | ||
23 | --text: #000; | ||
24 | --bg: #fff; | ||
25 | --h2heading: #707070; | ||
26 | --link: #0000EE; | ||
27 | --trtopborder: #9ca3af; | ||
28 | --trborder: #e5e7eb; | ||
29 | --chartborder: #f0f0f0; | ||
30 | } | ||
26 | .meta-table { | 31 | .meta-table { |
27 | font-size: 14px; | 32 | font-size: 14px; |
28 | text-align: left; | 33 | text-align: left; |
29 | border-collapse: collapse; | 34 | border-collapse: collapse; |
30 | } | 35 | } |
31 | .meta-table tr:nth-child(even){background-color: #f2f2f2} | ||
32 | meta-table th, .meta-table td { | ||
33 | padding: 4px; | ||
34 | } | ||
35 | .summary { | 36 | .summary { |
36 | margin: 0; | ||
37 | font-size: 14px; | 37 | font-size: 14px; |
38 | text-align: left; | 38 | text-align: left; |
39 | border-collapse: collapse; | 39 | border-collapse: collapse; |
40 | } | 40 | } |
41 | summary th, .meta-table td { | ||
42 | padding: 4px; | ||
43 | } | ||
44 | .measurement { | 41 | .measurement { |
45 | padding: 8px 0px 8px 8px; | 42 | padding: 8px 0px 8px 8px; |
46 | border: 2px solid #f0f0f0; | 43 | border: 2px solid var(--chartborder); |
47 | margin-bottom: 10px; | 44 | margin: 1.5rem 0; |
48 | } | 45 | } |
49 | .details { | 46 | .details { |
50 | margin: 0; | 47 | margin: 0; |
@@ -64,18 +61,71 @@ summary th, .meta-table td { | |||
64 | background-color: #f0f0f0; | 61 | background-color: #f0f0f0; |
65 | margin-left: 10px; | 62 | margin-left: 10px; |
66 | } | 63 | } |
67 | hr { | 64 | .card-container { |
68 | color: #f0f0f0; | 65 | border-bottom-width: 1px; |
66 | padding: 1.25rem 3rem; | ||
67 | box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1); | ||
68 | border-radius: 0.25rem; | ||
69 | } | ||
70 | body { | ||
71 | font-family: 'Helvetica', sans-serif; | ||
72 | margin: 3rem 8rem; | ||
73 | background-color: var(--bg); | ||
74 | color: var(--text); | ||
75 | } | ||
76 | h1 { | ||
77 | text-align: center; | ||
69 | } | 78 | } |
70 | h2 { | 79 | h2 { |
71 | font-size: 20px; | 80 | font-size: 1.5rem; |
72 | margin-bottom: 0px; | 81 | margin-bottom: 0px; |
73 | color: #707070; | 82 | color: var(--h2heading); |
83 | padding-top: 1.5rem; | ||
74 | } | 84 | } |
75 | h3 { | 85 | h3 { |
76 | font-size: 16px; | 86 | font-size: 1.3rem; |
77 | margin: 0px; | 87 | margin: 0px; |
78 | color: #707070; | 88 | color: var(--h2heading); |
89 | padding: 1.5rem 0; | ||
90 | } | ||
91 | h4 { | ||
92 | font-size: 14px; | ||
93 | font-weight: lighter; | ||
94 | line-height: 1.2rem; | ||
95 | margin: auto; | ||
96 | padding-top: 1rem; | ||
97 | } | ||
98 | table { | ||
99 | margin-top: 1.5rem; | ||
100 | line-height: 2rem; | ||
101 | } | ||
102 | tr { | ||
103 | border-bottom: 1px solid var(--trborder); | ||
104 | } | ||
105 | tr:first-child { | ||
106 | border-bottom: 1px solid var(--trtopborder); | ||
107 | } | ||
108 | tr:last-child { | ||
109 | border-bottom: none; | ||
110 | } | ||
111 | a { | ||
112 | text-decoration: none; | ||
113 | font-weight: bold; | ||
114 | color: var(--link); | ||
115 | } | ||
116 | a:hover { | ||
117 | color: #8080ff; | ||
118 | } | ||
119 | @media (prefers-color-scheme: dark) { | ||
120 | :root { | ||
121 | --text: #e9e8fa; | ||
122 | --bg: #0F0C28; | ||
123 | --h2heading: #B8B7CB; | ||
124 | --link: #87cefa; | ||
125 | --trtopborder: #394150; | ||
126 | --trborder: #212936; | ||
127 | --chartborder: #b1b0bf; | ||
128 | } | ||
79 | } | 129 | } |
80 | </style> | 130 | </style> |
81 | 131 | ||
@@ -83,13 +133,14 @@ h3 { | |||
83 | </head> | 133 | </head> |
84 | 134 | ||
85 | {% macro poky_link(commit) -%} | 135 | {% macro poky_link(commit) -%} |
86 | <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a> | 136 | <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a> |
87 | {%- endmacro %} | 137 | {%- endmacro %} |
88 | 138 | ||
89 | <body><div style="width: 700px"> | 139 | <body><div> |
140 | <h1 style="text-align: center;">Performance Test Report</h1> | ||
90 | {# Test metadata #} | 141 | {# Test metadata #} |
91 | <h2>General</h2> | 142 | <h2>General</h2> |
92 | <hr> | 143 | <h4>The table provides an overview of the comparison between two selected commits from the same branch.</h4> |
93 | <table class="meta-table" style="width: 100%"> | 144 | <table class="meta-table" style="width: 100%"> |
94 | <tr> | 145 | <tr> |
95 | <th></th> | 146 | <th></th> |
@@ -112,19 +163,21 @@ h3 { | |||
112 | 163 | ||
113 | {# Test result summary #} | 164 | {# Test result summary #} |
114 | <h2>Test result summary</h2> | 165 | <h2>Test result summary</h2> |
115 | <hr> | 166 | <h4>The test summary presents a thorough breakdown of each test conducted on the branch, including details such as build time and disk space consumption. Additionally, it gives insights into the average time taken for test execution, along with absolute and relative values for a better understanding.</h4> |
116 | <table class="summary" style="width: 100%"> | 167 | <table class="summary" style="width: 100%"> |
168 | <tr> | ||
169 | <th>Test name</th> | ||
170 | <th>Measurement description</th> | ||
171 | <th>Mean value</th> | ||
172 | <th>Absolute difference</th> | ||
173 | <th>Relative difference</th> | ||
174 | </tr> | ||
117 | {% for test in test_data %} | 175 | {% for test in test_data %} |
118 | {% if loop.index is even %} | ||
119 | {% set row_style = 'style="background-color: #f2f2f2"' %} | ||
120 | {% else %} | ||
121 | {% set row_style = 'style="background-color: #ffffff"' %} | ||
122 | {% endif %} | ||
123 | {% if test.status == 'SUCCESS' %} | 176 | {% if test.status == 'SUCCESS' %} |
124 | {% for measurement in test.measurements %} | 177 | {% for measurement in test.measurements %} |
125 | <tr {{ row_style }}> | 178 | <tr {{ row_style }}> |
126 | {% if loop.index == 1 %} | 179 | {% if loop.index == 1 %} |
127 | <td>{{ test.name }}: {{ test.description }}</td> | 180 | <td><a href=#{{test.name}}>{{ test.name }}: {{ test.description }}</a></td> |
128 | {% else %} | 181 | {% else %} |
129 | {# add empty cell in place of the test name#} | 182 | {# add empty cell in place of the test name#} |
130 | <td></td> | 183 | <td></td> |
@@ -153,10 +206,12 @@ h3 { | |||
153 | </table> | 206 | </table> |
154 | 207 | ||
155 | {# Detailed test results #} | 208 | {# Detailed test results #} |
209 | <h2>Test details</h2> | ||
210 | <h4>The following section provides details of each test, accompanied by charts representing build time and disk usage over time or by commit number.</h4> | ||
156 | {% for test in test_data %} | 211 | {% for test in test_data %} |
157 | <h2>{{ test.name }}: {{ test.description }}</h2> | 212 | <h3 style="color: #000;" id={{test.name}}>{{ test.name }}: {{ test.description }}</h3> |
158 | <hr> | ||
159 | {% if test.status == 'SUCCESS' %} | 213 | {% if test.status == 'SUCCESS' %} |
214 | <div class="card-container"> | ||
160 | {% for measurement in test.measurements %} | 215 | {% for measurement in test.measurements %} |
161 | <div class="measurement"> | 216 | <div class="measurement"> |
162 | <h3>{{ measurement.description }}</h3> | 217 | <h3>{{ measurement.description }}</h3> |
@@ -275,7 +330,8 @@ h3 { | |||
275 | {% endif %} | 330 | {% endif %} |
276 | {% endif %} | 331 | {% endif %} |
277 | </div> | 332 | </div> |
278 | {% endfor %} | 333 | {% endfor %} |
334 | </div> | ||
279 | {# Unsuccessful test #} | 335 | {# Unsuccessful test #} |
280 | {% else %} | 336 | {% else %} |
281 | <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }} | 337 | <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }} |
diff --git a/scripts/lib/build_perf/report.py b/scripts/lib/build_perf/report.py index ab77424cc7..f4e6a92e09 100644 --- a/scripts/lib/build_perf/report.py +++ b/scripts/lib/build_perf/report.py | |||
@@ -294,7 +294,7 @@ class SizeVal(MeasurementVal): | |||
294 | return "null" | 294 | return "null" |
295 | return self / 1024 | 295 | return self / 1024 |
296 | 296 | ||
297 | def measurement_stats(meas, prefix=''): | 297 | def measurement_stats(meas, prefix='', time=0): |
298 | """Get statistics of a measurement""" | 298 | """Get statistics of a measurement""" |
299 | if not meas: | 299 | if not meas: |
300 | return {prefix + 'sample_cnt': 0, | 300 | return {prefix + 'sample_cnt': 0, |
@@ -319,6 +319,8 @@ def measurement_stats(meas, prefix=''): | |||
319 | stats['quantity'] = val_cls.quantity | 319 | stats['quantity'] = val_cls.quantity |
320 | stats[prefix + 'sample_cnt'] = len(values) | 320 | stats[prefix + 'sample_cnt'] = len(values) |
321 | 321 | ||
322 | # Add start time for both type sysres and disk usage | ||
323 | start_time = time | ||
322 | mean_val = val_cls(mean(values)) | 324 | mean_val = val_cls(mean(values)) |
323 | min_val = val_cls(min(values)) | 325 | min_val = val_cls(min(values)) |
324 | max_val = val_cls(max(values)) | 326 | max_val = val_cls(max(values)) |
@@ -334,6 +336,7 @@ def measurement_stats(meas, prefix=''): | |||
334 | stats[prefix + 'max'] = max_val | 336 | stats[prefix + 'max'] = max_val |
335 | stats[prefix + 'minus'] = val_cls(mean_val - min_val) | 337 | stats[prefix + 'minus'] = val_cls(mean_val - min_val) |
336 | stats[prefix + 'plus'] = val_cls(max_val - mean_val) | 338 | stats[prefix + 'plus'] = val_cls(max_val - mean_val) |
339 | stats[prefix + 'start_time'] = start_time | ||
337 | 340 | ||
338 | return stats | 341 | return stats |
339 | 342 | ||
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index bd009f44b1..b2e1a6ca3a 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
@@ -387,6 +387,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | |||
387 | ret[split[3]] = split[0:3] | 387 | ret[split[3]] = split[0:3] |
388 | return ret | 388 | return ret |
389 | 389 | ||
390 | def _git_modified(repodir): | ||
391 | """List the difference between HEAD and the index""" | ||
392 | import bb | ||
393 | cmd = ['git', 'status', '--porcelain'] | ||
394 | out, _ = bb.process.run(cmd, cwd=repodir) | ||
395 | ret = [] | ||
396 | if out: | ||
397 | for line in out.split("\n"): | ||
398 | if line and not line.startswith('??'): | ||
399 | ret.append(line[3:]) | ||
400 | return ret | ||
401 | |||
402 | |||
390 | def _git_exclude_path(srctree, path): | 403 | def _git_exclude_path(srctree, path): |
391 | """Return pathspec (list of paths) that excludes certain path""" | 404 | """Return pathspec (list of paths) that excludes certain path""" |
392 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - | 405 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - |
@@ -460,32 +473,6 @@ def sync(args, config, basepath, workspace): | |||
460 | finally: | 473 | finally: |
461 | tinfoil.shutdown() | 474 | tinfoil.shutdown() |
462 | 475 | ||
463 | def symlink_oelocal_files_srctree(rd, srctree): | ||
464 | import oe.patch | ||
465 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): | ||
466 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree | ||
467 | # (otherwise the recipe won't build as expected) | ||
468 | local_files_dir = os.path.join(srctree, 'oe-local-files') | ||
469 | addfiles = [] | ||
470 | for root, _, files in os.walk(local_files_dir): | ||
471 | relpth = os.path.relpath(root, local_files_dir) | ||
472 | if relpth != '.': | ||
473 | bb.utils.mkdirhier(os.path.join(srctree, relpth)) | ||
474 | for fn in files: | ||
475 | if fn == '.gitignore': | ||
476 | continue | ||
477 | destpth = os.path.join(srctree, relpth, fn) | ||
478 | if os.path.exists(destpth): | ||
479 | os.unlink(destpth) | ||
480 | if relpth != '.': | ||
481 | back_relpth = os.path.relpath(local_files_dir, root) | ||
482 | os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth) | ||
483 | else: | ||
484 | os.symlink('oe-local-files/%s' % fn, destpth) | ||
485 | addfiles.append(os.path.join(relpth, fn)) | ||
486 | if addfiles: | ||
487 | oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd) | ||
488 | |||
489 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): | 476 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): |
490 | """Extract sources of a recipe""" | 477 | """Extract sources of a recipe""" |
491 | import oe.recipeutils | 478 | import oe.recipeutils |
@@ -657,9 +644,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
657 | elif not os.path.exists(workshareddir): | 644 | elif not os.path.exists(workshareddir): |
658 | oe.path.copyhardlinktree(srcsubdir, workshareddir) | 645 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
659 | 646 | ||
660 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') | ||
661 | srctree_localdir = os.path.join(srctree, 'oe-local-files') | ||
662 | |||
663 | if sync: | 647 | if sync: |
664 | try: | 648 | try: |
665 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) | 649 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) |
@@ -674,29 +658,8 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
674 | except bb.process.ExecutionError as e: | 658 | except bb.process.ExecutionError as e: |
675 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) | 659 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) |
676 | 660 | ||
677 | # Move the oe-local-files directory to srctree. | ||
678 | # As oe-local-files is not part of the constructed git tree, | ||
679 | # removing it directly during the synchronization might surprise | ||
680 | # the user. Instead, we move it to oe-local-files.bak and remind | ||
681 | # the user in the log message. | ||
682 | if os.path.exists(srctree_localdir + '.bak'): | ||
683 | shutil.rmtree(srctree_localdir + '.bak') | ||
684 | |||
685 | if os.path.exists(srctree_localdir): | ||
686 | logger.info('Backing up current local file directory %s' % srctree_localdir) | ||
687 | shutil.move(srctree_localdir, srctree_localdir + '.bak') | ||
688 | |||
689 | if os.path.exists(tempdir_localdir): | ||
690 | logger.info('Syncing local source files to srctree...') | ||
691 | shutil.copytree(tempdir_localdir, srctree_localdir) | ||
692 | else: | 661 | else: |
693 | # Move oe-local-files directory to srctree | ||
694 | if os.path.exists(tempdir_localdir): | ||
695 | logger.info('Adding local source files to srctree...') | ||
696 | shutil.move(tempdir_localdir, srcsubdir) | ||
697 | |||
698 | shutil.move(srcsubdir, srctree) | 662 | shutil.move(srcsubdir, srctree) |
699 | symlink_oelocal_files_srctree(d, srctree) | ||
700 | 663 | ||
701 | if is_kernel_yocto: | 664 | if is_kernel_yocto: |
702 | logger.info('Copying kernel config to srctree') | 665 | logger.info('Copying kernel config to srctree') |
@@ -852,34 +815,22 @@ def modify(args, config, basepath, workspace): | |||
852 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): | 815 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): |
853 | oe.path.copyhardlinktree(srcdir, srctree) | 816 | oe.path.copyhardlinktree(srcdir, srctree) |
854 | workdir = rd.getVar('WORKDIR') | 817 | workdir = rd.getVar('WORKDIR') |
818 | unpackdir = rd.getVar('UNPACKDIR') | ||
855 | srcsubdir = rd.getVar('S') | 819 | srcsubdir = rd.getVar('S') |
856 | localfilesdir = os.path.join(srctree, 'oe-local-files') | 820 | localfilesdir = os.path.join(srctree, 'oe-local-files') |
857 | # Move local source files into separate subdir | ||
858 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] | ||
859 | local_files = oe.recipeutils.get_recipe_local_files(rd) | ||
860 | 821 | ||
861 | for key in local_files.copy(): | 822 | # Add locally copied files to gitignore as we add back to the metadata directly |
862 | if key.endswith('scc'): | 823 | local_files = oe.recipeutils.get_recipe_local_files(rd) |
863 | sccfile = open(local_files[key], 'r') | ||
864 | for l in sccfile: | ||
865 | line = l.split() | ||
866 | if line and line[0] in ('kconf', 'patch'): | ||
867 | cfg = os.path.join(os.path.dirname(local_files[key]), line[-1]) | ||
868 | if not cfg in local_files.values(): | ||
869 | local_files[line[-1]] = cfg | ||
870 | shutil.copy2(cfg, workdir) | ||
871 | sccfile.close() | ||
872 | |||
873 | # Ignore local files with subdir={BP} | ||
874 | srcabspath = os.path.abspath(srcsubdir) | 824 | srcabspath = os.path.abspath(srcsubdir) |
875 | local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] | 825 | local_files = [fname for fname in local_files if |
826 | os.path.exists(os.path.join(unpackdir, fname)) and | ||
827 | srcabspath == unpackdir] | ||
876 | if local_files: | 828 | if local_files: |
877 | for fname in local_files: | 829 | with open(os.path.join(srctree, '.gitignore'), 'a+') as f: |
878 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) | 830 | f.write('# Ignore local files, by default. Remove following lines' |
879 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: | 831 | 'if you want to commit the directory to Git\n') |
880 | f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n') | 832 | for fname in local_files: |
881 | 833 | f.write('%s\n' % fname) | |
882 | symlink_oelocal_files_srctree(rd, srctree) | ||
883 | 834 | ||
884 | task = 'do_configure' | 835 | task = 'do_configure' |
885 | res = tinfoil.build_targets(pn, task, handle_events=True) | 836 | res = tinfoil.build_targets(pn, task, handle_events=True) |
@@ -904,7 +855,10 @@ def modify(args, config, basepath, workspace): | |||
904 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) | 855 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) |
905 | commits["."] = stdout.split() | 856 | commits["."] = stdout.split() |
906 | check_commits = True | 857 | check_commits = True |
907 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | 858 | try: |
859 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | ||
860 | except bb.process.ExecutionError: | ||
861 | stdout = "" | ||
908 | for line in stdout.splitlines(): | 862 | for line in stdout.splitlines(): |
909 | (rev, submodule_path) = line.split() | 863 | (rev, submodule_path) = line.split() |
910 | submodule = os.path.relpath(submodule_path, srctree) | 864 | submodule = os.path.relpath(submodule_path, srctree) |
@@ -998,13 +952,6 @@ def modify(args, config, basepath, workspace): | |||
998 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) | 952 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
999 | 953 | ||
1000 | if bb.data.inherits_class('kernel', rd): | 954 | if bb.data.inherits_class('kernel', rd): |
1001 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' | ||
1002 | 'do_fetch do_unpack do_kernel_configcheck"\n') | ||
1003 | f.write('\ndo_patch[noexec] = "1"\n') | ||
1004 | f.write('\ndo_configure:append() {\n' | ||
1005 | ' cp ${B}/.config ${S}/.config.baseline\n' | ||
1006 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | ||
1007 | '}\n') | ||
1008 | f.write('\ndo_kernel_configme:prepend() {\n' | 955 | f.write('\ndo_kernel_configme:prepend() {\n' |
1009 | ' if [ -e ${S}/.config ]; then\n' | 956 | ' if [ -e ${S}/.config ]; then\n' |
1010 | ' mv ${S}/.config ${S}/.config.old\n' | 957 | ' mv ${S}/.config ${S}/.config.old\n' |
@@ -1475,6 +1422,7 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1475 | # Instead they are directly copied over the original source files (in | 1422 | # Instead they are directly copied over the original source files (in |
1476 | # recipe space). | 1423 | # recipe space). |
1477 | existing_files = oe.recipeutils.get_recipe_local_files(rd) | 1424 | existing_files = oe.recipeutils.get_recipe_local_files(rd) |
1425 | |||
1478 | new_set = None | 1426 | new_set = None |
1479 | updated = OrderedDict() | 1427 | updated = OrderedDict() |
1480 | added = OrderedDict() | 1428 | added = OrderedDict() |
@@ -1491,24 +1439,28 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1491 | if branchname.startswith(override_branch_prefix): | 1439 | if branchname.startswith(override_branch_prefix): |
1492 | return (updated, added, removed) | 1440 | return (updated, added, removed) |
1493 | 1441 | ||
1494 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1442 | files = _git_modified(srctree) |
1495 | git_files = _git_ls_tree(srctree) | 1443 | #if not files: |
1496 | if 'oe-local-files' in git_files: | 1444 | # files = _ls_tree(srctree) |
1497 | # If tracked by Git, take the files from srctree HEAD. First get | 1445 | for f in files: |
1498 | # the tree object of the directory | 1446 | fullfile = os.path.join(srctree, f) |
1499 | tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') | 1447 | if os.path.exists(os.path.join(fullfile, ".git")): |
1500 | tree = git_files['oe-local-files'][2] | 1448 | # submodules handled elsewhere |
1501 | bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, | 1449 | continue |
1502 | env=dict(os.environ, GIT_WORK_TREE=destdir, | 1450 | if f not in existing_files: |
1503 | GIT_INDEX_FILE=tmp_index)) | 1451 | added[f] = {} |
1504 | new_set = list(_git_ls_tree(srctree, tree, True).keys()) | 1452 | if os.path.isdir(os.path.join(srctree, f)): |
1505 | elif os.path.isdir(local_files_dir): | 1453 | shutil.copytree(fullfile, os.path.join(destdir, f)) |
1506 | # If not tracked by Git, just copy from working copy | 1454 | else: |
1507 | new_set = _ls_tree(local_files_dir) | 1455 | shutil.copy2(fullfile, os.path.join(destdir, f)) |
1508 | bb.process.run(['cp', '-ax', | 1456 | elif not os.path.exists(fullfile): |
1509 | os.path.join(local_files_dir, '.'), destdir]) | 1457 | removed[f] = existing_files[f] |
1510 | else: | 1458 | elif f in existing_files: |
1511 | new_set = [] | 1459 | updated[f] = {'path' : existing_files[f]} |
1460 | if os.path.isdir(os.path.join(srctree, f)): | ||
1461 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1462 | else: | ||
1463 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1512 | 1464 | ||
1513 | # Special handling for kernel config | 1465 | # Special handling for kernel config |
1514 | if bb.data.inherits_class('kernel-yocto', rd): | 1466 | if bb.data.inherits_class('kernel-yocto', rd): |
@@ -1516,17 +1468,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1516 | fragment_path = os.path.join(destdir, fragment_fn) | 1468 | fragment_path = os.path.join(destdir, fragment_fn) |
1517 | if _create_kconfig_diff(srctree, rd, fragment_path): | 1469 | if _create_kconfig_diff(srctree, rd, fragment_path): |
1518 | if os.path.exists(fragment_path): | 1470 | if os.path.exists(fragment_path): |
1519 | if fragment_fn not in new_set: | 1471 | if fragment_fn in removed: |
1520 | new_set.append(fragment_fn) | 1472 | del removed[fragment_fn] |
1521 | # Copy fragment to local-files | 1473 | if fragment_fn not in updated and fragment_fn not in added: |
1522 | if os.path.isdir(local_files_dir): | 1474 | added[fragment_fn] = {} |
1523 | shutil.copy2(fragment_path, local_files_dir) | ||
1524 | else: | 1475 | else: |
1525 | if fragment_fn in new_set: | 1476 | if fragment_fn in updated: |
1526 | new_set.remove(fragment_fn) | 1477 | removed[fragment_fn] = updated[fragment_fn] |
1527 | # Remove fragment from local-files | 1478 | del updated[fragment_fn] |
1528 | if os.path.exists(os.path.join(local_files_dir, fragment_fn)): | ||
1529 | os.unlink(os.path.join(local_files_dir, fragment_fn)) | ||
1530 | 1479 | ||
1531 | # Special handling for cml1, ccmake, etc bbclasses that generated | 1480 | # Special handling for cml1, ccmake, etc bbclasses that generated |
1532 | # configuration fragment files that are consumed as source files | 1481 | # configuration fragment files that are consumed as source files |
@@ -1534,42 +1483,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1534 | if bb.data.inherits_class(frag_class, rd): | 1483 | if bb.data.inherits_class(frag_class, rd): |
1535 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) | 1484 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) |
1536 | if os.path.exists(srcpath): | 1485 | if os.path.exists(srcpath): |
1537 | if frag_name not in new_set: | 1486 | if frag_name in removed: |
1538 | new_set.append(frag_name) | 1487 | del removed[frag_name] |
1488 | if frag_name not in updated: | ||
1489 | added[frag_name] = {} | ||
1539 | # copy fragment into destdir | 1490 | # copy fragment into destdir |
1540 | shutil.copy2(srcpath, destdir) | 1491 | shutil.copy2(srcpath, destdir) |
1541 | # copy fragment into local files if exists | 1492 | |
1542 | if os.path.isdir(local_files_dir): | ||
1543 | shutil.copy2(srcpath, local_files_dir) | ||
1544 | |||
1545 | if new_set is not None: | ||
1546 | for fname in new_set: | ||
1547 | if fname in existing_files: | ||
1548 | origpath = existing_files.pop(fname) | ||
1549 | workpath = os.path.join(local_files_dir, fname) | ||
1550 | if not filecmp.cmp(origpath, workpath): | ||
1551 | updated[fname] = {'path' : origpath} | ||
1552 | elif fname != '.gitignore': | ||
1553 | added[fname] = {} | ||
1554 | |||
1555 | workdir = rd.getVar('WORKDIR') | ||
1556 | s = rd.getVar('S') | ||
1557 | if not s.endswith(os.sep): | ||
1558 | s += os.sep | ||
1559 | |||
1560 | if workdir != s: | ||
1561 | # Handle files where subdir= was specified | ||
1562 | for fname in list(existing_files.keys()): | ||
1563 | # FIXME handle both subdir starting with BP and not? | ||
1564 | fworkpath = os.path.join(workdir, fname) | ||
1565 | if fworkpath.startswith(s): | ||
1566 | fpath = os.path.join(srctree, os.path.relpath(fworkpath, s)) | ||
1567 | if os.path.exists(fpath): | ||
1568 | origpath = existing_files.pop(fname) | ||
1569 | if not filecmp.cmp(origpath, fpath): | ||
1570 | updated[fpath] = {'path' : origpath} | ||
1571 | |||
1572 | removed = existing_files | ||
1573 | return (updated, added, removed) | 1493 | return (updated, added, removed) |
1574 | 1494 | ||
1575 | 1495 | ||
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py index fa5b8ef3c7..eed3a49e4b 100644 --- a/scripts/lib/devtool/upgrade.py +++ b/scripts/lib/devtool/upgrade.py | |||
@@ -32,7 +32,7 @@ def _run(cmd, cwd=''): | |||
32 | 32 | ||
33 | def _get_srctree(tmpdir): | 33 | def _get_srctree(tmpdir): |
34 | srctree = tmpdir | 34 | srctree = tmpdir |
35 | dirs = scriptutils.filter_src_subdirs(tmpdir) | 35 | dirs = os.listdir(tmpdir) |
36 | if len(dirs) == 1: | 36 | if len(dirs) == 1: |
37 | srctree = os.path.join(tmpdir, dirs[0]) | 37 | srctree = os.path.join(tmpdir, dirs[0]) |
38 | else: | 38 | else: |
@@ -76,19 +76,19 @@ def _rename_recipe_dirs(oldpv, newpv, path): | |||
76 | bb.utils.rename(os.path.join(path, oldfile), | 76 | bb.utils.rename(os.path.join(path, oldfile), |
77 | os.path.join(path, newfile)) | 77 | os.path.join(path, newfile)) |
78 | 78 | ||
79 | def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): | 79 | def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path): |
80 | oldrecipe = os.path.basename(oldrecipe) | 80 | oldrecipe = os.path.basename(oldrecipe) |
81 | if oldrecipe.endswith('_%s.bb' % oldpv): | 81 | if oldrecipe.endswith('_%s.bb' % oldpv): |
82 | newrecipe = '%s_%s.bb' % (bpn, newpv) | 82 | newrecipe = '%s_%s.bb' % (pn, newpv) |
83 | if oldrecipe != newrecipe: | 83 | if oldrecipe != newrecipe: |
84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) | 84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) |
85 | else: | 85 | else: |
86 | newrecipe = oldrecipe | 86 | newrecipe = oldrecipe |
87 | return os.path.join(path, newrecipe) | 87 | return os.path.join(path, newrecipe) |
88 | 88 | ||
89 | def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path): | 89 | def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path): |
90 | _rename_recipe_dirs(oldpv, newpv, path) | 90 | _rename_recipe_dirs(oldpv, newpv, path) |
91 | return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) | 91 | return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path) |
92 | 92 | ||
93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): | 93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): |
94 | """Writes an append file""" | 94 | """Writes an append file""" |
@@ -335,19 +335,19 @@ def _add_license_diff_to_recipe(path, diff): | |||
335 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): | 335 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): |
336 | """Creates the new recipe under workspace""" | 336 | """Creates the new recipe under workspace""" |
337 | 337 | ||
338 | bpn = rd.getVar('BPN') | 338 | pn = rd.getVar('PN') |
339 | path = os.path.join(workspace, 'recipes', bpn) | 339 | path = os.path.join(workspace, 'recipes', pn) |
340 | bb.utils.mkdirhier(path) | 340 | bb.utils.mkdirhier(path) |
341 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) | 341 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) |
342 | if not copied: | 342 | if not copied: |
343 | raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn) | 343 | raise DevtoolError('Internal error - no files were copied for recipe %s' % pn) |
344 | logger.debug('Copied %s to %s' % (copied, path)) | 344 | logger.debug('Copied %s to %s' % (copied, path)) |
345 | 345 | ||
346 | oldpv = rd.getVar('PV') | 346 | oldpv = rd.getVar('PV') |
347 | if not newpv: | 347 | if not newpv: |
348 | newpv = oldpv | 348 | newpv = oldpv |
349 | origpath = rd.getVar('FILE') | 349 | origpath = rd.getVar('FILE') |
350 | fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path) | 350 | fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path) |
351 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) | 351 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) |
352 | 352 | ||
353 | newvalues = {} | 353 | newvalues = {} |
@@ -534,6 +534,15 @@ def _generate_license_diff(old_licenses, new_licenses): | |||
534 | diff = diff + line | 534 | diff = diff + line |
535 | return diff | 535 | return diff |
536 | 536 | ||
537 | def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil): | ||
538 | tasks = [] | ||
539 | for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split(): | ||
540 | logger.info('Running extra recipe upgrade task: %s' % task) | ||
541 | res = tinfoil.build_targets(pn, task, handle_events=True) | ||
542 | |||
543 | if not res: | ||
544 | raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn)) | ||
545 | |||
537 | def upgrade(args, config, basepath, workspace): | 546 | def upgrade(args, config, basepath, workspace): |
538 | """Entry point for the devtool 'upgrade' subcommand""" | 547 | """Entry point for the devtool 'upgrade' subcommand""" |
539 | 548 | ||
@@ -601,7 +610,7 @@ def upgrade(args, config, basepath, workspace): | |||
601 | license_diff = _generate_license_diff(old_licenses, new_licenses) | 610 | license_diff = _generate_license_diff(old_licenses, new_licenses) |
602 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) | 611 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) |
603 | except (bb.process.CmdError, DevtoolError) as e: | 612 | except (bb.process.CmdError, DevtoolError) as e: |
604 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN')) | 613 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN')) |
605 | _upgrade_error(e, recipedir, srctree, args.keep_failure) | 614 | _upgrade_error(e, recipedir, srctree, args.keep_failure) |
606 | standard._add_md5(config, pn, os.path.dirname(rf)) | 615 | standard._add_md5(config, pn, os.path.dirname(rf)) |
607 | 616 | ||
@@ -609,6 +618,8 @@ def upgrade(args, config, basepath, workspace): | |||
609 | copied, config.workspace_path, rd) | 618 | copied, config.workspace_path, rd) |
610 | standard._add_md5(config, pn, af) | 619 | standard._add_md5(config, pn, af) |
611 | 620 | ||
621 | _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil) | ||
622 | |||
612 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 623 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
613 | 624 | ||
614 | logger.info('Upgraded source extracted to %s' % srctree) | 625 | logger.info('Upgraded source extracted to %s' % srctree) |
@@ -643,18 +654,28 @@ def latest_version(args, config, basepath, workspace): | |||
643 | return 0 | 654 | return 0 |
644 | 655 | ||
645 | def check_upgrade_status(args, config, basepath, workspace): | 656 | def check_upgrade_status(args, config, basepath, workspace): |
657 | def _print_status(recipe): | ||
658 | print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'], | ||
659 | recipe['cur_ver'], | ||
660 | recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"), | ||
661 | recipe['maintainer'], | ||
662 | recipe['revision'] if recipe['revision'] != 'N/A' else "", | ||
663 | "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else "")) | ||
646 | if not args.recipe: | 664 | if not args.recipe: |
647 | logger.info("Checking the upstream status for all recipes may take a few minutes") | 665 | logger.info("Checking the upstream status for all recipes may take a few minutes") |
648 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) | 666 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) |
649 | for result in results: | 667 | for recipegroup in results: |
650 | # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason | 668 | upgrades = [r for r in recipegroup if r['status'] != 'MATCH'] |
651 | if args.all or result[1] != 'MATCH': | 669 | currents = [r for r in recipegroup if r['status'] == 'MATCH'] |
652 | print("{:25} {:15} {:15} {} {} {}".format( result[0], | 670 | if len(upgrades) > 1: |
653 | result[2], | 671 | print("These recipes need to be upgraded together {") |
654 | result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), | 672 | for r in upgrades: |
655 | result[4], | 673 | _print_status(r) |
656 | result[5] if result[5] != 'N/A' else "", | 674 | if len(upgrades) > 1: |
657 | "cannot be updated due to: %s" %(result[6]) if result[6] else "")) | 675 | print("}") |
676 | for r in currents: | ||
677 | if args.all: | ||
678 | _print_status(r) | ||
658 | 679 | ||
659 | def register_commands(subparsers, context): | 680 | def register_commands(subparsers, context): |
660 | """Register devtool subcommands from this plugin""" | 681 | """Register devtool subcommands from this plugin""" |
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py index 8e9ff38db6..ea2ef5be63 100644 --- a/scripts/lib/recipetool/create.py +++ b/scripts/lib/recipetool/create.py | |||
@@ -528,7 +528,7 @@ def create_recipe(args): | |||
528 | if ftmpdir and args.keep_temp: | 528 | if ftmpdir and args.keep_temp: |
529 | logger.info('Fetch temp directory is %s' % ftmpdir) | 529 | logger.info('Fetch temp directory is %s' % ftmpdir) |
530 | 530 | ||
531 | dirlist = scriptutils.filter_src_subdirs(srctree) | 531 | dirlist = os.listdir(srctree) |
532 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) | 532 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) |
533 | if len(dirlist) == 1: | 533 | if len(dirlist) == 1: |
534 | singleitem = os.path.join(srctree, dirlist[0]) | 534 | singleitem = os.path.join(srctree, dirlist[0]) |
@@ -960,7 +960,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d): | |||
960 | # Someone else has already handled the license vars, just return their value | 960 | # Someone else has already handled the license vars, just return their value |
961 | return lichandled[0][1] | 961 | return lichandled[0][1] |
962 | 962 | ||
963 | licvalues = guess_license(srctree, d) | 963 | licvalues = find_licenses(srctree, d) |
964 | licenses = [] | 964 | licenses = [] |
965 | lic_files_chksum = [] | 965 | lic_files_chksum = [] |
966 | lic_unknown = [] | 966 | lic_unknown = [] |
@@ -1216,13 +1216,7 @@ def crunch_license(licfile): | |||
1216 | lictext = '' | 1216 | lictext = '' |
1217 | return md5val, lictext | 1217 | return md5val, lictext |
1218 | 1218 | ||
1219 | def guess_license(srctree, d): | 1219 | def find_license_files(srctree): |
1220 | import bb | ||
1221 | md5sums = get_license_md5sums(d) | ||
1222 | |||
1223 | crunched_md5sums = crunch_known_licenses(d) | ||
1224 | |||
1225 | licenses = [] | ||
1226 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] | 1220 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] |
1227 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go") | 1221 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go") |
1228 | licfiles = [] | 1222 | licfiles = [] |
@@ -1235,11 +1229,22 @@ def guess_license(srctree, d): | |||
1235 | fullpath = os.path.join(root, fn) | 1229 | fullpath = os.path.join(root, fn) |
1236 | if not fullpath in licfiles: | 1230 | if not fullpath in licfiles: |
1237 | licfiles.append(fullpath) | 1231 | licfiles.append(fullpath) |
1232 | |||
1233 | return licfiles | ||
1234 | |||
1235 | def match_licenses(licfiles, srctree, d): | ||
1236 | import bb | ||
1237 | md5sums = get_license_md5sums(d) | ||
1238 | |||
1239 | crunched_md5sums = crunch_known_licenses(d) | ||
1240 | |||
1241 | licenses = [] | ||
1238 | for licfile in sorted(licfiles): | 1242 | for licfile in sorted(licfiles): |
1239 | md5value = bb.utils.md5_file(licfile) | 1243 | resolved_licfile = d.expand(licfile) |
1244 | md5value = bb.utils.md5_file(resolved_licfile) | ||
1240 | license = md5sums.get(md5value, None) | 1245 | license = md5sums.get(md5value, None) |
1241 | if not license: | 1246 | if not license: |
1242 | crunched_md5, lictext = crunch_license(licfile) | 1247 | crunched_md5, lictext = crunch_license(resolved_licfile) |
1243 | license = crunched_md5sums.get(crunched_md5, None) | 1248 | license = crunched_md5sums.get(crunched_md5, None) |
1244 | if lictext and not license: | 1249 | if lictext and not license: |
1245 | license = 'Unknown' | 1250 | license = 'Unknown' |
@@ -1249,13 +1254,19 @@ def guess_license(srctree, d): | |||
1249 | if license: | 1254 | if license: |
1250 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) | 1255 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) |
1251 | 1256 | ||
1257 | return licenses | ||
1258 | |||
1259 | def find_licenses(srctree, d): | ||
1260 | licfiles = find_license_files(srctree) | ||
1261 | licenses = match_licenses(licfiles, srctree, d) | ||
1262 | |||
1252 | # FIXME should we grab at least one source file with a license header and add that too? | 1263 | # FIXME should we grab at least one source file with a license header and add that too? |
1253 | 1264 | ||
1254 | return licenses | 1265 | return licenses |
1255 | 1266 | ||
1256 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): | 1267 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): |
1257 | """ | 1268 | """ |
1258 | Given a list of (license, path, md5sum) as returned by guess_license(), | 1269 | Given a list of (license, path, md5sum) as returned by match_licenses(), |
1259 | a dict of package name to path mappings, write out a set of | 1270 | a dict of package name to path mappings, write out a set of |
1260 | package-specific LICENSE values. | 1271 | package-specific LICENSE values. |
1261 | """ | 1272 | """ |
@@ -1284,6 +1295,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn | |||
1284 | outlicenses[pkgname] = licenses | 1295 | outlicenses[pkgname] = licenses |
1285 | return outlicenses | 1296 | return outlicenses |
1286 | 1297 | ||
1298 | def generate_common_licenses_chksums(common_licenses, d): | ||
1299 | lic_files_chksums = [] | ||
1300 | for license in tidy_licenses(common_licenses): | ||
1301 | licfile = '${COMMON_LICENSE_DIR}/' + license | ||
1302 | md5value = bb.utils.md5_file(d.expand(licfile)) | ||
1303 | lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value)) | ||
1304 | return lic_files_chksums | ||
1305 | |||
1287 | def read_pkgconfig_provides(d): | 1306 | def read_pkgconfig_provides(d): |
1288 | pkgdatadir = d.getVar('PKGDATA_DIR') | 1307 | pkgdatadir = d.getVar('PKGDATA_DIR') |
1289 | pkgmap = {} | 1308 | pkgmap = {} |
@@ -1418,4 +1437,3 @@ def register_commands(subparsers): | |||
1418 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) | 1437 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) |
1419 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') | 1438 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') |
1420 | parser_create.set_defaults(func=create_recipe) | 1439 | parser_create.set_defaults(func=create_recipe) |
1421 | |||
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py index a85a2f2786..5cc53931f0 100644 --- a/scripts/lib/recipetool/create_go.py +++ b/scripts/lib/recipetool/create_go.py | |||
@@ -14,7 +14,7 @@ from collections import namedtuple | |||
14 | from enum import Enum | 14 | from enum import Enum |
15 | from html.parser import HTMLParser | 15 | from html.parser import HTMLParser |
16 | from recipetool.create import RecipeHandler, handle_license_vars | 16 | from recipetool.create import RecipeHandler, handle_license_vars |
17 | from recipetool.create import guess_license, tidy_licenses, fixup_license | 17 | from recipetool.create import find_licenses, tidy_licenses, fixup_license |
18 | from recipetool.create import determine_from_url | 18 | from recipetool.create import determine_from_url |
19 | from urllib.error import URLError, HTTPError | 19 | from urllib.error import URLError, HTTPError |
20 | 20 | ||
@@ -624,7 +624,7 @@ class GoRecipeHandler(RecipeHandler): | |||
624 | 624 | ||
625 | licenses = [] | 625 | licenses = [] |
626 | lic_files_chksum = [] | 626 | lic_files_chksum = [] |
627 | licvalues = guess_license(tmp_vendor_dir, d) | 627 | licvalues = find_licenses(tmp_vendor_dir, d) |
628 | shutil.rmtree(tmp_vendor_dir) | 628 | shutil.rmtree(tmp_vendor_dir) |
629 | 629 | ||
630 | if licvalues: | 630 | if licvalues: |
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py index 113a89f6a6..3363a0e7ee 100644 --- a/scripts/lib/recipetool/create_npm.py +++ b/scripts/lib/recipetool/create_npm.py | |||
@@ -16,8 +16,7 @@ from bb.fetch2.npm import NpmEnvironment | |||
16 | from bb.fetch2.npm import npm_package | 16 | from bb.fetch2.npm import npm_package |
17 | from bb.fetch2.npmsw import foreach_dependencies | 17 | from bb.fetch2.npmsw import foreach_dependencies |
18 | from recipetool.create import RecipeHandler | 18 | from recipetool.create import RecipeHandler |
19 | from recipetool.create import get_license_md5sums | 19 | from recipetool.create import match_licenses, find_license_files, generate_common_licenses_chksums |
20 | from recipetool.create import guess_license | ||
21 | from recipetool.create import split_pkg_licenses | 20 | from recipetool.create import split_pkg_licenses |
22 | logger = logging.getLogger('recipetool') | 21 | logger = logging.getLogger('recipetool') |
23 | 22 | ||
@@ -112,40 +111,54 @@ class NpmRecipeHandler(RecipeHandler): | |||
112 | """Return the extra license files and the list of packages""" | 111 | """Return the extra license files and the list of packages""" |
113 | licfiles = [] | 112 | licfiles = [] |
114 | packages = {} | 113 | packages = {} |
114 | # Licenses from package.json will point to COMMON_LICENSE_DIR so we need | ||
115 | # to associate them explicitly to packages for split_pkg_licenses() | ||
116 | fallback_licenses = dict() | ||
117 | |||
118 | def _find_package_licenses(destdir): | ||
119 | """Either find license files, or use package.json metadata""" | ||
120 | def _get_licenses_from_package_json(package_json): | ||
121 | with open(os.path.join(srctree, package_json), "r") as f: | ||
122 | data = json.load(f) | ||
123 | if "license" in data: | ||
124 | licenses = data["license"].split(" ") | ||
125 | licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"] | ||
126 | return [], licenses | ||
127 | else: | ||
128 | return [package_json], None | ||
115 | 129 | ||
116 | # Handle the parent package | ||
117 | packages["${PN}"] = "" | ||
118 | |||
119 | def _licfiles_append_fallback_readme_files(destdir): | ||
120 | """Append README files as fallback to license files if a license files is missing""" | ||
121 | |||
122 | fallback = True | ||
123 | readmes = [] | ||
124 | basedir = os.path.join(srctree, destdir) | 130 | basedir = os.path.join(srctree, destdir) |
125 | for fn in os.listdir(basedir): | 131 | licfiles = find_license_files(basedir) |
126 | upper = fn.upper() | 132 | if len(licfiles) > 0: |
127 | if upper.startswith("README"): | 133 | return licfiles, None |
128 | fullpath = os.path.join(basedir, fn) | 134 | else: |
129 | readmes.append(fullpath) | 135 | # A license wasn't found in the package directory, so we'll use the package.json metadata |
130 | if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: | 136 | pkg_json = os.path.join(basedir, "package.json") |
131 | fallback = False | 137 | return _get_licenses_from_package_json(pkg_json) |
132 | if fallback: | 138 | |
133 | for readme in readmes: | 139 | def _get_package_licenses(destdir, package): |
134 | licfiles.append(os.path.relpath(readme, srctree)) | 140 | (package_licfiles, package_licenses) = _find_package_licenses(destdir) |
141 | if package_licfiles: | ||
142 | licfiles.extend(package_licfiles) | ||
143 | else: | ||
144 | fallback_licenses[package] = package_licenses | ||
135 | 145 | ||
136 | # Handle the dependencies | 146 | # Handle the dependencies |
137 | def _handle_dependency(name, params, destdir): | 147 | def _handle_dependency(name, params, destdir): |
138 | deptree = destdir.split('node_modules/') | 148 | deptree = destdir.split('node_modules/') |
139 | suffix = "-".join([npm_package(dep) for dep in deptree]) | 149 | suffix = "-".join([npm_package(dep) for dep in deptree]) |
140 | packages["${PN}" + suffix] = destdir | 150 | packages["${PN}" + suffix] = destdir |
141 | _licfiles_append_fallback_readme_files(destdir) | 151 | _get_package_licenses(destdir, "${PN}" + suffix) |
142 | 152 | ||
143 | with open(shrinkwrap_file, "r") as f: | 153 | with open(shrinkwrap_file, "r") as f: |
144 | shrinkwrap = json.load(f) | 154 | shrinkwrap = json.load(f) |
145 | |||
146 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) | 155 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) |
147 | 156 | ||
148 | return licfiles, packages | 157 | # Handle the parent package |
158 | packages["${PN}"] = "" | ||
159 | _get_package_licenses(srctree, "${PN}") | ||
160 | |||
161 | return licfiles, packages, fallback_licenses | ||
149 | 162 | ||
150 | # Handle the peer dependencies | 163 | # Handle the peer dependencies |
151 | def _handle_peer_dependency(self, shrinkwrap_file): | 164 | def _handle_peer_dependency(self, shrinkwrap_file): |
@@ -266,36 +279,12 @@ class NpmRecipeHandler(RecipeHandler): | |||
266 | fetcher.unpack(srctree) | 279 | fetcher.unpack(srctree) |
267 | 280 | ||
268 | bb.note("Handling licences ...") | 281 | bb.note("Handling licences ...") |
269 | (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) | 282 | (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev) |
270 | 283 | licvalues = match_licenses(licfiles, srctree, d) | |
271 | def _guess_odd_license(licfiles): | 284 | split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses) |
272 | import bb | 285 | fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist] |
273 | 286 | extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d) | |
274 | md5sums = get_license_md5sums(d, linenumbers=True) | 287 | extravalues["LICENSE"] = fallback_licenses_flat |
275 | |||
276 | chksums = [] | ||
277 | licenses = [] | ||
278 | for licfile in licfiles: | ||
279 | f = os.path.join(srctree, licfile) | ||
280 | md5value = bb.utils.md5_file(f) | ||
281 | (license, beginline, endline, md5) = md5sums.get(md5value, | ||
282 | (None, "", "", "")) | ||
283 | if not license: | ||
284 | license = "Unknown" | ||
285 | logger.info("Please add the following line for '%s' to a " | ||
286 | "'lib/recipetool/licenses.csv' and replace `Unknown`, " | ||
287 | "`X`, `Y` and `MD5` with the license, begin line, " | ||
288 | "end line and partial MD5 checksum:\n" \ | ||
289 | "%s,Unknown,X,Y,MD5" % (licfile, md5value)) | ||
290 | chksums.append("file://%s%s%s;md5=%s" % (licfile, | ||
291 | ";beginline=%s" % (beginline) if beginline else "", | ||
292 | ";endline=%s" % (endline) if endline else "", | ||
293 | md5 if md5 else md5value)) | ||
294 | licenses.append((license, licfile, md5value)) | ||
295 | return (licenses, chksums) | ||
296 | |||
297 | (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles) | ||
298 | split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after) | ||
299 | 288 | ||
300 | classes.append("npm") | 289 | classes.append("npm") |
301 | handled.append("buildsystem") | 290 | handled.append("buildsystem") |
diff --git a/scripts/lib/resulttool/junit.py b/scripts/lib/resulttool/junit.py new file mode 100644 index 0000000000..c7a53dc550 --- /dev/null +++ b/scripts/lib/resulttool/junit.py | |||
@@ -0,0 +1,77 @@ | |||
1 | # resulttool - report test results in JUnit XML format | ||
2 | # | ||
3 | # Copyright (c) 2024, Siemens AG. | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | import os | ||
9 | import re | ||
10 | import xml.etree.ElementTree as ET | ||
11 | import resulttool.resultutils as resultutils | ||
12 | |||
13 | def junit(args, logger): | ||
14 | testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map) | ||
15 | |||
16 | total_time = 0 | ||
17 | skipped = 0 | ||
18 | failures = 0 | ||
19 | errors = 0 | ||
20 | |||
21 | for tests in testresults.values(): | ||
22 | results = tests[next(reversed(tests))].get("result", {}) | ||
23 | |||
24 | for result_id, result in results.items(): | ||
25 | # filter out ptestresult.rawlogs and ptestresult.sections | ||
26 | if re.search(r'\.test_', result_id): | ||
27 | total_time += result.get("duration", 0) | ||
28 | |||
29 | if result['status'] == "FAILED": | ||
30 | failures += 1 | ||
31 | elif result['status'] == "ERROR": | ||
32 | errors += 1 | ||
33 | elif result['status'] == "SKIPPED": | ||
34 | skipped += 1 | ||
35 | |||
36 | testsuites_node = ET.Element("testsuites") | ||
37 | testsuites_node.set("time", "%s" % total_time) | ||
38 | testsuite_node = ET.SubElement(testsuites_node, "testsuite") | ||
39 | testsuite_node.set("name", "Testimage") | ||
40 | testsuite_node.set("time", "%s" % total_time) | ||
41 | testsuite_node.set("tests", "%s" % len(results)) | ||
42 | testsuite_node.set("failures", "%s" % failures) | ||
43 | testsuite_node.set("errors", "%s" % errors) | ||
44 | testsuite_node.set("skipped", "%s" % skipped) | ||
45 | |||
46 | for result_id, result in results.items(): | ||
47 | if re.search(r'\.test_', result_id): | ||
48 | testcase_node = ET.SubElement(testsuite_node, "testcase", { | ||
49 | "name": result_id, | ||
50 | "classname": "Testimage", | ||
51 | "time": str(result['duration']) | ||
52 | }) | ||
53 | if result['status'] == "SKIPPED": | ||
54 | ET.SubElement(testcase_node, "skipped", message=result['log']) | ||
55 | elif result['status'] == "FAILED": | ||
56 | ET.SubElement(testcase_node, "failure", message=result['log']) | ||
57 | elif result['status'] == "ERROR": | ||
58 | ET.SubElement(testcase_node, "error", message=result['log']) | ||
59 | |||
60 | tree = ET.ElementTree(testsuites_node) | ||
61 | |||
62 | if args.junit_xml_path is None: | ||
63 | args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml' | ||
64 | tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True) | ||
65 | |||
66 | logger.info('Saved JUnit XML report as %s' % args.junit_xml_path) | ||
67 | |||
68 | def register_commands(subparsers): | ||
69 | """Register subcommands from this plugin""" | ||
70 | parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format', | ||
71 | description='generate unit test report in JUnit XML format based on the latest test results in the testresults.json.', | ||
72 | group='analysis') | ||
73 | parser_build.set_defaults(func=junit) | ||
74 | parser_build.add_argument('json_file', | ||
75 | help='json file should point to the testresults.json') | ||
76 | parser_build.add_argument('-j', '--junit_xml_path', | ||
77 | help='junit xml path allows setting the path of the generated test report. The default location is <build_dir>/tmp/log/oeqa/junit.xml') | ||
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py index f23e53cba9..81f0b01fa5 100644 --- a/scripts/lib/scriptutils.py +++ b/scripts/lib/scriptutils.py | |||
@@ -179,6 +179,8 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
179 | f.write('SRCREV = "%s"\n' % srcrev) | 179 | f.write('SRCREV = "%s"\n' % srcrev) |
180 | f.write('PV = "0.0+"\n') | 180 | f.write('PV = "0.0+"\n') |
181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) | 181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) |
182 | f.write('UNPACKDIR = "%s"\n' % destdir) | ||
183 | |||
182 | # Set S out of the way so it doesn't get created under the workdir | 184 | # Set S out of the way so it doesn't get created under the workdir |
183 | f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) | 185 | f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) |
184 | if not mirrors: | 186 | if not mirrors: |
@@ -232,10 +234,6 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
232 | if e.errno != errno.ENOTEMPTY: | 234 | if e.errno != errno.ENOTEMPTY: |
233 | raise | 235 | raise |
234 | 236 | ||
235 | bb.utils.mkdirhier(destdir) | ||
236 | for fn in os.listdir(tmpworkdir): | ||
237 | shutil.move(os.path.join(tmpworkdir, fn), destdir) | ||
238 | |||
239 | finally: | 237 | finally: |
240 | if not preserve_tmp: | 238 | if not preserve_tmp: |
241 | shutil.rmtree(tmpdir) | 239 | shutil.rmtree(tmpdir) |
@@ -271,12 +269,3 @@ def is_src_url(param): | |||
271 | return True | 269 | return True |
272 | return False | 270 | return False |
273 | 271 | ||
274 | def filter_src_subdirs(pth): | ||
275 | """ | ||
276 | Filter out subdirectories of initial unpacked source trees that we do not care about. | ||
277 | Used by devtool and recipetool. | ||
278 | """ | ||
279 | dirlist = os.listdir(pth) | ||
280 | filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa'] | ||
281 | dirlist = [x for x in dirlist if x not in filterout] | ||
282 | return dirlist | ||
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py index 674ccfc244..ce7e6c5d75 100644 --- a/scripts/lib/wic/engine.py +++ b/scripts/lib/wic/engine.py | |||
@@ -359,7 +359,7 @@ class Disk: | |||
359 | Remove files/dirs and their contents from the partition. | 359 | Remove files/dirs and their contents from the partition. |
360 | This only applies to ext* partition. | 360 | This only applies to ext* partition. |
361 | """ | 361 | """ |
362 | abs_path = re.sub('\/\/+', '/', path) | 362 | abs_path = re.sub(r'\/\/+', '/', path) |
363 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, | 363 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, |
364 | self._get_part_image(pnum), | 364 | self._get_part_image(pnum), |
365 | abs_path) | 365 | abs_path) |
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py index 795707ec5d..bf2c34d594 100644 --- a/scripts/lib/wic/partition.py +++ b/scripts/lib/wic/partition.py | |||
@@ -284,19 +284,8 @@ class Partition(): | |||
284 | 284 | ||
285 | extraopts = self.mkfs_extraopts or "-F -i 8192" | 285 | extraopts = self.mkfs_extraopts or "-F -i 8192" |
286 | 286 | ||
287 | if os.getenv('SOURCE_DATE_EPOCH'): | 287 | # use hash_seed to generate reproducible ext4 images |
288 | sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) | 288 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, pseudo) |
289 | if pseudo: | ||
290 | pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo) | ||
291 | else: | ||
292 | pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time | ||
293 | |||
294 | # Set hash_seed to generate deterministic directory indexes | ||
295 | namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460") | ||
296 | if self.fsuuid: | ||
297 | namespace = uuid.UUID(self.fsuuid) | ||
298 | hash_seed = str(uuid.uuid5(namespace, str(sde_time))) | ||
299 | extraopts += " -E hash_seed=%s" % hash_seed | ||
300 | 289 | ||
301 | label_str = "" | 290 | label_str = "" |
302 | if self.label: | 291 | if self.label: |
@@ -344,6 +333,23 @@ class Partition(): | |||
344 | 333 | ||
345 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 334 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
346 | 335 | ||
336 | def get_hash_seed_ext4(self, extraopts, pseudo): | ||
337 | if os.getenv('SOURCE_DATE_EPOCH'): | ||
338 | sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) | ||
339 | if pseudo: | ||
340 | pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo) | ||
341 | else: | ||
342 | pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time | ||
343 | |||
344 | # Set hash_seed to generate deterministic directory indexes | ||
345 | namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460") | ||
346 | if self.fsuuid: | ||
347 | namespace = uuid.UUID(self.fsuuid) | ||
348 | hash_seed = str(uuid.uuid5(namespace, str(sde_time))) | ||
349 | extraopts += " -E hash_seed=%s" % hash_seed | ||
350 | |||
351 | return (extraopts, pseudo) | ||
352 | |||
347 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, | 353 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, |
348 | native_sysroot, pseudo): | 354 | native_sysroot, pseudo): |
349 | """ | 355 | """ |
@@ -437,13 +443,16 @@ class Partition(): | |||
437 | 443 | ||
438 | extraopts = self.mkfs_extraopts or "-i 8192" | 444 | extraopts = self.mkfs_extraopts or "-i 8192" |
439 | 445 | ||
446 | # use hash_seed to generate reproducible ext4 images | ||
447 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, None) | ||
448 | |||
440 | label_str = "" | 449 | label_str = "" |
441 | if self.label: | 450 | if self.label: |
442 | label_str = "-L %s" % self.label | 451 | label_str = "-L %s" % self.label |
443 | 452 | ||
444 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ | 453 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ |
445 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) | 454 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) |
446 | exec_native_cmd(mkfs_cmd, native_sysroot) | 455 | exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) |
447 | 456 | ||
448 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 457 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
449 | 458 | ||
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg-efi.py index 13a9cddf4e..7cc5131541 100644 --- a/scripts/lib/wic/plugins/source/bootimg-efi.py +++ b/scripts/lib/wic/plugins/source/bootimg-efi.py | |||
@@ -428,10 +428,10 @@ class BootimgEFIPlugin(SourcePlugin): | |||
428 | elif source_params['loader'] == 'uefi-kernel': | 428 | elif source_params['loader'] == 'uefi-kernel': |
429 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | 429 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") |
430 | if not kernel: | 430 | if not kernel: |
431 | raise WicError("Empty KERNEL_IMAGETYPE %s\n" % target) | 431 | raise WicError("Empty KERNEL_IMAGETYPE") |
432 | target = get_bitbake_var("TARGET_SYS") | 432 | target = get_bitbake_var("TARGET_SYS") |
433 | if not target: | 433 | if not target: |
434 | raise WicError("Unknown arch (TARGET_SYS) %s\n" % target) | 434 | raise WicError("Empty TARGET_SYS") |
435 | 435 | ||
436 | if re.match("x86_64", target): | 436 | if re.match("x86_64", target): |
437 | kernel_efi_image = "bootx64.efi" | 437 | kernel_efi_image = "bootx64.efi" |
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg-partition.py index 1071d1af3f..589853a439 100644 --- a/scripts/lib/wic/plugins/source/bootimg-partition.py +++ b/scripts/lib/wic/plugins/source/bootimg-partition.py | |||
@@ -16,7 +16,7 @@ import logging | |||
16 | import os | 16 | import os |
17 | import re | 17 | import re |
18 | 18 | ||
19 | from glob import glob | 19 | from oe.bootfiles import get_boot_files |
20 | 20 | ||
21 | from wic import WicError | 21 | from wic import WicError |
22 | from wic.engine import get_custom_config | 22 | from wic.engine import get_custom_config |
@@ -66,42 +66,7 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
66 | 66 | ||
67 | logger.debug('Boot files: %s', boot_files) | 67 | logger.debug('Boot files: %s', boot_files) |
68 | 68 | ||
69 | # list of tuples (src_name, dst_name) | 69 | cls.install_task = get_boot_files(kernel_dir, boot_files) |
70 | deploy_files = [] | ||
71 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | ||
72 | if ';' in src_entry: | ||
73 | dst_entry = tuple(src_entry.split(';')) | ||
74 | if not dst_entry[0] or not dst_entry[1]: | ||
75 | raise WicError('Malformed boot file entry: %s' % src_entry) | ||
76 | else: | ||
77 | dst_entry = (src_entry, src_entry) | ||
78 | |||
79 | logger.debug('Destination entry: %r', dst_entry) | ||
80 | deploy_files.append(dst_entry) | ||
81 | |||
82 | cls.install_task = []; | ||
83 | for deploy_entry in deploy_files: | ||
84 | src, dst = deploy_entry | ||
85 | if '*' in src: | ||
86 | # by default install files under their basename | ||
87 | entry_name_fn = os.path.basename | ||
88 | if dst != src: | ||
89 | # unless a target name was given, then treat name | ||
90 | # as a directory and append a basename | ||
91 | entry_name_fn = lambda name: \ | ||
92 | os.path.join(dst, | ||
93 | os.path.basename(name)) | ||
94 | |||
95 | srcs = glob(os.path.join(kernel_dir, src)) | ||
96 | |||
97 | logger.debug('Globbed sources: %s', ', '.join(srcs)) | ||
98 | for entry in srcs: | ||
99 | src = os.path.relpath(entry, kernel_dir) | ||
100 | entry_dst_name = entry_name_fn(entry) | ||
101 | cls.install_task.append((src, entry_dst_name)) | ||
102 | else: | ||
103 | cls.install_task.append((src, dst)) | ||
104 | |||
105 | if source_params.get('loader') != "u-boot": | 70 | if source_params.get('loader') != "u-boot": |
106 | return | 71 | return |
107 | 72 | ||