Diffstat (limited to 'scripts/lib')
45 files changed, 898 insertions, 1601 deletions
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html
index ad4a93ed02..86435273cf 100644
--- a/scripts/lib/build_perf/html/measurement_chart.html
+++ b/scripts/lib/build_perf/html/measurement_chart.html
@@ -2,7 +2,7 @@
   // Get raw data
   const rawData = [
     {% for sample in measurement.samples %}
-    [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}],
+    [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'],
     {% endfor %}
   ];
 
@@ -26,27 +26,37 @@
     ]
   });
 
+  const commitCountList = rawData.map(([commit, value, time]) => {
+    return commit
+  });
+
+  const commitCountData = rawData.map(([commit, value, time]) => {
+    return updateValue(value)
+  });
+
   // Set chart options
-  const option = {
+  const option_start_time = {
     tooltip: {
       trigger: 'axis',
-      valueFormatter: (value) => {
-        const commitNumber = rawData.filter(([commit, dataValue, time]) => updateValue(dataValue) === value)
+      enterable: true,
+      position: function (point, params, dom, rect, size) {
+        return [point[0], '0%'];
+      },
+      formatter: function (param) {
+        const value = param[0].value[1]
+        const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
+        const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
+
+        // Add commit hash to the tooltip as a link
+        const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
         if ('{{ measurement.value_type.quantity }}' == 'time') {
           const hours = Math.floor(value/60)
           const minutes = Math.floor(value % 60)
           const seconds = Math.floor((value * 60) % 60)
-          return [
-            hours + ':' + minutes + ':' + seconds + ', ' +
-            'commit number: ' + commitNumber[0][0]
-          ]
+          return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
         }
-        return [
-          value.toFixed(2) + ' MB' + ', ' +
-          'commit number: ' + commitNumber[0][0]
-        ]
-      },
-
+        return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
+      ;}
     },
     xAxis: {
       type: 'time',
@@ -72,29 +82,87 @@
       {
         name: '{{ measurement.value_type.quantity }}',
         type: 'line',
-        step: 'start',
         symbol: 'none',
         data: data
       }
     ]
   };
 
+  const option_commit_count = {
+    tooltip: {
+      trigger: 'axis',
+      enterable: true,
+      position: function (point, params, dom, rect, size) {
+        return [point[0], '0%'];
+      },
+      formatter: function (param) {
+        const value = param[0].value
+        const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
+        const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
+        // Add commit hash to the tooltip as a link
+        const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
+        if ('{{ measurement.value_type.quantity }}' == 'time') {
+          const hours = Math.floor(value/60)
+          const minutes = Math.floor(value % 60)
+          const seconds = Math.floor((value * 60) % 60)
+          return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
+        }
+        return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
+      ;}
+    },
+    xAxis: {
+      name: 'Commit count',
+      type: 'category',
+      data: commitCountList
+    },
+    yAxis: {
+      name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB',
+      type: 'value',
+      min: function(value) {
+        return Math.round(value.min - 0.5);
+      },
+      max: function(value) {
+        return Math.round(value.max + 0.5);
+      }
+    },
+    dataZoom: [
+      {
+        type: 'slider',
+        xAxisIndex: 0,
+        filterMode: 'none'
+      },
+    ],
+    series: [
+      {
+        name: '{{ measurement.value_type.quantity }}',
+        type: 'line',
+        symbol: 'none',
+        data: commitCountData
+      }
+    ]
+  };
+
   // Draw chart
-  const chart_div = document.getElementById('{{ chart_elem_id }}');
-  // Set dark mode
-  let measurement_chart
-  if (window.matchMedia('(prefers-color-scheme: dark)').matches) {
-    measurement_chart= echarts.init(chart_div, 'dark', {
-      height: 320
-    });
-  } else {
-    measurement_chart= echarts.init(chart_div, null, {
-      height: 320
+  const draw_chart = (chart_id, option) => {
+    let chart_name
+    const chart_div = document.getElementById(chart_id);
+    // Set dark mode
+    if (window.matchMedia('(prefers-color-scheme: dark)').matches) {
+      chart_name= echarts.init(chart_div, 'dark', {
+        height: 320
+      });
+    } else {
+      chart_name= echarts.init(chart_div, null, {
+        height: 320
+      });
+    }
+    // Change chart size with browser resize
+    window.addEventListener('resize', function() {
+      chart_name.resize();
     });
+    return chart_name.setOption(option);
   }
-  // Change chart size with browser resize
-  window.addEventListener('resize', function() {
-    measurement_chart.resize();
-  });
-  measurement_chart.setOption(option);
+
+  draw_chart('{{ chart_elem_start_time_id }}', option_start_time)
+  draw_chart('{{ chart_elem_commit_count_id }}', option_commit_count)
 </script>
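
Note: the template above is rendered once per measurement by the build-perf report generator, which passes a Jinja2 context containing `measurement` and the two chart element ids. A minimal rendering sketch; the `SimpleNamespace` stand-ins, sample values and loader path are illustrative assumptions, not the report generator's real types:

    # Render measurement_chart.html with mock data to inspect the generated
    # rawData rows, including the new commit hash used for the tooltip links.
    import jinja2
    from types import SimpleNamespace as NS

    samples = [NS(commit_num=1,
                  mean=NS(gv_value=lambda: 42.5),   # minutes (or MB for size measurements)
                  start_time=1714000000,            # epoch seconds
                  commit="deadbeef")]               # hypothetical commit hash
    measurement = NS(name="m1", samples=samples, value_type=NS(quantity="time"))

    env = jinja2.Environment(loader=jinja2.FileSystemLoader("scripts/lib/build_perf/html"))
    html = env.get_template("measurement_chart.html").render(
        measurement=measurement,
        chart_elem_start_time_id="test_m1_chart_start_time",
        chart_elem_commit_count_id="test_m1_chart_commit_count")
    print(html)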
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html
index 537ed3ee52..28cd80e738 100644
--- a/scripts/lib/build_perf/html/report.html
+++ b/scripts/lib/build_perf/html/report.html
@@ -9,7 +9,8 @@
 {% for test in test_data %}
 {% if test.status == 'SUCCESS' %}
 {% for measurement in test.measurements %}
-{% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %}
+{% set chart_elem_start_time_id = test.name + '_' + measurement.name + '_chart_start_time' %}
+{% set chart_elem_commit_count_id = test.name + '_' + measurement.name + '_chart_commit_count' %}
 {% include 'measurement_chart.html' %}
 {% endfor %}
 {% endif %}
@@ -116,6 +117,22 @@ a {
 a:hover {
   color: #8080ff;
 }
+button {
+  background-color: #F3F4F6;
+  border: none;
+  outline: none;
+  cursor: pointer;
+  padding: 10px 12px;
+  transition: 0.3s;
+  border-radius: 8px;
+  color: #3A4353;
+}
+button:hover {
+  background-color: #d6d9e0;
+}
+.tab button.active {
+  background-color: #d6d9e0;
+}
 @media (prefers-color-scheme: dark) {
   :root {
     --text: #e9e8fa;
@@ -126,6 +143,16 @@ a:hover {
     --trborder: #212936;
     --chartborder: #b1b0bf;
   }
+  button {
+    background-color: #28303E;
+    color: #fff;
+  }
+  button:hover {
+    background-color: #545a69;
+  }
+  .tab button.active {
+    background-color: #545a69;
+  }
 }
 </style>
 
@@ -233,7 +260,18 @@
 <tr>
   <td style="width: 75%">
     {# Linechart #}
-    <div id="{{ test.name }}_{{ measurement.name }}_chart"></div>
+    <div class="tab {{ test.name }}_{{ measurement.name }}_tablinks">
+      <button class="tablinks active" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_start_time', '{{ test.name }}_{{ measurement.name }}')">Chart with start time</button>
+      <button class="tablinks" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_commit_count', '{{ test.name }}_{{ measurement.name }}')">Chart with commit count</button>
+    </div>
+    <div class="{{ test.name }}_{{ measurement.name }}_tabcontent">
+      <div id="{{ test.name }}_{{ measurement.name }}_start_time" class="tabcontent" style="display: block;">
+        <div id="{{ test.name }}_{{ measurement.name }}_chart_start_time"></div>
+      </div>
+      <div id="{{ test.name }}_{{ measurement.name }}_commit_count" class="tabcontent" style="display: none;">
+        <div id="{{ test.name }}_{{ measurement.name }}_chart_commit_count"></div>
+      </div>
+    </div>
   </td>
   <td>
     {# Measurement statistics #}
@@ -340,6 +378,31 @@
 <div class="preformatted">{{ test.message }}</div>
 {% endif %}
 {% endfor %}
-</div></body>
-</html>
+</div>
 
+<script>
+function openChart(event, chartType, chartName) {
+  let i, tabcontents, tablinks
+  tabcontents = document.querySelectorAll(`.${chartName}_tabcontent > .tabcontent`);
+  tabcontents.forEach((tabcontent) => {
+    tabcontent.style.display = "none";
+  });
+
+  tablinks = document.querySelectorAll(`.${chartName}_tablinks > .tablinks`);
+  tablinks.forEach((tabLink) => {
+    tabLink.classList.remove('active');
+  });
+
+  const targetTab = document.getElementById(chartType)
+  targetTab.style.display = "block";
+
+  // Call resize on the ECharts instance to redraw the chart
+  const chartContainer = targetTab.querySelector('div')
+  echarts.init(chartContainer).resize();
+
+  event.currentTarget.classList.add('active');
+}
+</script>
+
+</body>
+</html>
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
index 62ecdfe390..86aadf39a6 100644
--- a/scripts/lib/checklayer/__init__.py
+++ b/scripts/lib/checklayer/__init__.py
@@ -452,3 +452,15 @@ def compare_signatures(old_sigs, curr_sigs):
             msg.extend([' ' + line for line in output.splitlines()])
         msg.append('')
     return '\n'.join(msg)
+
+
+def get_git_toplevel(directory):
+    """
+    Try and find the top of the git repository that directory might be in.
+    Returns the top-level directory, or None.
+    """
+    cmd = ["git", "-C", directory, "rev-parse", "--show-toplevel"]
+    try:
+        return subprocess.check_output(cmd, text=True).strip()
+    except:
+        return None
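
A usage sketch for the new helper (the layer path is hypothetical). The bare `except:` maps any failure to `None`; in practice the expected failure is `subprocess.CalledProcessError`, raised when `git rev-parse` exits non-zero for a directory outside any git work tree:

    from checklayer import get_git_toplevel

    top = get_git_toplevel("/srv/layers/meta-example")  # hypothetical path
    if top is None:
        print("not inside a git work tree")
    else:
        print("repository top-level:", top)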
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py
index 97b16f78c8..ddead69a7b 100644
--- a/scripts/lib/checklayer/cases/common.py
+++ b/scripts/lib/checklayer/cases/common.py
@@ -7,7 +7,7 @@ import glob
 import os
 import unittest
 import re
-from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures
+from checklayer import get_signatures, LayerType, check_command, compare_signatures, get_git_toplevel
 from checklayer.case import OECheckLayerTestCase
 
 class CommonCheckLayer(OECheckLayerTestCase):
@@ -40,6 +40,38 @@ class CommonCheckLayer(OECheckLayerTestCase):
         email_regex = re.compile(r"[^@]+@[^@]+")
         self.assertTrue(email_regex.match(data))
 
+    def find_file_by_name(self, globs):
+        """
+        Utility function to find a file that matches the specified list of
+        globs, in either the layer directory itself or the repository top-level
+        directory.
+        """
+        directories = [self.tc.layer["path"]]
+        toplevel = get_git_toplevel(directories[0])
+        if toplevel:
+            directories.append(toplevel)
+
+        for path in directories:
+            for name in globs:
+                files = glob.glob(os.path.join(path, name))
+                if files:
+                    return sorted(files)[0]
+        return None
+
+    def test_security(self):
+        """
+        Test that the layer has a SECURITY.md (or similar) file, either in the
+        layer itself or at the top of the containing git repository.
+        """
+        if self.tc.layer["type"] == LayerType.CORE:
+            raise unittest.SkipTest("Core layer's SECURITY is top level")
+
+        filename = self.find_file_by_name(("SECURITY", "SECURITY.*"))
+        self.assertTrue(filename, msg="Layer doesn't contain a SECURITY.md file.")
+
+        size = os.path.getsize(filename)
+        self.assertGreater(size, 0, msg=f"{filename} has no content.")
+
     def test_parse(self):
         check_command('Layer %s failed to parse.' % self.tc.layer['name'],
                       'bitbake -p')
@@ -72,7 +104,6 @@ class CommonCheckLayer(OECheckLayerTestCase):
                    self.tc.layer['name'])
             self.fail('\n'.join(msg))
 
-    @unittest.expectedFailure
     def test_patches_upstream_status(self):
         import sys
         sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/'))
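
The lookup order in `find_file_by_name()` is: layer directory first, then the repository top level, trying each glob in turn and returning the lexicographically first match. A standalone sketch of the same logic with hypothetical directories:

    import glob
    import os

    def first_match(directories, globs):
        # Earlier directories and earlier globs win; within one glob,
        # sorted() makes the choice deterministic.
        for path in directories:
            for name in globs:
                files = glob.glob(os.path.join(path, name))
                if files:
                    return sorted(files)[0]
        return None

    print(first_match(["/srv/layers/meta-example", "/srv/repo"],
                      ("SECURITY", "SECURITY.*")))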
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 6133c1c5b4..fa6e1a34fd 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -234,7 +234,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
                 f.write(line)
 
     bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
-    bb.process.run('git tag -f %s' % basetag, cwd=repodir)
+    bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir)
 
     # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
     # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
@@ -256,7 +256,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
             oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
             found = False
     if os.path.exists(os.path.join(repodir, '.gitmodules')):
-        bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
+        bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir)
 
 def recipe_to_append(recipefile, config, wildcard=False):
     """
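
The `--no-sign` flags guard against user git configuration: with `tag.gpgSign = true` set globally, the previous plain `git tag -f` would try to GPG-sign devtool's internal base tags and could fail or prompt for a key. A minimal reproduction of the tagging call (repository path hypothetical):

    import subprocess

    repodir = "/tmp/devtool-src"  # hypothetical checkout
    # --no-sign overrides tag.gpgSign, forcing a plain unsigned tag.
    subprocess.run(["git", "-C", repodir, "tag", "-f", "--no-sign", "devtool-base"],
                   check=True)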
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
index 935ffab46c..0b2c3d33dc 100644
--- a/scripts/lib/devtool/build.py
+++ b/scripts/lib/devtool/build.py
@@ -49,7 +49,7 @@ def build(args, config, basepath, workspace):
         rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
         if not rd:
             return 1
-        deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
+        deploytask = 'do_deploy' in bb.build.listtasks(rd)
     finally:
         tinfoil.shutdown()
 
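
`bb.build.listtasks(rd)` replaces the peek at BitBake's internal `__BBTASKS` variable with the public helper for enumerating a recipe's tasks. The pattern, assuming `rd` is a parsed recipe datastore as above:

    import bb.build

    def recipe_has_task(rd, task):
        # listtasks() yields the task names defined for the datastore,
        # e.g. 'do_fetch', 'do_compile', 'do_deploy'.
        return task in bb.build.listtasks(rd)

    deploytask = recipe_has_task(rd, 'do_deploy')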
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
index 1cd4831d2b..990303982c 100644
--- a/scripts/lib/devtool/build_sdk.py
+++ b/scripts/lib/devtool/build_sdk.py
@@ -5,14 +5,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-import os
-import subprocess
 import logging
-import glob
-import shutil
-import errno
-import sys
-import tempfile
 from devtool import DevtoolError
 from devtool import build_image
 
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
index a62b93224e..ee5bb57265 100644
--- a/scripts/lib/devtool/ide_plugins/ide_code.py
+++ b/scripts/lib/devtool/ide_plugins/ide_code.py
@@ -161,7 +161,6 @@ class IdeVSCode(IdeBase):
         if modified_recipe.build_tool is not BuildTool.CMAKE:
             return
         recommendations += [
-            "twxs.cmake",
             "ms-vscode.cmake-tools",
             "ms-vscode.cpptools",
             "ms-vscode.cpptools-extension-pack",
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
index 65873b088d..931408fa74 100755
--- a/scripts/lib/devtool/ide_sdk.py
+++ b/scripts/lib/devtool/ide_sdk.py
@@ -167,7 +167,7 @@ class RecipeImage:
         self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')
 
         self.gdbserver_missing = 'gdbserver' not in image_d.getVar(
-            'IMAGE_INSTALL')
+            'IMAGE_INSTALL') and 'tools-debug' not in image_d.getVar('IMAGE_FEATURES')
 
     @property
     def debug_support(self):
@@ -288,6 +288,7 @@ class RecipeModified:
         self.bblayers = None
         self.bpn = None
         self.d = None
+        self.debug_build = None
         self.fakerootcmd = None
         self.fakerootenv = None
         self.libdir = None
@@ -333,7 +334,7 @@ class RecipeModified:
         self.srctree = workspace[workspacepn]['srctree']
         # Need to grab this here in case the source is within a subdirectory
         self.real_srctree = get_real_srctree(
-            self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR'))
+            self.srctree, recipe_d.getVar('S'), recipe_d.getVar('UNPACKDIR'))
         self.bbappend = workspace[workspacepn]['bbappend']
 
         self.ide_sdk_dir = os.path.join(
@@ -348,6 +349,7 @@ class RecipeModified:
         self.bpn = recipe_d.getVar('BPN')
         self.cxx = recipe_d.getVar('CXX')
         self.d = recipe_d.getVar('D')
+        self.debug_build = recipe_d.getVar('DEBUG_BUILD')
         self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
         self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
         self.libdir = recipe_d.getVar('libdir')
@@ -389,17 +391,6 @@
         self.recipe_id = self.bpn + "-" + self.package_arch
         self.recipe_id_pretty = self.bpn + ": " + self.package_arch
 
-    def append_to_bbappend(self, append_text):
-        with open(self.bbappend, 'a') as bbap:
-            bbap.write(append_text)
-
-    def remove_from_bbappend(self, append_text):
-        with open(self.bbappend, 'r') as bbap:
-            text = bbap.read()
-        new_text = text.replace(append_text, '')
-        with open(self.bbappend, 'w') as bbap:
-            bbap.write(new_text)
-
     @staticmethod
     def is_valid_shell_variable(var):
         """Skip strange shell variables like systemd
@@ -412,34 +403,6 @@
             return True
         return False
 
-    def debug_build_config(self, args):
-        """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise"""
-        if self.build_tool is BuildTool.CMAKE:
-            append_text = os.linesep + \
-                'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep
-            if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
-                self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = {
-                    "type": "STRING",
-                    "value": "Debug",
-                }
-                self.append_to_bbappend(append_text)
-            elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
-                del self.cmake_cache_vars['CMAKE_BUILD_TYPE']
-                self.remove_from_bbappend(append_text)
-        elif self.build_tool is BuildTool.MESON:
-            append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep
-            if args.debug_build_config and self.meson_buildtype != "debug":
-                self.mesonopts.replace(
-                    '--buildtype ' + self.meson_buildtype, '--buildtype debug')
-                self.append_to_bbappend(append_text)
-            elif self.meson_buildtype == "debug":
-                self.mesonopts.replace(
-                    '--buildtype debug', '--buildtype plain')
-                self.remove_from_bbappend(append_text)
-        elif args.debug_build_config:
-            logger.warn(
-                "--debug-build-config is not implemented for this build tool yet.")
-
     def solib_search_path(self, image):
         """Search for debug symbols in the rootfs and rootfs-dbg
 
@@ -493,7 +456,7 @@
 
         vars = (key for key in d.keys() if not key.startswith(
             "__") and not d.getVarFlag(key, "func", False))
-        for var in vars:
+        for var in sorted(vars):
             func = d.getVarFlag(var, "func", False)
             if d.getVarFlag(var, 'python', False) and func:
                 continue
@@ -545,7 +508,7 @@
         cache_vars = {}
         oecmake_args = d.getVar('OECMAKE_ARGS').split()
         extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
-        for param in oecmake_args + extra_oecmake:
+        for param in sorted(oecmake_args + extra_oecmake):
             d_pref = "-D"
             if param.startswith(d_pref):
                 param = param[len(d_pref):]
@@ -712,42 +675,6 @@
                     binaries.append(abs_name[d_len:])
         return sorted(binaries)
 
-    def gen_delete_package_dirs(self):
-        """delete folders of package tasks
-
-        This is a workaround for and issue with recipes having their sources
-        downloaded as file://
-        This likely breaks pseudo like:
-        path mismatch [3 links]: ino 79147802 db
-        .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/
-        cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp
-        .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp
-        Since the files are anyway outdated lets deleted them (also from pseudo's db) to workaround this issue.
-        """
-        cmd_lines = ['#!/bin/sh']
-
-        # Set up the appropriate environment
-        newenv = dict(os.environ)
-        for varvalue in self.fakerootenv.split():
-            if '=' in varvalue:
-                splitval = varvalue.split('=', 1)
-                newenv[splitval[0]] = splitval[1]
-
-        # Replicate the environment variables from bitbake
-        for var, val in newenv.items():
-            if not RecipeModified.is_valid_shell_variable(var):
-                continue
-            cmd_lines.append('%s="%s"' % (var, val))
-            cmd_lines.append('export %s' % var)
-
-        # Delete the folders
-        pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [
-            "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]])
-        cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs)
-        cmd_lines.append('%s || { "%s failed"; exit 1; }' % (cmd, cmd))
-
-        return self.write_script(cmd_lines, 'delete_package_dirs')
-
     def gen_deploy_target_script(self, args):
         """Generate a script which does what devtool deploy-target does
 
@@ -785,8 +712,6 @@
         """Generate a script which does install and deploy"""
         cmd_lines = ['#!/bin/bash']
 
-        cmd_lines.append(self.gen_delete_package_dirs())
-
         # . oe-init-build-env $BUILDDIR
         # Note: Sourcing scripts with arguments requires bash
         cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
@@ -988,6 +913,13 @@
             recipe_modified.gen_meson_wrapper()
         ide.setup_modified_recipe(
             args, recipe_image, recipe_modified)
+
+        if recipe_modified.debug_build != '1':
+            logger.warn(
+                'Recipe %s is compiled with release build configuration. '
+                'You might want to add DEBUG_BUILD = "1" to %s. '
+                'Note that devtool modify --debug-build can do this automatically.',
+                recipe_modified.name, recipe_modified.bbappend)
     else:
         raise DevtoolError("Must not end up here.")
 
@@ -995,6 +927,15 @@
 
 def register_commands(subparsers, context):
     """Register devtool subcommands from this plugin"""
+
+    # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE
+    # configuration is generated. In the case of the eSDK, the bootstrapping is performed
+    # during the installation of the eSDK installer. Running the ide-sdk plugin from an
+    # eSDK installer-based setup would require skipping the bootstrapping and probably
+    # taking some other differences into account when generating the IDE configurations.
+    # This would be possible. But it is not implemented.
+    if context.fixed_setup:
+        return
 
     global ide_plugins
 
     # Search for IDE plugins in all sub-folders named ide_plugins where devtool seraches for plugins.
@@ -1015,7 +956,7 @@
                                                    help='Setup the SDK and configure the IDE')
     parser_ide_sdk.add_argument(
         'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
-        'Depending on the --mode paramter different types of SDKs and IDE configurations are generated.')
+        'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.')
     parser_ide_sdk.add_argument(
         '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
         help='Different SDK types are supported:\n'
@@ -1065,6 +1006,4 @@
         '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
     parser_ide_sdk.add_argument(
         '--no-check-space', help='Do not check for available space before deploying', action='store_true')
-    parser_ide_sdk.add_argument(
-        '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true')
     parser_ide_sdk.set_defaults(func=ide_setup)
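
Two of the `sorted()` changes above share one motive: generator order over datastore keys and the combined CMake argument list are not guaranteed stable, so the generated environment scripts and cache-variable lists could differ between otherwise identical runs. A self-contained illustration, with a plain dict standing in for the BitBake datastore:

    # Emitting variables in sorted order makes the generated script
    # byte-identical across runs, keeping regenerated IDE configs quiet.
    env = {"CXX": "g++", "B": "/tmp/build", "CC": "gcc"}  # stand-in datastore
    print("\n".join(f'{var}="{env[var]}"' for var in sorted(env)))
    # B="/tmp/build"
    # CC="gcc"
    # CXX="g++"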
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
index 18daef30c3..1054960551 100644
--- a/scripts/lib/devtool/menuconfig.py
+++ b/scripts/lib/devtool/menuconfig.py
@@ -23,9 +23,6 @@
 import os
 import bb
 import logging
-import argparse
-import re
-import glob
 from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
 from devtool import check_workspace_recipe
 logger = logging.getLogger('devtool')
@@ -34,7 +31,6 @@ def menuconfig(args, config, basepath, workspace):
     """Entry point for the devtool 'menuconfig' subcommand"""
 
     rd = ""
-    kconfigpath = ""
     pn_src = ""
     localfilesdir = ""
     workspace_dir = ""
@@ -51,7 +47,6 @@ def menuconfig(args, config, basepath, workspace):
         raise DevtoolError("This recipe does not support menuconfig option")
 
     workspace_dir = os.path.join(config.workspace_path,'sources')
-    kconfigpath = rd.getVar('B')
     pn_src = os.path.join(workspace_dir,pn)
 
     # add check to see if oe_local_files exists or not
@@ -70,7 +65,7 @@ def menuconfig(args, config, basepath, workspace):
     logger.info('Launching menuconfig')
     exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
     fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
-    res = standard._create_kconfig_diff(pn_src,rd,fragment)
+    standard._create_kconfig_diff(pn_src,rd,fragment)
 
     return 0
 
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 05161942b7..1fd5947c41 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -18,11 +18,13 @@ import argparse_oe
 import scriptutils
 import errno
 import glob
-import filecmp
 from collections import OrderedDict
+
 from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError
 from devtool import parse_recipe
 
+import bb.utils
+
 logger = logging.getLogger('devtool')
 
 override_branch_prefix = 'devtool-override-'
@@ -30,7 +32,8 @@ override_branch_prefix = 'devtool-override-'
 
 def add(args, config, basepath, workspace):
     """Entry point for the devtool 'add' subcommand"""
-    import bb
+    import bb.data
+    import bb.process
     import oe.recipeutils
 
     if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
@@ -206,7 +209,7 @@ def add(args, config, basepath, workspace):
             for fn in os.listdir(tempdir):
                 shutil.move(os.path.join(tempdir, fn), recipedir)
         else:
-            raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout))
+            raise DevtoolError(f'Failed to create a recipe file for source {source}')
         attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
         if os.path.exists(attic_recipe):
             logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
@@ -305,6 +308,7 @@ def add(args, config, basepath, workspace):
 
 def _check_compatible_recipe(pn, d):
     """Check if the recipe is supported by devtool"""
+    import bb.data
     if pn == 'perf':
         raise DevtoolError("The perf recipe does not actually check out "
                            "source and thus cannot be supported by this tool",
@@ -374,7 +378,7 @@ def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
 
 def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
     """List contents of a git treeish"""
-    import bb
+    import bb.process
     cmd = ['git', 'ls-tree', '-z', treeish]
     if recursive:
         cmd.append('-r')
@@ -387,6 +391,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
             ret[split[3]] = split[0:3]
     return ret
 
+def _git_modified(repodir):
+    """List the difference between HEAD and the index"""
+    import bb.process
+    cmd = ['git', 'status', '--porcelain']
+    out, _ = bb.process.run(cmd, cwd=repodir)
+    ret = []
+    if out:
+        for line in out.split("\n"):
+            if line and not line.startswith('??'):
+                ret.append(line[3:])
+    return ret
+
+
 def _git_exclude_path(srctree, path):
     """Return pathspec (list of paths) that excludes certain path"""
     # NOTE: "Filtering out" files/paths in this way is not entirely reliable -
@@ -414,8 +431,6 @@ def _ls_tree(directory):
 
 def extract(args, config, basepath, workspace):
     """Entry point for the devtool 'extract' subcommand"""
-    import bb
-
     tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
     if not tinfoil:
         # Error already shown
@@ -438,8 +453,6 @@ def extract(args, config, basepath, workspace):
 
 def sync(args, config, basepath, workspace):
     """Entry point for the devtool 'sync' subcommand"""
-    import bb
-
     tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
     if not tinfoil:
         # Error already shown
@@ -460,37 +473,11 @@ def sync(args, config, basepath, workspace):
     finally:
         tinfoil.shutdown()
 
-def symlink_oelocal_files_srctree(rd, srctree):
-    import oe.patch
-    if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
-        # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
-        # (otherwise the recipe won't build as expected)
-        local_files_dir = os.path.join(srctree, 'oe-local-files')
-        addfiles = []
-        for root, _, files in os.walk(local_files_dir):
-            relpth = os.path.relpath(root, local_files_dir)
-            if relpth != '.':
-                bb.utils.mkdirhier(os.path.join(srctree, relpth))
-            for fn in files:
-                if fn == '.gitignore':
-                    continue
-                destpth = os.path.join(srctree, relpth, fn)
-                if os.path.exists(destpth):
-                    os.unlink(destpth)
-                if relpth != '.':
-                    back_relpth = os.path.relpath(local_files_dir, root)
-                    os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth)
-                else:
-                    os.symlink('oe-local-files/%s' % fn, destpth)
-                addfiles.append(os.path.join(relpth, fn))
-        if addfiles:
-            oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd)
-
 def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
     """Extract sources of a recipe"""
-    import oe.recipeutils
-    import oe.patch
     import oe.path
+    import bb.data
+    import bb.process
 
     pn = d.getVar('PN')
 
@@ -555,6 +542,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
         tempbasedir = d.getVar('WORKDIR')
         bb.utils.mkdirhier(tempbasedir)
         tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir)
+        appendbackup = None
         try:
             tinfoil.logger.setLevel(logging.WARNING)
 
@@ -565,7 +553,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
                 appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
                 shutil.copyfile(appendfile, appendbackup)
             else:
-                appendbackup = None
                 bb.utils.mkdirhier(os.path.dirname(appendfile))
             logger.debug('writing append file %s' % appendfile)
             with open(appendfile, 'a') as f:
@@ -638,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
                     srcsubdir = f.read()
             except FileNotFoundError as e:
                 raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
-            srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))
+            srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR'))))
 
             # Check if work-shared is empty, if yes
             # find source and copy to work-shared
@@ -657,9 +644,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
             elif not os.path.exists(workshareddir):
                 oe.path.copyhardlinktree(srcsubdir, workshareddir)
 
-        tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
-        srctree_localdir = os.path.join(srctree, 'oe-local-files')
-
         if sync:
             try:
                 logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
@@ -674,29 +658,8 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
             except bb.process.ExecutionError as e:
                 raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))
 
-            # Move the oe-local-files directory to srctree.
-            # As oe-local-files is not part of the constructed git tree,
-            # removing it directly during the synchronization might surprise
-            # the user. Instead, we move it to oe-local-files.bak and remind
-            # the user in the log message.
-            if os.path.exists(srctree_localdir + '.bak'):
-                shutil.rmtree(srctree_localdir + '.bak')
-
-            if os.path.exists(srctree_localdir):
-                logger.info('Backing up current local file directory %s' % srctree_localdir)
-                shutil.move(srctree_localdir, srctree_localdir + '.bak')
-
-            if os.path.exists(tempdir_localdir):
-                logger.info('Syncing local source files to srctree...')
-                shutil.copytree(tempdir_localdir, srctree_localdir)
         else:
-            # Move oe-local-files directory to srctree
-            if os.path.exists(tempdir_localdir):
-                logger.info('Adding local source files to srctree...')
-                shutil.move(tempdir_localdir, srcsubdir)
-
             shutil.move(srcsubdir, srctree)
-            symlink_oelocal_files_srctree(d, srctree)
 
         if is_kernel_yocto:
             logger.info('Copying kernel config to srctree')
@@ -715,8 +678,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
 
 def _add_md5(config, recipename, filename):
     """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace"""
-    import bb.utils
-
     def addfile(fn):
         md5 = bb.utils.md5_file(fn)
         with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f:
@@ -735,7 +696,6 @@ def _add_md5(config, recipename, filename):
 def _check_preserve(config, recipename):
     """Check if a file was manually changed and needs to be saved in 'attic'
        directory"""
-    import bb.utils
     origfile = os.path.join(config.workspace_path, '.devtool_md5')
     newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
     preservepath = os.path.join(config.workspace_path, 'attic', recipename)
@@ -766,36 +726,36 @@ def _check_preserve(config, recipename):
 
 def get_staging_kver(srcdir):
     # Kernel version from work-shared
-    kerver = []
-    staging_kerVer=""
-    if os.path.exists(srcdir) and os.listdir(srcdir):
+    import itertools
+    try:
         with open(os.path.join(srcdir, "Makefile")) as f:
-            version = [next(f) for x in range(5)][1:4]
-            for word in version:
-                kerver.append(word.split('= ')[1].split('\n')[0])
-        staging_kerVer = ".".join(kerver)
-    return staging_kerVer
+            # Take VERSION, PATCHLEVEL, SUBLEVEL from lines 1, 2, 3
+            return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4))
+    except FileNotFoundError:
+        return ""
 
 def get_staging_kbranch(srcdir):
+    import bb.process
     staging_kbranch = ""
     if os.path.exists(srcdir) and os.listdir(srcdir):
        (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
        staging_kbranch = "".join(branch.split('\n')[0])
     return staging_kbranch
 
-def get_real_srctree(srctree, s, workdir):
+def get_real_srctree(srctree, s, unpackdir):
     # Check that recipe isn't using a shared workdir
     s = os.path.abspath(s)
-    workdir = os.path.abspath(workdir)
-    if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
+    unpackdir = os.path.abspath(unpackdir)
+    if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir:
         # Handle if S is set to a subdirectory of the source
-        srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
+        srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1]
         srctree = os.path.join(srctree, srcsubdir)
     return srctree
 
 def modify(args, config, basepath, workspace):
     """Entry point for the devtool 'modify' subcommand"""
-    import bb
+    import bb.data
+    import bb.process
     import oe.recipeutils
     import oe.patch
     import oe.path
@@ -851,35 +811,21 @@ def modify(args, config, basepath, workspace): | |||
851 | staging_kbranch = get_staging_kbranch(srcdir) | 811 | staging_kbranch = get_staging_kbranch(srcdir) |
852 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): | 812 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): |
853 | oe.path.copyhardlinktree(srcdir, srctree) | 813 | oe.path.copyhardlinktree(srcdir, srctree) |
854 | workdir = rd.getVar('WORKDIR') | 814 | unpackdir = rd.getVar('UNPACKDIR') |
855 | srcsubdir = rd.getVar('S') | 815 | srcsubdir = rd.getVar('S') |
856 | localfilesdir = os.path.join(srctree, 'oe-local-files') | ||
857 | # Move local source files into separate subdir | ||
858 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] | ||
859 | local_files = oe.recipeutils.get_recipe_local_files(rd) | ||
860 | 816 | ||
861 | for key in local_files.copy(): | 817 | # Add locally copied files to gitignore as we add back to the metadata directly |
862 | if key.endswith('scc'): | 818 | local_files = oe.recipeutils.get_recipe_local_files(rd) |
863 | sccfile = open(local_files[key], 'r') | ||
864 | for l in sccfile: | ||
865 | line = l.split() | ||
866 | if line and line[0] in ('kconf', 'patch'): | ||
867 | cfg = os.path.join(os.path.dirname(local_files[key]), line[-1]) | ||
868 | if not cfg in local_files.values(): | ||
869 | local_files[line[-1]] = cfg | ||
870 | shutil.copy2(cfg, workdir) | ||
871 | sccfile.close() | ||
872 | |||
873 | # Ignore local files with subdir={BP} | ||
874 | srcabspath = os.path.abspath(srcsubdir) | 819 | srcabspath = os.path.abspath(srcsubdir) |
875 | local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] | 820 | local_files = [fname for fname in local_files if |
821 | os.path.exists(os.path.join(unpackdir, fname)) and | ||
822 | srcabspath == unpackdir] | ||
876 | if local_files: | 823 | if local_files: |
877 | for fname in local_files: | 824 | with open(os.path.join(srctree, '.gitignore'), 'a+') as f: |
878 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) | 825 | f.write('# Ignore local files, by default. Remove the following lines' |
879 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: | 826 | ' if you want to commit the directory to Git\n') |
880 | f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n') | 827 | for fname in local_files: |
881 | 828 | f.write('%s\n' % fname) | |
882 | symlink_oelocal_files_srctree(rd, srctree) | ||
883 | 829 | ||
884 | task = 'do_configure' | 830 | task = 'do_configure' |
885 | res = tinfoil.build_targets(pn, task, handle_events=True) | 831 | res = tinfoil.build_targets(pn, task, handle_events=True) |
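Note: devtool modify no longer relocates recipe-local files into an oe-local-files/ subdirectory; they stay where the metadata unpacked them and are merely appended to the source tree's top-level .gitignore (opened 'a+', so repeated runs accumulate entries). Roughly, the resulting file (names hypothetical):

    # Ignore local files, by default. Remove the following lines if you want to commit the directory to Git
    defconfig
    fix-build.scc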
@@ -961,7 +907,7 @@ def modify(args, config, basepath, workspace): | |||
961 | 907 | ||
962 | # Need to grab this here in case the source is within a subdirectory | 908 | # Need to grab this here in case the source is within a subdirectory |
963 | srctreebase = srctree | 909 | srctreebase = srctree |
964 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | 910 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) |
965 | 911 | ||
966 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 912 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
967 | with open(appendfile, 'w') as f: | 913 | with open(appendfile, 'w') as f: |
@@ -1001,13 +947,6 @@ def modify(args, config, basepath, workspace): | |||
1001 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) | 947 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
1002 | 948 | ||
1003 | if bb.data.inherits_class('kernel', rd): | 949 | if bb.data.inherits_class('kernel', rd): |
1004 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' | ||
1005 | 'do_fetch do_unpack do_kernel_configcheck"\n') | ||
1006 | f.write('\ndo_patch[noexec] = "1"\n') | ||
1007 | f.write('\ndo_configure:append() {\n' | ||
1008 | ' cp ${B}/.config ${S}/.config.baseline\n' | ||
1009 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | ||
1010 | '}\n') | ||
1011 | f.write('\ndo_kernel_configme:prepend() {\n' | 950 | f.write('\ndo_kernel_configme:prepend() {\n' |
1012 | ' if [ -e ${S}/.config ]; then\n' | 951 | ' if [ -e ${S}/.config ]; then\n' |
1013 | ' mv ${S}/.config ${S}/.config.old\n' | 952 | ' mv ${S}/.config ${S}/.config.old\n' |
@@ -1031,6 +970,8 @@ def modify(args, config, basepath, workspace): | |||
1031 | if branch == args.branch: | 970 | if branch == args.branch: |
1032 | continue | 971 | continue |
1033 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) | 972 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) |
973 | if args.debug_build: | ||
974 | f.write('\nDEBUG_BUILD = "1"\n') | ||
1034 | 975 | ||
1035 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 976 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
1036 | 977 | ||
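The new --debug-build knob (registered further down in this diff) simply drops DEBUG_BUILD = "1" into the generated workspace append, so the next build of the recipe uses the debug-oriented optimisation flags. Typical use: devtool modify --debug-build <recipe>.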
@@ -1075,6 +1016,7 @@ def rename(args, config, basepath, workspace): | |||
1075 | origfnver = '' | 1016 | origfnver = '' |
1076 | 1017 | ||
1077 | recipefilemd5 = None | 1018 | recipefilemd5 = None |
1019 | newrecipefilemd5 = None | ||
1078 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 1020 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
1079 | try: | 1021 | try: |
1080 | rd = parse_recipe(config, tinfoil, args.recipename, True) | 1022 | rd = parse_recipe(config, tinfoil, args.recipename, True) |
@@ -1152,6 +1094,7 @@ def rename(args, config, basepath, workspace): | |||
1152 | 1094 | ||
1153 | # Rename source tree if it's the default path | 1095 | # Rename source tree if it's the default path |
1154 | appendmd5 = None | 1096 | appendmd5 = None |
1097 | newappendmd5 = None | ||
1155 | if not args.no_srctree: | 1098 | if not args.no_srctree: |
1156 | srctree = workspace[args.recipename]['srctree'] | 1099 | srctree = workspace[args.recipename]['srctree'] |
1157 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): | 1100 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): |
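The newrecipefilemd5 and newappendmd5 pre-initialisations in the two rename() hunks above are defensive: later code in the function (outside this diff) presumably compares or stores those values on paths where the corresponding rename is skipped, and reading a never-assigned local raises UnboundLocalError. A sketch of the failure mode they prevent:

    def rename_sketch(do_rename):
        appendmd5 = None
        newappendmd5 = None               # without this line ...
        if do_rename:
            newappendmd5 = 'abc123'
        return newappendmd5 == appendmd5  # ... this read would raise UnboundLocalError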
@@ -1240,7 +1183,7 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre | |||
1240 | """Get initial and update rev of a recipe. These are the start point of the | 1183 | """Get initial and update rev of a recipe. These are the start point of the |
1241 | whole patchset and start point for the patches to be re-generated/updated. | 1184 | whole patchset and start point for the patches to be re-generated/updated. |
1242 | """ | 1185 | """ |
1243 | import bb | 1186 | import bb.process |
1244 | 1187 | ||
1245 | # Get current branch | 1188 | # Get current branch |
1246 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', | 1189 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', |
@@ -1366,6 +1309,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): | |||
1366 | """ | 1309 | """ |
1367 | import oe.recipeutils | 1310 | import oe.recipeutils |
1368 | from oe.patch import GitApplyTree | 1311 | from oe.patch import GitApplyTree |
1312 | import bb.process | ||
1369 | updated = OrderedDict() | 1313 | updated = OrderedDict() |
1370 | added = OrderedDict() | 1314 | added = OrderedDict() |
1371 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') | 1315 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') |
@@ -1387,6 +1331,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): | |||
1387 | # values, but they ought to be anyway... | 1331 | # values, but they ought to be anyway... |
1388 | new_basename = seqpatch_re.match(new_patch).group(2) | 1332 | new_basename = seqpatch_re.match(new_patch).group(2) |
1389 | match_name = None | 1333 | match_name = None |
1334 | old_patch = None | ||
1390 | for old_patch in existing_patches: | 1335 | for old_patch in existing_patches: |
1391 | old_basename = seqpatch_re.match(old_patch).group(2) | 1336 | old_basename = seqpatch_re.match(old_patch).group(2) |
1392 | old_basename_splitext = os.path.splitext(old_basename) | 1337 | old_basename_splitext = os.path.splitext(old_basename) |
@@ -1435,6 +1380,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): | |||
1435 | 1380 | ||
1436 | def _create_kconfig_diff(srctree, rd, outfile): | 1381 | def _create_kconfig_diff(srctree, rd, outfile): |
1437 | """Create a kconfig fragment""" | 1382 | """Create a kconfig fragment""" |
1383 | import bb.process | ||
1438 | # Only update config fragment if both config files exist | 1384 | # Only update config fragment if both config files exist |
1439 | orig_config = os.path.join(srctree, '.config.baseline') | 1385 | orig_config = os.path.join(srctree, '.config.baseline') |
1440 | new_config = os.path.join(srctree, '.config.new') | 1386 | new_config = os.path.join(srctree, '.config.new') |
@@ -1472,12 +1418,15 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1472 | - for removed dict, the absolute path to the existing file in recipe space | 1418 | - for removed dict, the absolute path to the existing file in recipe space |
1473 | """ | 1419 | """ |
1474 | import oe.recipeutils | 1420 | import oe.recipeutils |
1421 | import bb.data | ||
1422 | import bb.process | ||
1475 | 1423 | ||
1476 | # Find out local files (SRC_URI files that exist in the "recipe space"). | 1424 | # Find out local files (SRC_URI files that exist in the "recipe space"). |
1477 | # Local files that reside in srctree are not included in patch generation. | 1425 | # Local files that reside in srctree are not included in patch generation. |
1478 | # Instead they are directly copied over the original source files (in | 1426 | # Instead they are directly copied over the original source files (in |
1479 | # recipe space). | 1427 | # recipe space). |
1480 | existing_files = oe.recipeutils.get_recipe_local_files(rd) | 1428 | existing_files = oe.recipeutils.get_recipe_local_files(rd) |
1429 | |||
1481 | new_set = None | 1430 | new_set = None |
1482 | updated = OrderedDict() | 1431 | updated = OrderedDict() |
1483 | added = OrderedDict() | 1432 | added = OrderedDict() |
@@ -1494,24 +1443,28 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1494 | if branchname.startswith(override_branch_prefix): | 1443 | if branchname.startswith(override_branch_prefix): |
1495 | return (updated, added, removed) | 1444 | return (updated, added, removed) |
1496 | 1445 | ||
1497 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1446 | files = _git_modified(srctree) |
1498 | git_files = _git_ls_tree(srctree) | 1447 | #if not files: |
1499 | if 'oe-local-files' in git_files: | 1448 | # files = _ls_tree(srctree) |
1500 | # If tracked by Git, take the files from srctree HEAD. First get | 1449 | for f in files: |
1501 | # the tree object of the directory | 1450 | fullfile = os.path.join(srctree, f) |
1502 | tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') | 1451 | if os.path.exists(os.path.join(fullfile, ".git")): |
1503 | tree = git_files['oe-local-files'][2] | 1452 | # submodules handled elsewhere |
1504 | bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, | 1453 | continue |
1505 | env=dict(os.environ, GIT_WORK_TREE=destdir, | 1454 | if f not in existing_files: |
1506 | GIT_INDEX_FILE=tmp_index)) | 1455 | added[f] = {} |
1507 | new_set = list(_git_ls_tree(srctree, tree, True).keys()) | 1456 | if os.path.isdir(os.path.join(srctree, f)): |
1508 | elif os.path.isdir(local_files_dir): | 1457 | shutil.copytree(fullfile, os.path.join(destdir, f)) |
1509 | # If not tracked by Git, just copy from working copy | 1458 | else: |
1510 | new_set = _ls_tree(local_files_dir) | 1459 | shutil.copy2(fullfile, os.path.join(destdir, f)) |
1511 | bb.process.run(['cp', '-ax', | 1460 | elif not os.path.exists(fullfile): |
1512 | os.path.join(local_files_dir, '.'), destdir]) | 1461 | removed[f] = existing_files[f] |
1513 | else: | 1462 | elif f in existing_files: |
1514 | new_set = [] | 1463 | updated[f] = {'path' : existing_files[f]} |
1464 | if os.path.isdir(os.path.join(srctree, f)): | ||
1465 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1466 | else: | ||
1467 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1515 | 1468 | ||
1516 | # Special handling for kernel config | 1469 | # Special handling for kernel config |
1517 | if bb.data.inherits_class('kernel-yocto', rd): | 1470 | if bb.data.inherits_class('kernel-yocto', rd): |
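_export_local_files now builds its worklist from git itself instead of a tracked oe-local-files directory: every path git reports as changed is checked against the recipe's SRC_URI local files and classified as added (new in srctree), removed (deleted from srctree) or updated (present on both sides), with submodules skipped and the _ls_tree fallback left commented out for reference. _git_modified() is defined elsewhere in this file; a hypothetical equivalent:

    def _git_modified(srctree):
        # List paths that differ from HEAD (sketch; the real helper may differ)
        import bb.process
        stdout, _ = bb.process.run('git status --porcelain', cwd=srctree)
        return [line[3:].rstrip() for line in stdout.splitlines()]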
@@ -1519,17 +1472,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1519 | fragment_path = os.path.join(destdir, fragment_fn) | 1472 | fragment_path = os.path.join(destdir, fragment_fn) |
1520 | if _create_kconfig_diff(srctree, rd, fragment_path): | 1473 | if _create_kconfig_diff(srctree, rd, fragment_path): |
1521 | if os.path.exists(fragment_path): | 1474 | if os.path.exists(fragment_path): |
1522 | if fragment_fn not in new_set: | 1475 | if fragment_fn in removed: |
1523 | new_set.append(fragment_fn) | 1476 | del removed[fragment_fn] |
1524 | # Copy fragment to local-files | 1477 | if fragment_fn not in updated and fragment_fn not in added: |
1525 | if os.path.isdir(local_files_dir): | 1478 | added[fragment_fn] = {} |
1526 | shutil.copy2(fragment_path, local_files_dir) | ||
1527 | else: | 1479 | else: |
1528 | if fragment_fn in new_set: | 1480 | if fragment_fn in updated: |
1529 | new_set.remove(fragment_fn) | 1481 | removed[fragment_fn] = updated[fragment_fn] |
1530 | # Remove fragment from local-files | 1482 | del updated[fragment_fn] |
1531 | if os.path.exists(os.path.join(local_files_dir, fragment_fn)): | ||
1532 | os.unlink(os.path.join(local_files_dir, fragment_fn)) | ||
1533 | 1483 | ||
1534 | # Special handling for cml1, ccmake, etc bbclasses that generated | 1484 | # Special handling for cml1, ccmake, etc bbclasses that generated |
1535 | # configuration fragment files that are consumed as source files | 1485 | # configuration fragment files that are consumed as source files |
@@ -1537,42 +1487,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1537 | if bb.data.inherits_class(frag_class, rd): | 1487 | if bb.data.inherits_class(frag_class, rd): |
1538 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) | 1488 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) |
1539 | if os.path.exists(srcpath): | 1489 | if os.path.exists(srcpath): |
1540 | if frag_name not in new_set: | 1490 | if frag_name in removed: |
1541 | new_set.append(frag_name) | 1491 | del removed[frag_name] |
1492 | if frag_name not in updated: | ||
1493 | added[frag_name] = {} | ||
1542 | # copy fragment into destdir | 1494 | # copy fragment into destdir |
1543 | shutil.copy2(srcpath, destdir) | 1495 | shutil.copy2(srcpath, destdir) |
1544 | # copy fragment into local files if exists | 1496 | |
1545 | if os.path.isdir(local_files_dir): | ||
1546 | shutil.copy2(srcpath, local_files_dir) | ||
1547 | |||
1548 | if new_set is not None: | ||
1549 | for fname in new_set: | ||
1550 | if fname in existing_files: | ||
1551 | origpath = existing_files.pop(fname) | ||
1552 | workpath = os.path.join(local_files_dir, fname) | ||
1553 | if not filecmp.cmp(origpath, workpath): | ||
1554 | updated[fname] = {'path' : origpath} | ||
1555 | elif fname != '.gitignore': | ||
1556 | added[fname] = {} | ||
1557 | |||
1558 | workdir = rd.getVar('WORKDIR') | ||
1559 | s = rd.getVar('S') | ||
1560 | if not s.endswith(os.sep): | ||
1561 | s += os.sep | ||
1562 | |||
1563 | if workdir != s: | ||
1564 | # Handle files where subdir= was specified | ||
1565 | for fname in list(existing_files.keys()): | ||
1566 | # FIXME handle both subdir starting with BP and not? | ||
1567 | fworkpath = os.path.join(workdir, fname) | ||
1568 | if fworkpath.startswith(s): | ||
1569 | fpath = os.path.join(srctree, os.path.relpath(fworkpath, s)) | ||
1570 | if os.path.exists(fpath): | ||
1571 | origpath = existing_files.pop(fname) | ||
1572 | if not filecmp.cmp(origpath, fpath): | ||
1573 | updated[fpath] = {'path' : origpath} | ||
1574 | |||
1575 | removed = existing_files | ||
1576 | return (updated, added, removed) | 1497 | return (updated, added, removed) |
1577 | 1498 | ||
1578 | 1499 | ||
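The block deleted above was the old reconciliation pass: filecmp comparisons of each new_set entry against recipe space, plus special-casing of files unpacked with subdir= underneath S, with whatever remained in existing_files becoming removed. All of that is redundant now that the git status of the source tree drives the classification directly.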
@@ -1590,7 +1511,7 @@ def _determine_files_dir(rd): | |||
1590 | 1511 | ||
1591 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): | 1512 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): |
1592 | """Implement the 'srcrev' mode of update-recipe""" | 1513 | """Implement the 'srcrev' mode of update-recipe""" |
1593 | import bb | 1514 | import bb.process |
1594 | import oe.recipeutils | 1515 | import oe.recipeutils |
1595 | 1516 | ||
1596 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 1517 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
@@ -1628,6 +1549,7 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1628 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1549 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1629 | srctreebase = workspace[recipename]['srctreebase'] | 1550 | srctreebase = workspace[recipename]['srctreebase'] |
1630 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | 1551 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1552 | removedentries = {} | ||
1631 | if not no_remove: | 1553 | if not no_remove: |
1632 | # Find list of existing patches in recipe file | 1554 | # Find list of existing patches in recipe file |
1633 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1555 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
@@ -1691,7 +1613,6 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1691 | 1613 | ||
1692 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): | 1614 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): |
1693 | """Implement the 'patch' mode of update-recipe""" | 1615 | """Implement the 'patch' mode of update-recipe""" |
1694 | import bb | ||
1695 | import oe.recipeutils | 1616 | import oe.recipeutils |
1696 | 1617 | ||
1697 | recipefile = rd.getVar('FILE') | 1618 | recipefile = rd.getVar('FILE') |
@@ -1805,6 +1726,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1805 | for basepath, param in upd_p.items(): | 1726 | for basepath, param in upd_p.items(): |
1806 | path = param['path'] | 1727 | path = param['path'] |
1807 | patchdir = param.get('patchdir', ".") | 1728 | patchdir = param.get('patchdir', ".") |
1729 | patchdir_param = {} | ||
1808 | if patchdir != "." : | 1730 | if patchdir != "." : |
1809 | patchdir_param = dict(patchdir_params) | 1731 | patchdir_param = dict(patchdir_params) |
1810 | if patchdir_param: | 1732 | if patchdir_param: |
@@ -1870,6 +1792,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1870 | 1792 | ||
1871 | def _guess_recipe_update_mode(srctree, rdata): | 1793 | def _guess_recipe_update_mode(srctree, rdata): |
1872 | """Guess the recipe update mode to use""" | 1794 | """Guess the recipe update mode to use""" |
1795 | import bb.process | ||
1873 | src_uri = (rdata.getVar('SRC_URI') or '').split() | 1796 | src_uri = (rdata.getVar('SRC_URI') or '').split() |
1874 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] | 1797 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] |
1875 | if not git_uris: | 1798 | if not git_uris: |
@@ -1891,6 +1814,8 @@ def _guess_recipe_update_mode(srctree, rdata): | |||
1891 | return 'patch' | 1814 | return 'patch' |
1892 | 1815 | ||
1893 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): | 1816 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): |
1817 | import bb.data | ||
1818 | import bb.process | ||
1894 | srctree = workspace[recipename]['srctree'] | 1819 | srctree = workspace[recipename]['srctree'] |
1895 | if mode == 'auto': | 1820 | if mode == 'auto': |
1896 | mode = _guess_recipe_update_mode(srctree, rd) | 1821 | mode = _guess_recipe_update_mode(srctree, rd) |
@@ -2013,6 +1938,7 @@ def status(args, config, basepath, workspace): | |||
2013 | 1938 | ||
2014 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | 1939 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): |
2015 | """Reset one or more recipes""" | 1940 | """Reset one or more recipes""" |
1941 | import bb.process | ||
2016 | import oe.path | 1942 | import oe.path |
2017 | 1943 | ||
2018 | def clean_preferred_provider(pn, layerconf_path): | 1944 | def clean_preferred_provider(pn, layerconf_path): |
@@ -2025,7 +1951,7 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
2025 | lines = f.readlines() | 1951 | lines = f.readlines() |
2026 | with open(new_layerconf_file, 'a') as nf: | 1952 | with open(new_layerconf_file, 'a') as nf: |
2027 | for line in lines: | 1953 | for line in lines: |
2028 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$' | 1954 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$' |
2029 | if not re.match(pprovider_exp, line): | 1955 | if not re.match(pprovider_exp, line): |
2030 | nf.write(line) | 1956 | nf.write(line) |
2031 | else: | 1957 | else: |
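Escaping matters here because recipe names may contain regex metacharacters. A quick check of the fixed pattern:

    import re

    pn = 'gtk+'
    pattern = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'
    # Unescaped, the trailing '+' is a quantifier, so 'gtk+' would match
    # 'gtk', 'gtkk', ... but never the literal provider name.
    assert re.match(pattern, 'PREFERRED_PROVIDER_virtual/gtk = "gtk+"')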
@@ -2116,8 +2042,6 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
2116 | 2042 | ||
2117 | def reset(args, config, basepath, workspace): | 2043 | def reset(args, config, basepath, workspace): |
2118 | """Entry point for the devtool 'reset' subcommand""" | 2044 | """Entry point for the devtool 'reset' subcommand""" |
2119 | import bb | ||
2120 | import shutil | ||
2121 | 2045 | ||
2122 | recipes = "" | 2046 | recipes = "" |
2123 | 2047 | ||
@@ -2396,6 +2320,7 @@ def register_commands(subparsers, context): | |||
2396 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') | 2320 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') |
2397 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') | 2321 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') |
2398 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") | 2322 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") |
2323 | parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe') | ||
2399 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) | 2324 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) |
2400 | 2325 | ||
2401 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', | 2326 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', |
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py index fa5b8ef3c7..d9aca6e2db 100644 --- a/scripts/lib/devtool/upgrade.py +++ b/scripts/lib/devtool/upgrade.py | |||
@@ -32,7 +32,7 @@ def _run(cmd, cwd=''): | |||
32 | 32 | ||
33 | def _get_srctree(tmpdir): | 33 | def _get_srctree(tmpdir): |
34 | srctree = tmpdir | 34 | srctree = tmpdir |
35 | dirs = scriptutils.filter_src_subdirs(tmpdir) | 35 | dirs = os.listdir(tmpdir) |
36 | if len(dirs) == 1: | 36 | if len(dirs) == 1: |
37 | srctree = os.path.join(tmpdir, dirs[0]) | 37 | srctree = os.path.join(tmpdir, dirs[0]) |
38 | else: | 38 | else: |
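scriptutils.filter_src_subdirs is dropped here and in create.py below; presumably the move to a dedicated UNPACKDIR means the extraction directory no longer accumulates fetcher artefacts that needed filtering out, so a plain os.listdir() suffices to detect the single top-level source directory.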
@@ -76,19 +76,19 @@ def _rename_recipe_dirs(oldpv, newpv, path): | |||
76 | bb.utils.rename(os.path.join(path, oldfile), | 76 | bb.utils.rename(os.path.join(path, oldfile), |
77 | os.path.join(path, newfile)) | 77 | os.path.join(path, newfile)) |
78 | 78 | ||
79 | def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): | 79 | def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path): |
80 | oldrecipe = os.path.basename(oldrecipe) | 80 | oldrecipe = os.path.basename(oldrecipe) |
81 | if oldrecipe.endswith('_%s.bb' % oldpv): | 81 | if oldrecipe.endswith('_%s.bb' % oldpv): |
82 | newrecipe = '%s_%s.bb' % (bpn, newpv) | 82 | newrecipe = '%s_%s.bb' % (pn, newpv) |
83 | if oldrecipe != newrecipe: | 83 | if oldrecipe != newrecipe: |
84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) | 84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) |
85 | else: | 85 | else: |
86 | newrecipe = oldrecipe | 86 | newrecipe = oldrecipe |
87 | return os.path.join(path, newrecipe) | 87 | return os.path.join(path, newrecipe) |
88 | 88 | ||
89 | def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path): | 89 | def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path): |
90 | _rename_recipe_dirs(oldpv, newpv, path) | 90 | _rename_recipe_dirs(oldpv, newpv, path) |
91 | return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) | 91 | return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path) |
92 | 92 | ||
93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): | 93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): |
94 | """Writes an append file""" | 94 | """Writes an append file""" |
@@ -169,6 +169,7 @@ def _get_uri(rd): | |||
169 | 169 | ||
170 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): | 170 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): |
171 | """Extract sources of a recipe with a new version""" | 171 | """Extract sources of a recipe with a new version""" |
172 | import oe.patch | ||
172 | 173 | ||
173 | def __run(cmd): | 174 | def __run(cmd): |
174 | """Simple wrapper which calls _run with srctree as cwd""" | 175 | """Simple wrapper which calls _run with srctree as cwd""" |
@@ -187,9 +188,9 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
187 | if uri.startswith('git://') or uri.startswith('gitsm://'): | 188 | if uri.startswith('git://') or uri.startswith('gitsm://'): |
188 | __run('git fetch') | 189 | __run('git fetch') |
189 | __run('git checkout %s' % rev) | 190 | __run('git checkout %s' % rev) |
190 | __run('git tag -f devtool-base-new') | 191 | __run('git tag -f --no-sign devtool-base-new') |
191 | __run('git submodule update --recursive') | 192 | __run('git submodule update --recursive') |
192 | __run('git submodule foreach \'git tag -f devtool-base-new\'') | 193 | __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'') |
193 | (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') | 194 | (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') |
194 | paths += [os.path.join(srctree, p) for p in stdout.splitlines()] | 195 | paths += [os.path.join(srctree, p) for p in stdout.splitlines()] |
195 | checksums = {} | 196 | checksums = {} |
@@ -256,7 +257,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
256 | useroptions = [] | 257 | useroptions = [] |
257 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) | 258 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) |
258 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) | 259 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) |
259 | __run('git tag -f devtool-base-%s' % newpv) | 260 | __run('git tag -f --no-sign devtool-base-%s' % newpv) |
260 | 261 | ||
261 | revs = {} | 262 | revs = {} |
262 | for path in paths: | 263 | for path in paths: |
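The --no-sign additions to the three git tag invocations keep devtool's devtool-base-* bookkeeping tags unsigned even for users who have tag.gpgSign enabled in their git configuration, where a plain "git tag -f" would otherwise trigger a signing attempt (and possibly a passphrase prompt) on every upgrade.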
@@ -335,19 +336,19 @@ def _add_license_diff_to_recipe(path, diff): | |||
335 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): | 336 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): |
336 | """Creates the new recipe under workspace""" | 337 | """Creates the new recipe under workspace""" |
337 | 338 | ||
338 | bpn = rd.getVar('BPN') | 339 | pn = rd.getVar('PN') |
339 | path = os.path.join(workspace, 'recipes', bpn) | 340 | path = os.path.join(workspace, 'recipes', pn) |
340 | bb.utils.mkdirhier(path) | 341 | bb.utils.mkdirhier(path) |
341 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) | 342 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) |
342 | if not copied: | 343 | if not copied: |
343 | raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn) | 344 | raise DevtoolError('Internal error - no files were copied for recipe %s' % pn) |
344 | logger.debug('Copied %s to %s' % (copied, path)) | 345 | logger.debug('Copied %s to %s' % (copied, path)) |
345 | 346 | ||
346 | oldpv = rd.getVar('PV') | 347 | oldpv = rd.getVar('PV') |
347 | if not newpv: | 348 | if not newpv: |
348 | newpv = oldpv | 349 | newpv = oldpv |
349 | origpath = rd.getVar('FILE') | 350 | origpath = rd.getVar('FILE') |
350 | fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path) | 351 | fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path) |
351 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) | 352 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) |
352 | 353 | ||
353 | newvalues = {} | 354 | newvalues = {} |
@@ -534,6 +535,15 @@ def _generate_license_diff(old_licenses, new_licenses): | |||
534 | diff = diff + line | 535 | diff = diff + line |
535 | return diff | 536 | return diff |
536 | 537 | ||
538 | def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil): | ||
539 | tasks = [] | ||
540 | for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split(): | ||
541 | logger.info('Running extra recipe upgrade task: %s' % task) | ||
542 | res = tinfoil.build_targets(pn, task, handle_events=True) | ||
543 | |||
544 | if not res: | ||
545 | raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn)) | ||
546 | |||
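_run_recipe_upgrade_extra_tasks gives recipes a hook into devtool upgrade: each task named in RECIPE_UPGRADE_EXTRA_TASKS is built via tinfoil after the upgraded recipe is written, and any failure aborts with a DevtoolError. A hypothetical recipe snippet (task name illustrative):

    RECIPE_UPGRADE_EXTRA_TASKS += "do_update_crates"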
537 | def upgrade(args, config, basepath, workspace): | 547 | def upgrade(args, config, basepath, workspace): |
538 | """Entry point for the devtool 'upgrade' subcommand""" | 548 | """Entry point for the devtool 'upgrade' subcommand""" |
539 | 549 | ||
@@ -561,7 +571,7 @@ def upgrade(args, config, basepath, workspace): | |||
561 | else: | 571 | else: |
562 | srctree = standard.get_default_srctree(config, pn) | 572 | srctree = standard.get_default_srctree(config, pn) |
563 | 573 | ||
564 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | 574 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) |
565 | 575 | ||
566 | # try to automatically discover latest version and revision if not provided on command line | 576 | # try to automatically discover latest version and revision if not provided on command line |
567 | if not args.version and not args.srcrev: | 577 | if not args.version and not args.srcrev: |
@@ -601,7 +611,7 @@ def upgrade(args, config, basepath, workspace): | |||
601 | license_diff = _generate_license_diff(old_licenses, new_licenses) | 611 | license_diff = _generate_license_diff(old_licenses, new_licenses) |
602 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) | 612 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) |
603 | except (bb.process.CmdError, DevtoolError) as e: | 613 | except (bb.process.CmdError, DevtoolError) as e: |
604 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN')) | 614 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN')) |
605 | _upgrade_error(e, recipedir, srctree, args.keep_failure) | 615 | _upgrade_error(e, recipedir, srctree, args.keep_failure) |
606 | standard._add_md5(config, pn, os.path.dirname(rf)) | 616 | standard._add_md5(config, pn, os.path.dirname(rf)) |
607 | 617 | ||
@@ -609,6 +619,8 @@ def upgrade(args, config, basepath, workspace): | |||
609 | copied, config.workspace_path, rd) | 619 | copied, config.workspace_path, rd) |
610 | standard._add_md5(config, pn, af) | 620 | standard._add_md5(config, pn, af) |
611 | 621 | ||
622 | _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil) | ||
623 | |||
612 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 624 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
613 | 625 | ||
614 | logger.info('Upgraded source extracted to %s' % srctree) | 626 | logger.info('Upgraded source extracted to %s' % srctree) |
@@ -643,18 +655,28 @@ def latest_version(args, config, basepath, workspace): | |||
643 | return 0 | 655 | return 0 |
644 | 656 | ||
645 | def check_upgrade_status(args, config, basepath, workspace): | 657 | def check_upgrade_status(args, config, basepath, workspace): |
658 | def _print_status(recipe): | ||
659 | print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'], | ||
660 | recipe['cur_ver'], | ||
661 | recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"), | ||
662 | recipe['maintainer'], | ||
663 | recipe['revision'] if recipe['revision'] != 'N/A' else "", | ||
664 | "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else "")) | ||
646 | if not args.recipe: | 665 | if not args.recipe: |
647 | logger.info("Checking the upstream status for all recipes may take a few minutes") | 666 | logger.info("Checking the upstream status for all recipes may take a few minutes") |
648 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) | 667 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) |
649 | for result in results: | 668 | for recipegroup in results: |
650 | # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason | 669 | upgrades = [r for r in recipegroup if r['status'] != 'MATCH'] |
651 | if args.all or result[1] != 'MATCH': | 670 | currents = [r for r in recipegroup if r['status'] == 'MATCH'] |
652 | print("{:25} {:15} {:15} {} {} {}".format( result[0], | 671 | if len(upgrades) > 1: |
653 | result[2], | 672 | print("These recipes need to be upgraded together {") |
654 | result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), | 673 | for r in sorted(upgrades, key=lambda r:r['pn']): |
655 | result[4], | 674 | _print_status(r) |
656 | result[5] if result[5] != 'N/A' else "", | 675 | if len(upgrades) > 1: |
657 | "cannot be updated due to: %s" %(result[6]) if result[6] else "")) | 676 | print("}") |
677 | for r in currents: | ||
678 | if args.all: | ||
679 | _print_status(r) | ||
658 | 680 | ||
659 | def register_commands(subparsers, context): | 681 | def register_commands(subparsers, context): |
660 | """Register devtool subcommands from this plugin""" | 682 | """Register devtool subcommands from this plugin""" |
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py index 964817766b..bf39f71b11 100644 --- a/scripts/lib/devtool/utilcmds.py +++ b/scripts/lib/devtool/utilcmds.py | |||
@@ -64,7 +64,7 @@ def configure_help(args, config, basepath, workspace): | |||
64 | b = rd.getVar('B') | 64 | b = rd.getVar('B') |
65 | s = rd.getVar('S') | 65 | s = rd.getVar('S') |
66 | configurescript = os.path.join(s, 'configure') | 66 | configurescript = os.path.join(s, 'configure') |
67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or []) | 67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd)) |
68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') | 68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') |
69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') | 69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') |
70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') | 70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') |
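Using bb.build.listtasks(rd) replaces peeking at the internal __BBTASKS datastore variable with BitBake's public helper for enumerating a recipe's tasks; the computed confdisabled value is the same.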
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py index 10945d6008..041d79f162 100644 --- a/scripts/lib/recipetool/append.py +++ b/scripts/lib/recipetool/append.py | |||
@@ -317,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None): | |||
317 | import oe.recipeutils | 317 | import oe.recipeutils |
318 | 318 | ||
319 | srcdir = rd.getVar('S') | 319 | srcdir = rd.getVar('S') |
320 | workdir = rd.getVar('WORKDIR') | 320 | unpackdir = rd.getVar('UNPACKDIR') |
321 | 321 | ||
322 | import bb.fetch | 322 | import bb.fetch |
323 | simplified = {} | 323 | simplified = {} |
@@ -336,10 +336,10 @@ def appendsrc(args, files, rd, extralines=None): | |||
336 | src_destdir = os.path.dirname(srcfile) | 336 | src_destdir = os.path.dirname(srcfile) |
337 | if not args.use_workdir: | 337 | if not args.use_workdir: |
338 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): | 338 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): |
339 | srcdir = os.path.join(workdir, 'git') | 339 | srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX')) |
340 | if not bb.data.inherits_class('kernel-yocto', rd): | 340 | if not bb.data.inherits_class('kernel-yocto', rd): |
341 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') | 341 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}') |
342 | src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) | 342 | src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir) |
343 | src_destdir = os.path.normpath(src_destdir) | 343 | src_destdir = os.path.normpath(src_destdir) |
344 | 344 | ||
345 | if src_destdir and src_destdir != '.': | 345 | if src_destdir and src_destdir != '.': |
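The same UNPACKDIR migration applied to recipetool append: the kernel special case no longer hard-codes ${WORKDIR}/git but composes the checkout path from the fetcher's configured destination. A sketch, assuming BB_GIT_DEFAULT_DESTSUFFIX falls back to 'git' when unset:

    unpackdir = rd.getVar('UNPACKDIR')
    destsuffix = rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX') or 'git'  # fallback is an assumption
    srcdir = os.path.join(unpackdir, destsuffix)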
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py index 8e9ff38db6..ef0ba974a9 100644 --- a/scripts/lib/recipetool/create.py +++ b/scripts/lib/recipetool/create.py | |||
@@ -18,6 +18,8 @@ from urllib.parse import urlparse, urldefrag, urlsplit | |||
18 | import hashlib | 18 | import hashlib |
19 | import bb.fetch2 | 19 | import bb.fetch2 |
20 | logger = logging.getLogger('recipetool') | 20 | logger = logging.getLogger('recipetool') |
21 | from oe.license import tidy_licenses | ||
22 | from oe.license_finder import find_licenses | ||
21 | 23 | ||
22 | tinfoil = None | 24 | tinfoil = None |
23 | plugins = None | 25 | plugins = None |
@@ -528,7 +530,7 @@ def create_recipe(args): | |||
528 | if ftmpdir and args.keep_temp: | 530 | if ftmpdir and args.keep_temp: |
529 | logger.info('Fetch temp directory is %s' % ftmpdir) | 531 | logger.info('Fetch temp directory is %s' % ftmpdir) |
530 | 532 | ||
531 | dirlist = scriptutils.filter_src_subdirs(srctree) | 533 | dirlist = os.listdir(srctree) |
532 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) | 534 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) |
533 | if len(dirlist) == 1: | 535 | if len(dirlist) == 1: |
534 | singleitem = os.path.join(srctree, dirlist[0]) | 536 | singleitem = os.path.join(srctree, dirlist[0]) |
@@ -637,7 +639,6 @@ def create_recipe(args): | |||
637 | if len(splitline) > 1: | 639 | if len(splitline) > 1: |
638 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): | 640 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): |
639 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' | 641 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' |
640 | srcsubdir = 'git' | ||
641 | break | 642 | break |
642 | 643 | ||
643 | if args.src_subdir: | 644 | if args.src_subdir: |
@@ -735,7 +736,7 @@ def create_recipe(args): | |||
735 | if srcsubdir and not args.binary: | 736 | if srcsubdir and not args.binary: |
736 | # (for binary packages we explicitly specify subdir= when fetching to | 737 | # (for binary packages we explicitly specify subdir= when fetching to |
737 | # match the default value of S, so we don't need to set it in that case) | 738 | # match the default value of S, so we don't need to set it in that case) |
738 | lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) | 739 | lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir) |
739 | lines_before.append('') | 740 | lines_before.append('') |
740 | 741 | ||
741 | if pkgarch: | 742 | if pkgarch: |
@@ -764,6 +765,7 @@ def create_recipe(args): | |||
764 | extrafiles = extravalues.pop('extrafiles', {}) | 765 | extrafiles = extravalues.pop('extrafiles', {}) |
765 | extra_pn = extravalues.pop('PN', None) | 766 | extra_pn = extravalues.pop('PN', None) |
766 | extra_pv = extravalues.pop('PV', None) | 767 | extra_pv = extravalues.pop('PV', None) |
768 | run_tasks = extravalues.pop('run_tasks', "").split() | ||
767 | 769 | ||
768 | if extra_pv and not realpv: | 770 | if extra_pv and not realpv: |
769 | realpv = extra_pv | 771 | realpv = extra_pv |
@@ -824,7 +826,8 @@ def create_recipe(args): | |||
824 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) | 826 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) |
825 | bb.utils.mkdirhier(extraoutdir) | 827 | bb.utils.mkdirhier(extraoutdir) |
826 | for destfn, extrafile in extrafiles.items(): | 828 | for destfn, extrafile in extrafiles.items(): |
827 | shutil.move(extrafile, os.path.join(extraoutdir, destfn)) | 829 | fn = destfn.format(pn=pn, pv=realpv) |
830 | shutil.move(extrafile, os.path.join(extraoutdir, fn)) | ||
828 | 831 | ||
829 | lines = lines_before | 832 | lines = lines_before |
830 | lines_before = [] | 833 | lines_before = [] |
@@ -839,7 +842,7 @@ def create_recipe(args): | |||
839 | line = line.replace(realpv, '${PV}') | 842 | line = line.replace(realpv, '${PV}') |
840 | if pn: | 843 | if pn: |
841 | line = line.replace(pn, '${BPN}') | 844 | line = line.replace(pn, '${BPN}') |
842 | if line == 'S = "${WORKDIR}/${BPN}-${PV}"': | 845 | if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line: |
843 | skipblank = True | 846 | skipblank = True |
844 | continue | 847 | continue |
845 | elif line.startswith('SRC_URI = '): | 848 | elif line.startswith('SRC_URI = '): |
@@ -917,6 +920,10 @@ def create_recipe(args): | |||
917 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) | 920 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) |
918 | tinfoil.modified_files() | 921 | tinfoil.modified_files() |
919 | 922 | ||
923 | for task in run_tasks: | ||
924 | logger.info("Running task %s" % task) | ||
925 | tinfoil.build_file_sync(outfile, task) | ||
926 | |||
920 | if tempsrc: | 927 | if tempsrc: |
921 | if args.keep_temp: | 928 | if args.keep_temp: |
922 | logger.info('Preserving temporary directory %s' % tempsrc) | 929 | logger.info('Preserving temporary directory %s' % tempsrc) |
@@ -944,23 +951,13 @@ def fixup_license(value): | |||
944 | return '(' + value + ')' | 951 | return '(' + value + ')' |
945 | return value | 952 | return value |
946 | 953 | ||
947 | def tidy_licenses(value): | ||
948 | """Flat, split and sort licenses""" | ||
949 | from oe.license import flattened_licenses | ||
950 | def _choose(a, b): | ||
951 | str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold) | ||
952 | return ["(%s | %s)" % (str_a, str_b)] | ||
953 | if not isinstance(value, str): | ||
954 | value = " & ".join(value) | ||
955 | return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold) | ||
956 | |||
957 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): | 954 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): |
958 | lichandled = [x for x in handled if x[0] == 'license'] | 955 | lichandled = [x for x in handled if x[0] == 'license'] |
959 | if lichandled: | 956 | if lichandled: |
960 | # Someone else has already handled the license vars, just return their value | 957 | # Someone else has already handled the license vars, just return their value |
961 | return lichandled[0][1] | 958 | return lichandled[0][1] |
962 | 959 | ||
963 | licvalues = guess_license(srctree, d) | 960 | licvalues = find_licenses(srctree, d) |
964 | licenses = [] | 961 | licenses = [] |
965 | lic_files_chksum = [] | 962 | lic_files_chksum = [] |
966 | lic_unknown = [] | 963 | lic_unknown = [] |
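License detection is delegated to the new oe.license_finder module imported at the top of this file; judging by the unchanged consumers below, find_licenses(srctree, d) yields the same (license, path, md5sum) tuples that guess_license() used to return:

    from oe.license_finder import find_licenses

    licvalues = find_licenses(srctree, d)
    for license, licfile, md5value in licvalues:
        print(license, licfile, md5value)  # e.g. MIT LICENSE <md5 hex digest>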
@@ -1040,222 +1037,9 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d): | |||
1040 | handled.append(('license', licvalues)) | 1037 | handled.append(('license', licvalues)) |
1041 | return licvalues | 1038 | return licvalues |
1042 | 1039 | ||
1043 | def get_license_md5sums(d, static_only=False, linenumbers=False): | ||
1044 | import bb.utils | ||
1045 | import csv | ||
1046 | md5sums = {} | ||
1047 | if not static_only and not linenumbers: | ||
1048 | # Gather md5sums of license files in common license dir | ||
1049 | commonlicdir = d.getVar('COMMON_LICENSE_DIR') | ||
1050 | for fn in os.listdir(commonlicdir): | ||
1051 | md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn)) | ||
1052 | md5sums[md5value] = fn | ||
1053 | |||
1054 | # The following were extracted from common values in various recipes | ||
1055 | # (double checking the license against the license file itself, not just | ||
1056 | # the LICENSE value in the recipe) | ||
1057 | |||
1058 | # Read license md5sums from csv file | ||
1059 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | ||
1060 | for path in (d.getVar('BBPATH').split(':') | ||
1061 | + [os.path.join(scripts_path, '..', '..')]): | ||
1062 | csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv') | ||
1063 | if os.path.isfile(csv_path): | ||
1064 | with open(csv_path, newline='') as csv_file: | ||
1065 | fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5'] | ||
1066 | reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames) | ||
1067 | for row in reader: | ||
1068 | if linenumbers: | ||
1069 | md5sums[row['md5sum']] = ( | ||
1070 | row['license'], row['beginline'], row['endline'], row['md5']) | ||
1071 | else: | ||
1072 | md5sums[row['md5sum']] = row['license'] | ||
1073 | |||
1074 | return md5sums | ||
1075 | |||
1076 | def crunch_known_licenses(d): | ||
1077 | ''' | ||
1078 | Calculate the MD5 checksums for the crunched versions of all common | ||
1079 | licenses. Also add additional known checksums. | ||
1080 | ''' | ||
1081 | |||
1082 | crunched_md5sums = {} | ||
1083 | |||
1084 | # common licenses | ||
1085 | crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only' | ||
1086 | crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only' | ||
1087 | crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only' | ||
1088 | |||
1089 | # The following two were gleaned from the "forever" npm package | ||
1090 | crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC' | ||
1091 | # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt | ||
1092 | crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause' | ||
1093 | # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE | ||
1094 | crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only' | ||
1095 | # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt | ||
1096 | crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only' | ||
1097 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1 | ||
1098 | crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only' | ||
1099 | # unixODBC-2.3.4 COPYING | ||
1100 | crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only' | ||
1101 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3 | ||
1102 | crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only' | ||
1103 | # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10 | ||
1104 | crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0' | ||
1105 | |||
1106 | # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD | ||
1107 | crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause' | ||
1108 | # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE | ||
1109 | crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause' | ||
1110 | # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE | ||
1111 | crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause' | ||
1112 | # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE | ||
1113 | crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause' | ||
1114 | # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE | ||
1115 | crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause' | ||
1116 | # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE | ||
1117 | crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause' | ||
1118 | # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE | ||
1119 | crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause' | ||
1120 | # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE | ||
1121 | crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause' | ||
1122 | # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE | ||
1123 | crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause' | ||
1124 | # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE | ||
1125 | crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT' | ||
1126 | # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE | ||
1127 | crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT' | ||
1128 | # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE | ||
1129 | crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0' | ||
1130 | # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md | ||
1131 | crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0' | ||
1132 | # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE | ||
1133 | crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0' | ||
1134 | # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt | ||
1135 | crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0' | ||
1136 | # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE | ||
1137 | crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0' | ||
1138 | # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE | ||
1139 | crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense' | ||
1140 | # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md | ||
1141 | crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib' | ||
1142 | |||
1143 | commonlicdir = d.getVar('COMMON_LICENSE_DIR') | ||
1144 | for fn in sorted(os.listdir(commonlicdir)): | ||
1145 | md5value, lictext = crunch_license(os.path.join(commonlicdir, fn)) | ||
1146 | if md5value not in crunched_md5sums: | ||
1147 | crunched_md5sums[md5value] = fn | ||
1148 | elif fn != crunched_md5sums[md5value]: | ||
1149 | bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn)) | ||
1150 | else: | ||
1151 | bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value])) | ||
1152 | |||
1153 | return crunched_md5sums | ||
1154 | |||
1155 | def crunch_license(licfile): | ||
1156 | ''' | ||
1157 | Remove non-material text from a license file and then calculate its | ||
1158 | md5sum. This works well for licenses that contain a copyright statement, | ||
1159 | but is also a useful way to handle people's insistence upon reformatting | ||
1160 | the license text slightly (with no material difference to the text of the | ||
1161 | license). | ||
1162 | ''' | ||
1163 | |||
1164 | import oe.utils | ||
1165 | |||
1166 | # Note: these are carefully constructed! | ||
1167 | license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$') | ||
1168 | license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$') | ||
1169 | copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$') | ||
1170 | disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$') | ||
1171 | email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$') | ||
1172 | header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$') | ||
1173 | tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$') | ||
1174 | url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$') | ||
1175 | |||
1176 | lictext = [] | ||
1177 | with open(licfile, 'r', errors='surrogateescape') as f: | ||
1178 | for line in f: | ||
1179 | # Drop opening statements | ||
1180 | if copyright_re.match(line): | ||
1181 | continue | ||
1182 | elif disclaimer_re.match(line): | ||
1183 | continue | ||
1184 | elif email_re.match(line): | ||
1185 | continue | ||
1186 | elif header_re.match(line): | ||
1187 | continue | ||
1188 | elif tag_re.match(line): | ||
1189 | continue | ||
1190 | elif url_re.match(line): | ||
1191 | continue | ||
1192 | elif license_title_re.match(line): | ||
1193 | continue | ||
1194 | elif license_statement_re.match(line): | ||
1195 | continue | ||
1196 | # Strip comment symbols | ||
1197 | line = line.replace('*', '') \ | ||
1198 | .replace('#', '') | ||
1199 | # Unify spelling | ||
1200 | line = line.replace('sub-license', 'sublicense') | ||
1201 | # Squash spaces | ||
1202 | line = oe.utils.squashspaces(line.strip()) | ||
1203 | # Replace smart quotes, double quotes and backticks with single quotes | ||
1204 | line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') | ||
1205 | # Unify brackets | ||
1206 | line = line.replace("{", "[").replace("}", "]") | ||
1207 | if line: | ||
1208 | lictext.append(line) | ||
1209 | |||
1210 | m = hashlib.md5() | ||
1211 | try: | ||
1212 | m.update(' '.join(lictext).encode('utf-8')) | ||
1213 | md5val = m.hexdigest() | ||
1214 | except UnicodeEncodeError: | ||
1215 | md5val = None | ||
1216 | lictext = '' | ||
1217 | return md5val, lictext | ||
1218 | |||
1219 | def guess_license(srctree, d): | ||
1220 | import bb | ||
1221 | md5sums = get_license_md5sums(d) | ||
1222 | |||
1223 | crunched_md5sums = crunch_known_licenses(d) | ||
1224 | |||
1225 | licenses = [] | ||
1226 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] | ||
1227 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go") | ||
1228 | licfiles = [] | ||
1229 | for root, dirs, files in os.walk(srctree): | ||
1230 | for fn in files: | ||
1231 | if fn.endswith(skip_extensions): | ||
1232 | continue | ||
1233 | for spec in licspecs: | ||
1234 | if fnmatch.fnmatch(fn, spec): | ||
1235 | fullpath = os.path.join(root, fn) | ||
1236 | if not fullpath in licfiles: | ||
1237 | licfiles.append(fullpath) | ||
1238 | for licfile in sorted(licfiles): | ||
1239 | md5value = bb.utils.md5_file(licfile) | ||
1240 | license = md5sums.get(md5value, None) | ||
1241 | if not license: | ||
1242 | crunched_md5, lictext = crunch_license(licfile) | ||
1243 | license = crunched_md5sums.get(crunched_md5, None) | ||
1244 | if lictext and not license: | ||
1245 | license = 'Unknown' | ||
1246 | logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \ | ||
1247 | "and replace `Unknown` with the license:\n" \ | ||
1248 | "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value)) | ||
1249 | if license: | ||
1250 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) | ||
1251 | |||
1252 | # FIXME should we grab at least one source file with a license header and add that too? | ||
1253 | |||
1254 | return licenses | ||
1255 | |||
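The roughly two hundred lines deleted above (get_license_md5sums, crunch_known_licenses, crunch_license and guess_license, with their embedded checksum tables) are not lost functionality: together with the new find_licenses import, this strongly suggests the detection logic and checksum data were relocated into the shared oe.license_finder module, where other tools can reuse them.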
1256 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): | 1040 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): |
1257 | """ | 1041 | """ |
1258 | Given a list of (license, path, md5sum) as returned by guess_license(), | 1042 | Given a list of (license, path, md5sum) as returned by match_licenses(), |
1259 | a dict of package name to path mappings, write out a set of | 1043 | a dict of package name to path mappings, write out a set of |
1260 | package-specific LICENSE values. | 1044 | package-specific LICENSE values. |
1261 | """ | 1045 | """ |
@@ -1284,6 +1068,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn | |||
1284 | outlicenses[pkgname] = licenses | 1068 | outlicenses[pkgname] = licenses |
1285 | return outlicenses | 1069 | return outlicenses |
1286 | 1070 | ||
1071 | def generate_common_licenses_chksums(common_licenses, d): | ||
1072 | lic_files_chksums = [] | ||
1073 | for license in tidy_licenses(common_licenses): | ||
1074 | licfile = '${COMMON_LICENSE_DIR}/' + license | ||
1075 | md5value = bb.utils.md5_file(d.expand(licfile)) | ||
1076 | lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value)) | ||
1077 | return lic_files_chksums | ||
1078 | |||
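Each entry produced by generate_common_licenses_chksums() points at the shared license text under ${COMMON_LICENSE_DIR}; as a sketch, for a hypothetical input:

    # generate_common_licenses_chksums(['MIT'], d)
    # -> ['file://${COMMON_LICENSE_DIR}/MIT;md5=<md5 of that file>']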
1287 | def read_pkgconfig_provides(d): | 1079 | def read_pkgconfig_provides(d): |
1288 | pkgdatadir = d.getVar('PKGDATA_DIR') | 1080 | pkgdatadir = d.getVar('PKGDATA_DIR') |
1289 | pkgmap = {} | 1081 | pkgmap = {} |
@@ -1418,4 +1210,3 @@ def register_commands(subparsers): | |||
1418 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) | 1210 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) |
1419 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') | 1211 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') |
1420 | parser_create.set_defaults(func=create_recipe) | 1212 | parser_create.set_defaults(func=create_recipe) |
1421 | |||
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py index a85a2f2786..4b1fa39d13 100644 --- a/scripts/lib/recipetool/create_go.py +++ b/scripts/lib/recipetool/create_go.py | |||
@@ -10,13 +10,7 @@ | |||
10 | # | 10 | # |
11 | 11 | ||
12 | 12 | ||
13 | from collections import namedtuple | ||
14 | from enum import Enum | ||
15 | from html.parser import HTMLParser | ||
16 | from recipetool.create import RecipeHandler, handle_license_vars | 13 | from recipetool.create import RecipeHandler, handle_license_vars |
17 | from recipetool.create import guess_license, tidy_licenses, fixup_license | ||
18 | from recipetool.create import determine_from_url | ||
19 | from urllib.error import URLError, HTTPError | ||
20 | 14 | ||
21 | import bb.utils | 15 | import bb.utils |
22 | import json | 16 | import json |
@@ -25,33 +19,20 @@ import os | |||
25 | import re | 19 | import re |
26 | import subprocess | 20 | import subprocess |
27 | import sys | 21 | import sys |
28 | import shutil | ||
29 | import tempfile | 22 | import tempfile |
30 | import urllib.parse | ||
31 | import urllib.request | ||
32 | 23 | ||
33 | 24 | ||
34 | GoImport = namedtuple('GoImport', 'root vcs url suffix') | ||
35 | logger = logging.getLogger('recipetool') | 25 | logger = logging.getLogger('recipetool') |
36 | CodeRepo = namedtuple( | ||
37 | 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor') | ||
38 | 26 | ||
39 | tinfoil = None | 27 | tinfoil = None |
40 | 28 | ||
41 | # Regular expression to parse pseudo semantic version | ||
42 | # see https://go.dev/ref/mod#pseudo-versions | ||
43 | re_pseudo_semver = re.compile( | ||
44 | r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$") | ||
45 | # Regular expression to parse semantic version | ||
46 | re_semver = re.compile( | ||
47 | r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$") | ||
48 | |||
49 | 29 | ||
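A self-contained check of what the pseudo-version expression accepts (timestamp and hash invented for illustration):

    import re

    re_pseudo = re.compile(  # same pattern as re_pseudo_semver above
        r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})"
        r"-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")

    m = re_pseudo.match("v0.0.0-20210101120000-abcdef123456")
    assert m.group('utc') == "20210101120000"
    assert m.group('commithash') == "abcdef123456"

A plain tagged release such as v1.2.3-rc.1 matches re_semver instead, with major, minor, patch and prerelease captured as named groups.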
50 | def tinfoil_init(instance): | 30 | def tinfoil_init(instance): |
51 | global tinfoil | 31 | global tinfoil |
52 | tinfoil = instance | 32 | tinfoil = instance |
53 | 33 | ||
54 | 34 | ||
35 | |||
55 | class GoRecipeHandler(RecipeHandler): | 36 | class GoRecipeHandler(RecipeHandler): |
56 | """Class to handle the go recipe creation""" | 37 | """Class to handle the go recipe creation""" |
57 | 38 | ||
@@ -83,578 +64,6 @@ class GoRecipeHandler(RecipeHandler): | |||
83 | 64 | ||
84 | return bindir | 65 | return bindir |
85 | 66 | ||
86 | def __resolve_repository_static(self, modulepath): | ||
87 | """Resolve the repository in a static manner | ||
88 | |||
89 | The method is based on the go implementation of | ||
90 | `repoRootFromVCSPaths` in | ||
91 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
92 | """ | ||
93 | |||
94 | url = urllib.parse.urlparse("https://" + modulepath) | ||
95 | req = urllib.request.Request(url.geturl()) | ||
96 | |||
97 | try: | ||
98 | resp = urllib.request.urlopen(req) | ||
99 | # Some module paths are just redirects to github (or some other vcs ||
100 | # host). Therefore, we check whether this module path redirects to ||
101 | # somewhere else | ||
102 | if resp.geturl() != url.geturl(): | ||
103 | bb.debug(1, "%s is redirected to %s" % ||
104 | (url.geturl(), resp.geturl())) | ||
105 | url = urllib.parse.urlparse(resp.geturl()) | ||
106 | modulepath = url.netloc + url.path | ||
107 | |||
108 | except URLError as url_err: | ||
109 | # This is probably because the module path | ||
110 | # contains the subdir and major path. Thus, | ||
111 | # we ignore this error for now | ||
112 | logger.debug( | ||
113 | 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err))) | ||
114 | |||
115 | host, _, _ = modulepath.partition('/') | ||
116 | |||
117 | class vcs(Enum): | ||
118 | pathprefix = "pathprefix" | ||
119 | regexp = "regexp" | ||
120 | type = "type" | ||
121 | repo = "repo" | ||
122 | check = "check" | ||
123 | schemelessRepo = "schemelessRepo" | ||
124 | |||
125 | # GitHub | ||
126 | vcsGitHub = {} | ||
127 | vcsGitHub[vcs.pathprefix] = "github.com" | ||
128 | vcsGitHub[vcs.regexp] = re.compile( | ||
129 | r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
130 | vcsGitHub[vcs.type] = "git" | ||
131 | vcsGitHub[vcs.repo] = "https://\\g<root>" | ||
132 | |||
133 | # Bitbucket | ||
134 | vcsBitbucket = {} | ||
135 | vcsBitbucket[vcs.pathprefix] = "bitbucket.org" | ||
136 | vcsBitbucket[vcs.regexp] = re.compile( | ||
137 | r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
138 | vcsBitbucket[vcs.type] = "git" | ||
139 | vcsBitbucket[vcs.repo] = "https://\\g<root>" | ||
140 | |||
141 | # IBM DevOps Services (JazzHub) | ||
142 | vcsIBMDevOps = {} | ||
143 | vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git" | ||
144 | vcsIBMDevOps[vcs.regexp] = re.compile( | ||
145 | r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
146 | vcsIBMDevOps[vcs.type] = "git" | ||
147 | vcsIBMDevOps[vcs.repo] = "https://\\g<root>" | ||
148 | |||
149 | # Git at Apache | ||
150 | vcsApacheGit = {} | ||
151 | vcsApacheGit[vcs.pathprefix] = "git.apache.org" | ||
152 | vcsApacheGit[vcs.regexp] = re.compile( | ||
153 | r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
154 | vcsApacheGit[vcs.type] = "git" | ||
155 | vcsApacheGit[vcs.repo] = "https://\\g<root>" | ||
156 | |||
157 | # Git at OpenStack | ||
158 | vcsOpenStackGit = {} | ||
159 | vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org" | ||
160 | vcsOpenStackGit[vcs.regexp] = re.compile( | ||
161 | r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
162 | vcsOpenStackGit[vcs.type] = "git" | ||
163 | vcsOpenStackGit[vcs.repo] = "https://\\g<root>" | ||
164 | |||
165 | # chiselapp.com for fossil | ||
166 | vcsChiselapp = {} | ||
167 | vcsChiselapp[vcs.pathprefix] = "chiselapp.com" | ||
168 | vcsChiselapp[vcs.regexp] = re.compile( | ||
169 | r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$') | ||
170 | vcsChiselapp[vcs.type] = "fossil" | ||
171 | vcsChiselapp[vcs.repo] = "https://\\g<root>" | ||
172 | |||
173 | # General syntax for any server. | ||
174 | # Must be last. | ||
175 | vcsGeneralServer = {} | ||
176 | vcsGeneralServer[vcs.regexp] = re.compile( | ||
177 | "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$") | ||
178 | vcsGeneralServer[vcs.schemelessRepo] = True | ||
179 | |||
180 | vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps, | ||
181 | vcsApacheGit, vcsOpenStackGit, vcsChiselapp, | ||
182 | vcsGeneralServer] | ||
183 | |||
184 | if modulepath.startswith("example.net") or modulepath == "rsc.io": | ||
185 | logger.warning("Suspicious module path %s" % modulepath) | ||
186 | return None | ||
187 | if modulepath.startswith("http:") or modulepath.startswith("https:"): | ||
188 | logger.warning("Import path should not start with %s or %s" % ||
189 | ("http", "https")) | ||
190 | return None | ||
191 | |||
192 | rootpath = None | ||
193 | vcstype = None | ||
194 | repourl = None | ||
195 | suffix = None | ||
196 | |||
197 | for srv in vcsPaths: | ||
198 | m = srv[vcs.regexp].match(modulepath) | ||
199 | if vcs.pathprefix in srv: | ||
200 | if host == srv[vcs.pathprefix]: | ||
201 | rootpath = m.group('root') | ||
202 | vcstype = srv[vcs.type] | ||
203 | repourl = m.expand(srv[vcs.repo]) | ||
204 | suffix = m.group('suffix') | ||
205 | break | ||
206 | elif m and srv[vcs.schemelessRepo]: | ||
207 | rootpath = m.group('root') | ||
208 | vcstype = m[vcs.type] | ||
209 | repourl = m[vcs.repo] | ||
210 | suffix = m.group('suffix') | ||
211 | break | ||
212 | |||
213 | return GoImport(rootpath, vcstype, repourl, suffix) | ||
214 | |||
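A trace of the static path for a real GitHub module: 'github.com/pkg/errors' matches the vcsGitHub regexp, so the method returns roughly

    # GoImport(root='github.com/pkg/errors', vcs='git',
    #          url='https://github.com/pkg/errors', suffix=None)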
215 | def __resolve_repository_dynamic(self, modulepath): | ||
216 | """Resolve the repository root in a dynamic manner. | ||
217 | |||
218 | The method is based on the go implementation of | ||
219 | `repoRootForImportDynamic` in | ||
220 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
221 | """ | ||
222 | url = urllib.parse.urlparse("https://" + modulepath) | ||
223 | |||
224 | class GoImportHTMLParser(HTMLParser): | ||
225 | |||
226 | def __init__(self): | ||
227 | super().__init__() | ||
228 | self.__srv = {} | ||
229 | |||
230 | def handle_starttag(self, tag, attrs): | ||
231 | if tag == 'meta' and list( | ||
232 | filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)): | ||
233 | content = list( | ||
234 | filter(lambda a: (a[0] == 'content'), attrs)) | ||
235 | if content: | ||
236 | srv = content[0][1].split() | ||
237 | self.__srv[srv[0]] = srv | ||
238 | |||
239 | def go_import(self, modulepath): | ||
240 | if modulepath in self.__srv: | ||
241 | srv = self.__srv[modulepath] | ||
242 | return GoImport(srv[0], srv[1], srv[2], None) | ||
243 | return None | ||
244 | |||
245 | url = url.geturl() + "?go-get=1" | ||
246 | req = urllib.request.Request(url) | ||
247 | |||
248 | try: | ||
249 | body = urllib.request.urlopen(req).read() | ||
250 | except HTTPError as http_err: | ||
251 | logger.warning( | ||
252 | "Unclean status when fetching page from [%s]: %s", url, str(http_err)) | ||
253 | body = http_err.fp.read() | ||
254 | except URLError as url_err: | ||
255 | logger.warning( | ||
256 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
257 | return None | ||
258 | |||
259 | parser = GoImportHTMLParser() | ||
260 | parser.feed(body.decode('utf-8')) | ||
261 | parser.close() | ||
262 | |||
263 | return parser.go_import(modulepath) | ||
264 | |||
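The dynamic lookup leans on the standard go-get discovery protocol: fetching the module path with ?go-get=1 returns a page whose go-import meta tag names the repository, e.g.

    # <meta name="go-import"
    #       content="golang.org/x/text git https://go.googlesource.com/text">
    #
    # ...which go_import('golang.org/x/text') turns into
    # GoImport('golang.org/x/text', 'git', 'https://go.googlesource.com/text', None)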
265 | def __resolve_from_golang_proxy(self, modulepath, version): | ||
266 | """ | ||
267 | Resolves repository data from golang proxy | ||
268 | """ | ||
269 | url = urllib.parse.urlparse("https://proxy.golang.org/" | ||
270 | + modulepath | ||
271 | + "/@v/" | ||
272 | + version | ||
273 | + ".info") | ||
274 | |||
275 | # Transform url to lower case, golang proxy doesn't like mixed case | ||
276 | req = urllib.request.Request(url.geturl().lower()) | ||
277 | |||
278 | try: | ||
279 | resp = urllib.request.urlopen(req) | ||
280 | except URLError as url_err: | ||
281 | logger.warning( | ||
282 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
283 | return None | ||
284 | |||
285 | golang_proxy_res = resp.read().decode('utf-8') | ||
286 | modinfo = json.loads(golang_proxy_res) | ||
287 | |||
288 | if modinfo and 'Origin' in modinfo: | ||
289 | origin = modinfo['Origin'] | ||
290 | _root_url = urllib.parse.urlparse(origin['URL']) | ||
291 | |||
292 | # We normalize the repo URL since we don't want the scheme in it | ||
293 | _subdir = origin['Subdir'] if 'Subdir' in origin else None | ||
294 | _root, _, _ = self.__split_path_version(modulepath) | ||
295 | if _subdir: | ||
296 | _root = _root[:-len(_subdir)].strip('/') | ||
297 | |||
298 | _commit = origin['Hash'] | ||
299 | _vcs = origin['VCS'] | ||
300 | return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit) | ||
301 | |||
302 | return None | ||
303 | |||
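Shape of the proxy answer the method consumes (module and values illustrative): a GET of https://proxy.golang.org/<module>/@v/<version>.info returns JSON whose optional Origin block already carries the repository URL, VCS type and commit hash:

    # {"Version": "v1.2.3",
    #  "Origin": {"VCS": "git",
    #             "URL": "https://github.com/example/mod",
    #             "Hash": "0123456789abcdef0123456789abcdef01234567"}}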
304 | def __resolve_repository(self, modulepath): | ||
305 | """ | ||
306 | Resolves src uri from go module-path | ||
307 | """ | ||
308 | repodata = self.__resolve_repository_static(modulepath) | ||
309 | if not repodata or not repodata.url: | ||
310 | repodata = self.__resolve_repository_dynamic(modulepath) | ||
311 | if not repodata or not repodata.url: | ||
312 | logger.error( | ||
313 | "Could not resolve repository for module path '%s'" % modulepath) | ||
314 | # There is no way to recover from this | ||
315 | sys.exit(14) | ||
316 | if repodata: | ||
317 | logger.debug(1, "Resolved download path for import '%s' => %s" % ( | ||
318 | modulepath, repodata.url)) | ||
319 | return repodata | ||
320 | |||
321 | def __split_path_version(self, path): | ||
322 | i = len(path) | ||
323 | dot = False | ||
324 | for j in range(i, 0, -1): | ||
325 | if path[j - 1] < '0' or path[j - 1] > '9': | ||
326 | break | ||
327 | if path[j - 1] == '.': | ||
328 | dot = True | ||
329 | break | ||
330 | i = j - 1 | ||
331 | |||
332 | if i <= 1 or i == len( | ||
333 | path) or path[i - 1] != 'v' or path[i - 2] != '/': | ||
334 | return path, "", True | ||
335 | |||
336 | prefix, pathMajor = path[:i - 2], path[i - 2:] | ||
337 | if dot or len( | ||
338 | pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1": | ||
339 | return path, "", False | ||
340 | |||
341 | return prefix, pathMajor, True | ||
342 | |||
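Worked examples of the major-version split above (hypothetical paths):

    # __split_path_version('example.com/mod/v2') -> ('example.com/mod', '/v2', True)
    # __split_path_version('example.com/mod')    -> ('example.com/mod', '', True)
    # __split_path_version('example.com/mod/v1') -> ('example.com/mod/v1', '', False)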
343 | def __get_path_major(self, pathMajor): | ||
344 | if not pathMajor: | ||
345 | return "" | ||
346 | |||
347 | if pathMajor[0] != '/' and pathMajor[0] != '.': | ||
348 | logger.error( | ||
349 | "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor) | ||
350 | |||
351 | if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"): | ||
352 | pathMajor = pathMajor[:len("-unstable") - 2] | ||
353 | |||
354 | return pathMajor[1:] | ||
355 | |||
356 | def __build_coderepo(self, repo, path): | ||
357 | codedir = "" | ||
358 | pathprefix, pathMajor, _ = self.__split_path_version(path) | ||
359 | if repo.root == path: | ||
360 | pathprefix = path | ||
361 | elif path.startswith(repo.root): | ||
362 | codedir = pathprefix[len(repo.root):].strip('/') | ||
363 | |||
364 | pseudoMajor = self.__get_path_major(pathMajor) | ||
365 | |||
366 | logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'", | ||
367 | repo.root, codedir, pathprefix, pathMajor, pseudoMajor) | ||
368 | |||
369 | return CodeRepo(path, repo.root, codedir, | ||
370 | pathMajor, pathprefix, pseudoMajor) | ||
371 | |||
372 | def __resolve_version(self, repo, path, version): | ||
373 | hash = None | ||
374 | coderoot = self.__build_coderepo(repo, path) | ||
375 | |||
376 | def vcs_fetch_all(): | ||
377 | tmpdir = tempfile.mkdtemp() | ||
378 | clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir) | ||
379 | bb.process.run(clone_cmd) | ||
380 | log_cmd = "git log --all --pretty='%H %d' --decorate=short" | ||
381 | output, _ = bb.process.run( | ||
382 | log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir) | ||
383 | bb.utils.prunedir(tmpdir) | ||
384 | return output.strip().split('\n') | ||
385 | |||
386 | def vcs_fetch_remote(tag): | ||
387 | # add * so the pattern also grabs the peeled ^{} tag refs ||
388 | refs = {} | ||
389 | ls_remote_cmd = "git ls-remote -q --tags {} {}*".format( | ||
390 | repo.url, tag) | ||
391 | output, _ = bb.process.run(ls_remote_cmd) | ||
392 | output = output.strip().split('\n') | ||
393 | for line in output: | ||
394 | f = line.split(maxsplit=1) | ||
395 | if len(f) != 2: | ||
396 | continue | ||
397 | |||
398 | for prefix in ["HEAD", "refs/heads/", "refs/tags/"]: | ||
399 | if f[1].startswith(prefix): | ||
400 | refs[f[1][len(prefix):]] = f[0] | ||
401 | |||
402 | for key, hash in refs.items(): | ||
403 | if key.endswith(r"^{}"): | ||
404 | refs[key.strip(r"^{}")] = hash | ||
405 | |||
406 | return refs[tag] | ||
407 | |||
408 | m_pseudo_semver = re_pseudo_semver.match(version) | ||
409 | |||
410 | if m_pseudo_semver: | ||
411 | remote_refs = vcs_fetch_all() | ||
412 | short_commit = m_pseudo_semver.group('commithash') | ||
413 | for l in remote_refs: | ||
414 | r = l.split(maxsplit=1) | ||
415 | sha1 = r[0] if len(r) else None | ||
416 | if not sha1: | ||
417 | logger.error( | ||
418 | "Oops: could not resolve abbreviated commit for %s" % short_commit) ||
419 | |||
420 | elif sha1.startswith(short_commit): | ||
421 | hash = sha1 | ||
422 | break | ||
423 | else: | ||
424 | m_semver = re_semver.match(version) | ||
425 | if m_semver: | ||
426 | |||
427 | def get_sha1_remote(re): | ||
428 | rsha1 = None | ||
429 | for line in remote_refs: | ||
430 | # Split lines of the following format: | ||
431 | # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag) | ||
432 | lineparts = line.split(maxsplit=1) | ||
433 | sha1 = lineparts[0] if len(lineparts) else None | ||
434 | refstring = lineparts[1] if len( | ||
435 | lineparts) == 2 else None | ||
436 | if refstring: | ||
437 | # Normalize tag string and split in case of multiple | ||
438 | # refs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...) ||
439 | refs = refstring.strip('(), ').split(',') | ||
440 | for ref in refs: | ||
441 | if re.match(ref.strip()): | ||
442 | rsha1 = sha1 | ||
443 | return rsha1 | ||
444 | |||
445 | semver = "v" + m_semver.group('major') + "."\ | ||
446 | + m_semver.group('minor') + "."\ | ||
447 | + m_semver.group('patch') \ | ||
448 | + (("-" + m_semver.group('prerelease')) | ||
449 | if m_semver.group('prerelease') else "") | ||
450 | |||
451 | tag = os.path.join( | ||
452 | coderoot.codeDir, semver) if coderoot.codeDir else semver | ||
453 | |||
454 | # probe tag using 'ls-remote', which is faster than fetching | ||
455 | # complete history | ||
456 | hash = vcs_fetch_remote(tag) | ||
457 | if not hash: | ||
458 | # backup: fetch complete history | ||
459 | remote_refs = vcs_fetch_all() | ||
460 | hash = get_sha1_remote( | ||
461 | re.compile(fr"(tag:|HEAD ->) ({tag})")) | ||
462 | |||
463 | logger.debug( | ||
464 | "Resolving commit for tag '%s' -> '%s'", tag, hash) | ||
465 | return hash | ||
466 | |||
467 | def __generate_srcuri_inline_fcn(self, path, version, replaces=None): | ||
468 | """Generate SRC_URI functions for go imports""" | ||
469 | |||
470 | logger.info("Resolving repository for module %s", path) | ||
471 | # First try to resolve repo and commit from golang proxy | ||
472 | # Most info is already there and we don't have to go through the | ||
473 | # repository or even perform the version resolve magic | ||
474 | golang_proxy_info = self.__resolve_from_golang_proxy(path, version) | ||
475 | if golang_proxy_info: | ||
476 | repo = golang_proxy_info[0] | ||
477 | commit = golang_proxy_info[1] | ||
478 | else: | ||
479 | # Fallback | ||
480 | # Resolve repository by 'hand' | ||
481 | repo = self.__resolve_repository(path) | ||
482 | commit = self.__resolve_version(repo, path, version) | ||
483 | |||
484 | url = urllib.parse.urlparse(repo.url) | ||
485 | repo_url = url.netloc + url.path | ||
486 | |||
487 | coderoot = self.__build_coderepo(repo, path) | ||
488 | |||
489 | inline_fcn = "${@go_src_uri(" | ||
490 | inline_fcn += f"'{repo_url}','{version}'" | ||
491 | if repo_url != path: | ||
492 | inline_fcn += f",path='{path}'" | ||
493 | if coderoot.codeDir: | ||
494 | inline_fcn += f",subdir='{coderoot.codeDir}'" | ||
495 | if repo.vcs != 'git': | ||
496 | inline_fcn += f",vcs='{repo.vcs}'" | ||
497 | if replaces: | ||
498 | inline_fcn += f",replaces='{replaces}'" | ||
499 | if coderoot.pathMajor: | ||
500 | inline_fcn += f",pathmajor='{coderoot.pathMajor}'" | ||
501 | inline_fcn += ")}" | ||
502 | |||
503 | return inline_fcn, commit | ||
504 | |||
505 | def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
506 | |||
507 | import re | ||
508 | src_uris = [] | ||
509 | src_revs = [] | ||
510 | |||
511 | def generate_src_rev(path, version, commithash): | ||
512 | src_rev = f"# {path}@{version} => {commithash}\n" | ||
513 | # Oops... maybe someone manipulated the source repository and the ||
514 | # version or commit could not be resolved. This is a sign of | ||
515 | # a) the supply chain was manipulated (bad) | ||
516 | # b) the implementation for the version resolving didn't work | ||
517 | # anymore (less bad) | ||
518 | if not commithash: | ||
519 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
520 | src_rev += f"#!!! Could not resolve version !!!\n" | ||
521 | src_rev += f"#!!! Possible supply chain attack !!!\n" | ||
522 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
523 | src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\"" | ||
524 | |||
525 | return src_rev | ||
526 | |||
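Illustrative output of generate_src_rev() for a module that resolved cleanly (path and hash invented):

    # # github.com/example/mod@v1.2.3 => 0123456789abcdef0123456789abcdef01234567
    # SRCREV_github.com.example.mod = "0123456789abcdef0123456789abcdef01234567"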
527 | # we first go over the replacement list, because we are essentially ||
528 | # interested only in the replaced path | ||
529 | if go_mod['Replace']: | ||
530 | for replacement in go_mod['Replace']: | ||
531 | oldpath = replacement['Old']['Path'] | ||
532 | path = replacement['New']['Path'] | ||
533 | version = '' | ||
534 | if 'Version' in replacement['New']: | ||
535 | version = replacement['New']['Version'] | ||
536 | |||
537 | if os.path.exists(os.path.join(srctree, path)): | ||
538 | # the module refers to the local path, remove it from requirement list | ||
539 | # because it's a local module | ||
540 | go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath] | ||
541 | else: | ||
542 | # Replace the path and the version, so we don't iterate replacement list anymore | ||
543 | for require in go_mod['Require']: | ||
544 | if require['Path'] == oldpath: | ||
545 | require.update({'Path': path, 'Version': version}) | ||
546 | break | ||
547 | |||
548 | for require in go_mod['Require']: | ||
549 | path = require['Path'] | ||
550 | version = require['Version'] | ||
551 | |||
552 | inline_fcn, commithash = self.__generate_srcuri_inline_fcn( | ||
553 | path, version) | ||
554 | src_uris.append(inline_fcn) | ||
555 | src_revs.append(generate_src_rev(path, version, commithash)) | ||
556 | |||
557 | # strip version part from module URL /vXX | ||
558 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
559 | pn, _ = determine_from_url(baseurl) | ||
560 | go_mods_basename = "%s-modules.inc" % pn | ||
561 | |||
562 | go_mods_filename = os.path.join(localfilesdir, go_mods_basename) | ||
563 | with open(go_mods_filename, "w") as f: | ||
564 | # We introduce this indirection to make the tests a little easier | ||
565 | f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n") | ||
566 | f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n") | ||
567 | for uri in src_uris: | ||
568 | f.write(" " + uri + " \\\n") | ||
569 | f.write("\"\n\n") | ||
570 | for rev in src_revs: | ||
571 | f.write(rev + "\n") | ||
572 | |||
573 | extravalues['extrafiles'][go_mods_basename] = go_mods_filename | ||
574 | |||
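The generated <pn>-modules.inc then looks roughly like this (module name, version and hash illustrative):

    # SRC_URI += "${GO_DEPENDENCIES_SRC_URI}"
    # GO_DEPENDENCIES_SRC_URI = "\
    #     ${@go_src_uri('github.com/example/mod','v1.2.3')} \
    # "
    #
    # # github.com/example/mod@v1.2.3 => 0123456789abcdef...
    # SRCREV_github.com.example.mod = "0123456789abcdef..."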
575 | def __go_run_cmd(self, cmd, cwd, d): | ||
576 | return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')), | ||
577 | shell=True, cwd=cwd) | ||
578 | |||
579 | def __go_native_version(self, d): | ||
580 | stdout, _ = self.__go_run_cmd("go version", None, d) | ||
581 | m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout) | ||
582 | major = int(m.group(2)) | ||
583 | minor = int(m.group(3)) | ||
584 | patch = int(m.group(4)) | ||
585 | |||
586 | return major, minor, patch | ||
587 | |||
588 | def __go_mod_patch(self, srctree, localfilesdir, extravalues, d): | ||
589 | |||
590 | patchfilename = "go.mod.patch" | ||
591 | go_native_version_major, go_native_version_minor, _ = self.__go_native_version( | ||
592 | d) | ||
593 | self.__go_run_cmd("go mod tidy -go=%d.%d" % | ||
594 | (go_native_version_major, go_native_version_minor), srctree, d) | ||
595 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | ||
596 | |||
597 | # Create patch in order to upgrade go version | ||
598 | self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d) | ||
599 | # Restore original state | ||
600 | self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d) | ||
601 | |||
602 | go_mod = json.loads(stdout) | ||
603 | tmpfile = os.path.join(localfilesdir, patchfilename) | ||
604 | shutil.move(os.path.join(srctree, patchfilename), tmpfile) | ||
605 | |||
606 | extravalues['extrafiles'][patchfilename] = tmpfile | ||
607 | |||
608 | return go_mod, patchfilename | ||
609 | |||
610 | def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
611 | # Perform vendoring to retrieve the correct modules.txt | ||
612 | tmp_vendor_dir = tempfile.mkdtemp() | ||
613 | |||
614 | # -v causes to go to print modules.txt to stderr | ||
615 | _, stderr = self.__go_run_cmd( | ||
616 | "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d) | ||
617 | |||
618 | modules_txt_basename = "modules.txt" | ||
619 | modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename) | ||
620 | with open(modules_txt_filename, "w") as f: | ||
621 | f.write(stderr) | ||
622 | |||
623 | extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename | ||
624 | |||
625 | licenses = [] | ||
626 | lic_files_chksum = [] | ||
627 | licvalues = guess_license(tmp_vendor_dir, d) | ||
628 | shutil.rmtree(tmp_vendor_dir) | ||
629 | |||
630 | if licvalues: | ||
631 | for licvalue in licvalues: | ||
632 | license = licvalue[0] | ||
633 | lics = tidy_licenses(fixup_license(license)) | ||
634 | lics = [lic for lic in lics if lic not in licenses] | ||
635 | if len(lics): | ||
636 | licenses.extend(lics) | ||
637 | lic_files_chksum.append( | ||
638 | 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2])) | ||
639 | |||
640 | # strip version part from module URL /vXX | ||
641 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
642 | pn, _ = determine_from_url(baseurl) | ||
643 | licenses_basename = "%s-licenses.inc" % pn | ||
644 | |||
645 | licenses_filename = os.path.join(localfilesdir, licenses_basename) | ||
646 | with open(licenses_filename, "w") as f: | ||
647 | f.write("GO_MOD_LICENSES = \"%s\"\n\n" % | ||
648 | ' & '.join(sorted(licenses, key=str.casefold))) | ||
649 | # We introduce this indirection to make the tests a little easier | ||
650 | f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n") | ||
651 | f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n") | ||
652 | for lic in lic_files_chksum: | ||
653 | f.write(" " + lic + " \\\n") | ||
654 | f.write("\"\n") | ||
655 | |||
656 | extravalues['extrafiles'][licenses_basename] = licenses_filename | ||
657 | |||
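And the matching <pn>-licenses.inc written just above, again with illustrative values:

    # GO_MOD_LICENSES = "Apache-2.0 & MIT"
    #
    # LIC_FILES_CHKSUM += "${VENDORED_LIC_FILES_CHKSUM}"
    # VENDORED_LIC_FILES_CHKSUM = "\
    #     file://src/${GO_IMPORT}/vendor/github.com/example/mod/LICENSE;md5=<md5> \
    # "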
658 | def process(self, srctree, classes, lines_before, | 67 | def process(self, srctree, classes, lines_before, |
659 | lines_after, handled, extravalues): | 68 | lines_after, handled, extravalues): |
660 | 69 | ||
@@ -665,63 +74,52 @@ class GoRecipeHandler(RecipeHandler): | |||
665 | if not files: | 74 | if not files: |
666 | return False | 75 | return False |
667 | 76 | ||
668 | d = bb.data.createCopy(tinfoil.config_data) | ||
669 | go_bindir = self.__ensure_go() | 77 | go_bindir = self.__ensure_go() |
670 | if not go_bindir: | 78 | if not go_bindir: |
671 | sys.exit(14) | 79 | sys.exit(14) |
672 | 80 | ||
673 | d.prependVar('PATH', '%s:' % go_bindir) | ||
674 | handled.append('buildsystem') | 81 | handled.append('buildsystem') |
675 | classes.append("go-vendor") | 82 | classes.append("go-mod") |
676 | 83 | ||
677 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | 84 | # Use go-mod-update-modules to set the full SRC_URI and LICENSE |
85 | classes.append("go-mod-update-modules") | ||
86 | extravalues["run_tasks"] = "update_modules" | ||
678 | 87 | ||
679 | go_mod = json.loads(stdout) | 88 | with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir: |
680 | go_import = go_mod['Module']['Path'] | 89 | env = dict(os.environ) |
681 | go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) | 90 | env["PATH"] += f":{go_bindir}" |
682 | go_version_major = int(go_version_match.group(1)) | 91 | env['GOMODCACHE'] = tmp_mod_dir |
683 | go_version_minor = int(go_version_match.group(2)) | ||
684 | src_uris = [] | ||
685 | 92 | ||
686 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') | 93 | stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True) |
687 | extravalues.setdefault('extrafiles', {}) | 94 | go_mod = json.loads(stdout) |
95 | go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path']) | ||
688 | 96 | ||
689 | # Use an explicit name determined from the module name because it | 97 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') |
690 | # might differ from the actual URL for replaced modules | 98 | extravalues.setdefault('extrafiles', {}) |
691 | # strip version part from module URL /vXX | ||
692 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
693 | pn, _ = determine_from_url(baseurl) | ||
694 | 99 | ||
695 | # go.mod files with version < 1.17 may not include all indirect | 100 | # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files |
696 | # dependencies. Thus, we have to upgrade the go version. | 101 | basename = "{pn}-licenses.inc" |
697 | if go_version_major == 1 and go_version_minor < 17: | 102 | filename = os.path.join(localfilesdir, basename) |
698 | logger.warning( | 103 | with open(filename, "w") as f: |
699 | "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") | 104 | f.write("# FROM RECIPETOOL\n") |
700 | go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, | 105 | extravalues['extrafiles'][f"../{basename}"] = filename |
701 | extravalues, d) | ||
702 | src_uris.append( | ||
703 | "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename)) | ||
704 | 106 | ||
705 | # Check whether the module is vendored. If so, we have nothing to do. | 107 | basename = "{pn}-go-mods.inc" |
706 | # Otherwise we gather all dependencies and add them to the recipe | 108 | filename = os.path.join(localfilesdir, basename) |
707 | if not os.path.exists(os.path.join(srctree, "vendor")): | 109 | with open(filename, "w") as f: |
110 | f.write("# FROM RECIPETOOL\n") | ||
111 | extravalues['extrafiles'][f"../{basename}"] = filename | ||
708 | 112 | ||
709 | # Write additional $BPN-modules.inc file | 113 | # Do generic license handling |
710 | self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) | 114 | d = bb.data.createCopy(tinfoil.config_data) |
711 | lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") | 115 | handle_license_vars(srctree, lines_before, handled, extravalues, d) |
712 | lines_before.append("require %s-licenses.inc" % (pn)) | 116 | self.__rewrite_lic_vars(lines_before) |
713 | 117 | ||
714 | self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) | 118 | self.__rewrite_src_uri(lines_before) |
715 | 119 | ||
716 | self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) | 120 | lines_before.append('require ${BPN}-licenses.inc') |
717 | lines_before.append("require %s-modules.inc" % (pn)) | 121 | lines_before.append('require ${BPN}-go-mods.inc') |
718 | 122 | lines_before.append(f'GO_IMPORT = "{go_import}"') | |
719 | # Do generic license handling | ||
720 | handle_license_vars(srctree, lines_before, handled, extravalues, d) | ||
721 | self.__rewrite_lic_uri(lines_before) | ||
722 | |||
723 | lines_before.append("GO_IMPORT = \"{}\"".format(baseurl)) | ||
724 | lines_before.append("SRCREV_FORMAT = \"${BPN}\"") | ||
725 | 123 | ||
726 | def __update_lines_before(self, updated, newlines, lines_before): | 124 | def __update_lines_before(self, updated, newlines, lines_before): |
727 | if updated: | 125 | if updated: |
@@ -733,9 +131,9 @@ class GoRecipeHandler(RecipeHandler): | |||
733 | lines_before.append(line) | 131 | lines_before.append(line) |
734 | return updated | 132 | return updated |
735 | 133 | ||
736 | def __rewrite_lic_uri(self, lines_before): | 134 | def __rewrite_lic_vars(self, lines_before): |
737 | |||
738 | def varfunc(varname, origvalue, op, newlines): | 135 | def varfunc(varname, origvalue, op, newlines): |
136 | import urllib.parse | ||
739 | if varname == 'LIC_FILES_CHKSUM': | 137 | if varname == 'LIC_FILES_CHKSUM': |
740 | new_licenses = [] | 138 | new_licenses = [] |
741 | licenses = origvalue.split('\\') | 139 | licenses = origvalue.split('\\') |
@@ -760,12 +158,11 @@ class GoRecipeHandler(RecipeHandler): | |||
760 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) | 158 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) |
761 | return self.__update_lines_before(updated, newlines, lines_before) | 159 | return self.__update_lines_before(updated, newlines, lines_before) |
762 | 160 | ||
763 | def __rewrite_src_uri(self, lines_before, additional_uris = []): | 161 | def __rewrite_src_uri(self, lines_before): |
764 | 162 | ||
765 | def varfunc(varname, origvalue, op, newlines): | 163 | def varfunc(varname, origvalue, op, newlines): |
766 | if varname == 'SRC_URI': | 164 | if varname == 'SRC_URI': |
767 | src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] | 165 | src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'] |
768 | src_uri.extend(additional_uris) | ||
769 | return src_uri, None, -1, True | 166 | return src_uri, None, -1, True |
770 | return origvalue, None, 0, True | 167 | return origvalue, None, 0, True |
771 | 168 | ||
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py index 113a89f6a6..8c4cdd5234 100644 --- a/scripts/lib/recipetool/create_npm.py +++ b/scripts/lib/recipetool/create_npm.py | |||
@@ -15,9 +15,9 @@ import bb | |||
15 | from bb.fetch2.npm import NpmEnvironment | 15 | from bb.fetch2.npm import NpmEnvironment |
16 | from bb.fetch2.npm import npm_package | 16 | from bb.fetch2.npm import npm_package |
17 | from bb.fetch2.npmsw import foreach_dependencies | 17 | from bb.fetch2.npmsw import foreach_dependencies |
18 | from oe.license_finder import match_licenses, find_license_files | ||
18 | from recipetool.create import RecipeHandler | 19 | from recipetool.create import RecipeHandler |
19 | from recipetool.create import get_license_md5sums | 20 | from recipetool.create import generate_common_licenses_chksums |
20 | from recipetool.create import guess_license | ||
21 | from recipetool.create import split_pkg_licenses | 21 | from recipetool.create import split_pkg_licenses |
22 | logger = logging.getLogger('recipetool') | 22 | logger = logging.getLogger('recipetool') |
23 | 23 | ||
@@ -112,40 +112,54 @@ class NpmRecipeHandler(RecipeHandler): | |||
112 | """Return the extra license files and the list of packages""" | 112 | """Return the extra license files and the list of packages""" |
113 | licfiles = [] | 113 | licfiles = [] |
114 | packages = {} | 114 | packages = {} |
115 | # Licenses from package.json will point to COMMON_LICENSE_DIR so we need | ||
116 | # to associate them explicitly to packages for split_pkg_licenses() ||
117 | fallback_licenses = dict() | ||
118 | |||
119 | def _find_package_licenses(destdir): | ||
120 | """Either find license files, or use package.json metadata""" | ||
121 | def _get_licenses_from_package_json(package_json): | ||
122 | with open(os.path.join(srctree, package_json), "r") as f: | ||
123 | data = json.load(f) | ||
124 | if "license" in data: | ||
125 | licenses = data["license"].split(" ") | ||
126 | licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"] | ||
127 | return [], licenses | ||
128 | else: | ||
129 | return [package_json], None | ||
115 | 130 | ||
116 | # Handle the parent package | ||
117 | packages["${PN}"] = "" | ||
118 | |||
119 | def _licfiles_append_fallback_readme_files(destdir): | ||
120 | """Append README files as a fallback to license files if a license file is missing""" ||
121 | |||
122 | fallback = True | ||
123 | readmes = [] | ||
124 | basedir = os.path.join(srctree, destdir) | 131 | basedir = os.path.join(srctree, destdir) |
125 | for fn in os.listdir(basedir): | 132 | licfiles = find_license_files(basedir) |
126 | upper = fn.upper() | 133 | if len(licfiles) > 0: |
127 | if upper.startswith("README"): | 134 | return licfiles, None |
128 | fullpath = os.path.join(basedir, fn) | 135 | else: |
129 | readmes.append(fullpath) | 136 | # A license wasn't found in the package directory, so we'll use the package.json metadata |
130 | if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: | 137 | pkg_json = os.path.join(basedir, "package.json") |
131 | fallback = False | 138 | return _get_licenses_from_package_json(pkg_json) |
132 | if fallback: | 139 | |
133 | for readme in readmes: | 140 | def _get_package_licenses(destdir, package): |
134 | licfiles.append(os.path.relpath(readme, srctree)) | 141 | (package_licfiles, package_licenses) = _find_package_licenses(destdir) |
142 | if package_licfiles: | ||
143 | licfiles.extend(package_licfiles) | ||
144 | else: | ||
145 | fallback_licenses[package] = package_licenses | ||
135 | 146 | ||
136 | # Handle the dependencies | 147 | # Handle the dependencies |
137 | def _handle_dependency(name, params, destdir): | 148 | def _handle_dependency(name, params, destdir): |
138 | deptree = destdir.split('node_modules/') | 149 | deptree = destdir.split('node_modules/') |
139 | suffix = "-".join([npm_package(dep) for dep in deptree]) | 150 | suffix = "-".join([npm_package(dep) for dep in deptree]) |
140 | packages["${PN}" + suffix] = destdir | 151 | packages["${PN}" + suffix] = destdir |
141 | _licfiles_append_fallback_readme_files(destdir) | 152 | _get_package_licenses(destdir, "${PN}" + suffix) |
142 | 153 | ||
143 | with open(shrinkwrap_file, "r") as f: | 154 | with open(shrinkwrap_file, "r") as f: |
144 | shrinkwrap = json.load(f) | 155 | shrinkwrap = json.load(f) |
145 | |||
146 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) | 156 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) |
147 | 157 | ||
148 | return licfiles, packages | 158 | # Handle the parent package |
159 | packages["${PN}"] = "" | ||
160 | _get_package_licenses(srctree, "${PN}") | ||
161 | |||
162 | return licfiles, packages, fallback_licenses | ||
149 | 163 | ||
150 | # Handle the peer dependencies | 164 | # Handle the peer dependencies |
151 | def _handle_peer_dependency(self, shrinkwrap_file): | 165 | def _handle_peer_dependency(self, shrinkwrap_file): |
@@ -266,36 +280,12 @@ class NpmRecipeHandler(RecipeHandler): | |||
266 | fetcher.unpack(srctree) | 280 | fetcher.unpack(srctree) |
267 | 281 | ||
268 | bb.note("Handling licences ...") | 282 | bb.note("Handling licences ...") |
269 | (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) | 283 | (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev) |
270 | 284 | licvalues = match_licenses(licfiles, srctree, d) | |
271 | def _guess_odd_license(licfiles): | 285 | split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses) |
272 | import bb | 286 | fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist] |
273 | 287 | extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d) | |
274 | md5sums = get_license_md5sums(d, linenumbers=True) | 288 | extravalues["LICENSE"] = fallback_licenses_flat |
275 | |||
276 | chksums = [] | ||
277 | licenses = [] | ||
278 | for licfile in licfiles: | ||
279 | f = os.path.join(srctree, licfile) | ||
280 | md5value = bb.utils.md5_file(f) | ||
281 | (license, beginline, endline, md5) = md5sums.get(md5value, | ||
282 | (None, "", "", "")) | ||
283 | if not license: | ||
284 | license = "Unknown" | ||
285 | logger.info("Please add the following line for '%s' to a " | ||
286 | "'lib/recipetool/licenses.csv' and replace `Unknown`, " | ||
287 | "`X`, `Y` and `MD5` with the license, begin line, " | ||
288 | "end line and partial MD5 checksum:\n" \ | ||
289 | "%s,Unknown,X,Y,MD5" % (licfile, md5value)) | ||
290 | chksums.append("file://%s%s%s;md5=%s" % (licfile, | ||
291 | ";beginline=%s" % (beginline) if beginline else "", | ||
292 | ";endline=%s" % (endline) if endline else "", | ||
293 | md5 if md5 else md5value)) | ||
294 | licenses.append((license, licfile, md5value)) | ||
295 | return (licenses, chksums) | ||
296 | |||
297 | (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles) | ||
298 | split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after) | ||
299 | 289 | ||
300 | classes.append("npm") | 290 | classes.append("npm") |
301 | handled.append("buildsystem") | 291 | handled.append("buildsystem") |
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv deleted file mode 100644 index 80851111b3..0000000000 --- a/scripts/lib/recipetool/licenses.csv +++ /dev/null | |||
@@ -1,37 +0,0 @@ | |||
1 | 0636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only | ||
2 | 12f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only | ||
3 | 18810669f13b87348459e611d31ab760,GPL-2.0-only | ||
4 | 252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only | ||
5 | 2d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only | ||
6 | 3214f080875748938ba060314b4f727d,LGPL-2.0-only | ||
7 | 385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only | ||
8 | 393a5ca445f6965873eca0259a17f833,GPL-2.0-only | ||
9 | 3b83ef96387f14655fc854ddc3c6bd57,Apache-2.0 | ||
10 | 3bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only | ||
11 | 4325afd396febcb659c36b49533135d4,GPL-2.0-only | ||
12 | 4fbd65380cdd255951079008b364516c,LGPL-2.1-only | ||
13 | 54c7042be62e169199200bc6477f04d1,BSD-3-Clause | ||
14 | 55ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only | ||
15 | 59530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only | ||
16 | 5f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only | ||
17 | 6a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only | ||
18 | 751419260aa954499f7abaabaa882bbe,GPL-2.0-only | ||
19 | 7fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only | ||
20 | 8ca43cbc842c2336e835926c2166c28b,GPL-2.0-only | ||
21 | 94d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only | ||
22 | 9ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only | ||
23 | 9f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only | ||
24 | a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only | ||
25 | b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only | ||
26 | bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only | ||
27 | bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only | ||
28 | c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only | ||
29 | d32239bcb673463ab874e80d47fae504,GPL-3.0-only | ||
30 | d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only | ||
31 | d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only | ||
32 | db979804f025cf55aabec7129cb671ed,LGPL-2.0-only | ||
33 | eb723b61539feef013de476e68b5c50a,GPL-2.0-only | ||
34 | ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only | ||
35 | f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only | ||
36 | fad9b3332be894bab9bc501572864b29,LGPL-2.1-only | ||
37 | fbc093901857fcd118f065f900982c24,LGPL-2.1-only | ||
diff --git a/scripts/lib/resulttool/junit.py b/scripts/lib/resulttool/junit.py new file mode 100644 index 0000000000..c7a53dc550 --- /dev/null +++ b/scripts/lib/resulttool/junit.py | |||
@@ -0,0 +1,77 @@ | |||
1 | # resulttool - report test results in JUnit XML format | ||
2 | # | ||
3 | # Copyright (c) 2024, Siemens AG. | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | import os | ||
9 | import re | ||
10 | import xml.etree.ElementTree as ET | ||
11 | import resulttool.resultutils as resultutils | ||
12 | |||
13 | def junit(args, logger): | ||
14 | testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map) | ||
15 | |||
16 | total_time = 0 | ||
17 | skipped = 0 | ||
18 | failures = 0 | ||
19 | errors = 0 | ||
20 | |||
21 | for tests in testresults.values(): | ||
22 | results = tests[next(reversed(tests))].get("result", {}) | ||
23 | |||
24 | for result_id, result in results.items(): | ||
25 | # filter out ptestresult.rawlogs and ptestresult.sections | ||
26 | if re.search(r'\.test_', result_id): | ||
27 | total_time += result.get("duration", 0) | ||
28 | |||
29 | if result['status'] == "FAILED": | ||
30 | failures += 1 | ||
31 | elif result['status'] == "ERROR": | ||
32 | errors += 1 | ||
33 | elif result['status'] == "SKIPPED": | ||
34 | skipped += 1 | ||
35 | |||
36 | testsuites_node = ET.Element("testsuites") | ||
37 | testsuites_node.set("time", "%s" % total_time) | ||
38 | testsuite_node = ET.SubElement(testsuites_node, "testsuite") | ||
39 | testsuite_node.set("name", "Testimage") | ||
40 | testsuite_node.set("time", "%s" % total_time) | ||
41 | testsuite_node.set("tests", "%s" % len(results)) | ||
42 | testsuite_node.set("failures", "%s" % failures) | ||
43 | testsuite_node.set("errors", "%s" % errors) | ||
44 | testsuite_node.set("skipped", "%s" % skipped) | ||
45 | |||
46 | for result_id, result in results.items(): | ||
47 | if re.search(r'\.test_', result_id): | ||
48 | testcase_node = ET.SubElement(testsuite_node, "testcase", { | ||
49 | "name": result_id, | ||
50 | "classname": "Testimage", | ||
51 | "time": str(result['duration']) | ||
52 | }) | ||
53 | if result['status'] == "SKIPPED": | ||
54 | ET.SubElement(testcase_node, "skipped", message=result['log']) | ||
55 | elif result['status'] == "FAILED": | ||
56 | ET.SubElement(testcase_node, "failure", message=result['log']) | ||
57 | elif result['status'] == "ERROR": | ||
58 | ET.SubElement(testcase_node, "error", message=result['log']) | ||
59 | |||
60 | tree = ET.ElementTree(testsuites_node) | ||
61 | |||
62 | if args.junit_xml_path is None: | ||
63 | args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml' | ||
64 | tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True) | ||
65 | |||
66 | logger.info('Saved JUnit XML report as %s' % args.junit_xml_path) | ||
67 | |||
68 | def register_commands(subparsers): | ||
69 | """Register subcommands from this plugin""" | ||
70 | parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format', | ||
71 | description='generate a unit test report in JUnit XML format based on the latest test results in testresults.json.', ||
72 | group='analysis') | ||
73 | parser_build.set_defaults(func=junit) | ||
74 | parser_build.add_argument('json_file', | ||
75 | help='json file should point to the testresults.json') | ||
76 | parser_build.add_argument('-j', '--junit_xml_path', | ||
77 | help='output path for the generated JUnit XML report. The default location is <build_dir>/tmp/log/oeqa/junit.xml') ||
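A hypothetical invocation of the new plugin, and the rough shape of the XML it writes (attribute values invented):

    # $ resulttool junit testresults.json -j /tmp/junit.xml
    #
    # <?xml version='1.0' encoding='UTF-8'?>
    # <testsuites time="42.0">
    #   <testsuite name="Testimage" time="42.0" tests="3" failures="1" errors="0" skipped="1">
    #     <testcase name="x.test_a" classname="Testimage" time="1.5" />
    #     <testcase name="x.test_b" classname="Testimage" time="0.5">
    #       <skipped message="..." />
    #     </testcase>
    #   </testsuite>
    # </testsuites>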
diff --git a/scripts/lib/resulttool/manualexecution.py b/scripts/lib/resulttool/manualexecution.py index ecb27c5933..ae0861ac6b 100755 --- a/scripts/lib/resulttool/manualexecution.py +++ b/scripts/lib/resulttool/manualexecution.py | |||
@@ -22,7 +22,7 @@ def load_json_file(f): | |||
22 | def write_json_file(f, json_data): | 22 | def write_json_file(f, json_data): |
23 | os.makedirs(os.path.dirname(f), exist_ok=True) | 23 | os.makedirs(os.path.dirname(f), exist_ok=True) |
24 | with open(f, 'w') as filedata: | 24 | with open(f, 'w') as filedata: |
25 | filedata.write(json.dumps(json_data, sort_keys=True, indent=4)) | 25 | filedata.write(json.dumps(json_data, sort_keys=True, indent=1)) |
26 | 26 | ||
27 | class ManualTestRunner(object): | 27 | class ManualTestRunner(object): |
28 | 28 | ||
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py index 10e7d13841..33b3119c54 100644 --- a/scripts/lib/resulttool/regression.py +++ b/scripts/lib/resulttool/regression.py | |||
@@ -212,6 +212,8 @@ def compare_result(logger, base_name, target_name, base_result, target_result, d | |||
212 | 212 | ||
213 | if base_result and target_result: | 213 | if base_result and target_result: |
214 | for k in base_result: | 214 | for k in base_result: |
215 | if k in ['ptestresult.rawlogs', 'ptestresult.sections']: | ||
216 | continue | ||
215 | base_testcase = base_result[k] | 217 | base_testcase = base_result[k] |
216 | base_status = base_testcase.get('status') | 218 | base_status = base_testcase.get('status') |
217 | if base_status: | 219 | if base_status: |
@@ -422,6 +424,7 @@ def register_commands(subparsers): | |||
422 | help='(optional) filter the base results to this result ID') | 424 | help='(optional) filter the base results to this result ID') |
423 | parser_build.add_argument('-t', '--target-result-id', default='', | 425 | parser_build.add_argument('-t', '--target-result-id', default='', |
424 | help='(optional) filter the target results to this result ID') | 426 | help='(optional) filter the target results to this result ID') |
427 | parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes") | ||
425 | 428 | ||
426 | parser_build = subparsers.add_parser('regression-git', help='regression git analysis', | 429 | parser_build = subparsers.add_parser('regression-git', help='regression git analysis', |
427 | description='regression analysis comparing base result set to target ' | 430 | description='regression analysis comparing base result set to target ' |
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py index a349510ab8..1c100b00ab 100644 --- a/scripts/lib/resulttool/report.py +++ b/scripts/lib/resulttool/report.py | |||
@@ -256,7 +256,7 @@ class ResultsTextReport(object): | |||
256 | if selected_test_case_only: | 256 | if selected_test_case_only: |
257 | print_selected_testcase_result(raw_results, selected_test_case_only) | 257 | print_selected_testcase_result(raw_results, selected_test_case_only) |
258 | else: | 258 | else: |
259 | print(json.dumps(raw_results, sort_keys=True, indent=4)) | 259 | print(json.dumps(raw_results, sort_keys=True, indent=1)) |
260 | else: | 260 | else: |
261 | print('Could not find raw test result for %s' % raw_test) | 261 | print('Could not find raw test result for %s' % raw_test) |
262 | return 0 | 262 | return 0 |
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py index c5521d81bd..b8fc79a6ac 100644 --- a/scripts/lib/resulttool/resultutils.py +++ b/scripts/lib/resulttool/resultutils.py | |||
@@ -14,8 +14,11 @@ import scriptpath | |||
14 | import copy | 14 | import copy |
15 | import urllib.request | 15 | import urllib.request |
16 | import posixpath | 16 | import posixpath |
17 | import logging | ||
17 | scriptpath.add_oe_lib_path() | 18 | scriptpath.add_oe_lib_path() |
18 | 19 | ||
20 | logger = logging.getLogger('resulttool') | ||
21 | |||
19 | flatten_map = { | 22 | flatten_map = { |
20 | "oeselftest": [], | 23 | "oeselftest": [], |
21 | "runtime": [], | 24 | "runtime": [], |
@@ -31,13 +34,19 @@ regression_map = { | |||
31 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] | 34 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] |
32 | } | 35 | } |
33 | store_map = { | 36 | store_map = { |
34 | "oeselftest": ['TEST_TYPE'], | 37 | "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'], |
35 | "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], | 38 | "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], |
36 | "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], | 39 | "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], |
37 | "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], | 40 | "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], |
38 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] | 41 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] |
39 | } | 42 | } |
40 | 43 | ||
44 | rawlog_sections = { | ||
45 | "ptestresult.rawlogs": "ptest", | ||
46 | "ltpresult.rawlogs": "ltp", | ||
47 | "ltpposixresult.rawlogs": "ltpposix" | ||
48 | } | ||
49 | |||
41 | def is_url(p): | 50 | def is_url(p): |
42 | """ | 51 | """ |
43 | Helper for determining if the given path is a URL | 52 | Helper for determining if the given path is a URL |
@@ -108,21 +117,57 @@ def filter_resultsdata(results, resultid): | |||
108 | newresults[r][i] = results[r][i] | 117 | newresults[r][i] = results[r][i] |
109 | return newresults | 118 | return newresults |
110 | 119 | ||
111 | def strip_ptestresults(results): | 120 | def strip_logs(results): |
112 | newresults = copy.deepcopy(results) | 121 | newresults = copy.deepcopy(results) |
113 | #for a in newresults2: | ||
114 | # newresults = newresults2[a] | ||
115 | for res in newresults: | 122 | for res in newresults: |
116 | if 'result' not in newresults[res]: | 123 | if 'result' not in newresults[res]: |
117 | continue | 124 | continue |
118 | if 'ptestresult.rawlogs' in newresults[res]['result']: | 125 | for logtype in rawlog_sections: |
119 | del newresults[res]['result']['ptestresult.rawlogs'] | 126 | if logtype in newresults[res]['result']: |
127 | del newresults[res]['result'][logtype] | ||
120 | if 'ptestresult.sections' in newresults[res]['result']: | 128 | if 'ptestresult.sections' in newresults[res]['result']: |
121 | for i in newresults[res]['result']['ptestresult.sections']: | 129 | for i in newresults[res]['result']['ptestresult.sections']: |
122 | if 'log' in newresults[res]['result']['ptestresult.sections'][i]: | 130 | if 'log' in newresults[res]['result']['ptestresult.sections'][i]: |
123 | del newresults[res]['result']['ptestresult.sections'][i]['log'] | 131 | del newresults[res]['result']['ptestresult.sections'][i]['log'] |
124 | return newresults | 132 | return newresults |
125 | 133 | ||
134 | # For timing numbers, crazy amounts of precision don't make sense and just confuse | ||
135 | # the logs. For numbers over 1, trim to 3 decimal places; for numbers less than 1, ||
136 | # trim to 4 significant digits | ||
137 | def trim_durations(results): | ||
138 | for res in results: | ||
139 | if 'result' not in results[res]: | ||
140 | continue | ||
141 | for entry in results[res]['result']: | ||
142 | if 'duration' in results[res]['result'][entry]: | ||
143 | duration = results[res]['result'][entry]['duration'] | ||
144 | if duration > 1: | ||
145 | results[res]['result'][entry]['duration'] = float("%.3f" % duration) | ||
146 | elif duration < 1: | ||
147 | results[res]['result'][entry]['duration'] = float("%.4g" % duration) | ||
148 | return results | ||
149 | |||
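Worked examples of the trimming rule:

    assert float("%.3f" % 12.345678) == 12.346   # over 1: three decimal places
    assert float("%.4g" % 0.0123456) == 0.01235  # under 1: four significant digits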
150 | def handle_cleanups(results): | ||
151 | # Remove pointless path duplication from old format reproducibility results | ||
152 | for res2 in results: | ||
153 | try: | ||
154 | section = results[res2]['result']['reproducible']['files'] | ||
155 | for pkgtype in section: | ||
156 | for filelist in section[pkgtype].copy(): | ||
157 | if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict: | ||
158 | newlist = [] | ||
159 | for entry in section[pkgtype][filelist]: | ||
160 | newlist.append(entry["reference"].split("/./")[1]) | ||
161 | section[pkgtype][filelist] = newlist | ||
162 | |||
163 | except KeyError: | ||
164 | pass | ||
165 | # Remove pointless duplicate rawlogs data | ||
166 | try: | ||
167 | del results[res2]['result']['reproducible.rawlogs'] | ||
168 | except KeyError: | ||
169 | pass | ||
170 | |||
126 | def decode_log(logdata): | 171 | def decode_log(logdata): |
127 | if isinstance(logdata, str): | 172 | if isinstance(logdata, str): |
128 | return logdata | 173 | return logdata |
@@ -155,9 +200,6 @@ def generic_get_rawlogs(sectname, results): | |||
155 | return None | 200 | return None |
156 | return decode_log(results[sectname]['log']) | 201 | return decode_log(results[sectname]['log']) |
157 | 202 | ||
158 | def ptestresult_get_rawlogs(results): | ||
159 | return generic_get_rawlogs('ptestresult.rawlogs', results) | ||
160 | |||
161 | def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): | 203 | def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): |
162 | for res in results: | 204 | for res in results: |
163 | if res: | 205 | if res: |
@@ -167,16 +209,20 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p | |||
167 | os.makedirs(os.path.dirname(dst), exist_ok=True) | 209 | os.makedirs(os.path.dirname(dst), exist_ok=True) |
168 | resultsout = results[res] | 210 | resultsout = results[res] |
169 | if not ptestjson: | 211 | if not ptestjson: |
170 | resultsout = strip_ptestresults(results[res]) | 212 | resultsout = strip_logs(results[res]) |
213 | trim_durations(resultsout) | ||
214 | handle_cleanups(resultsout) | ||
171 | with open(dst, 'w') as f: | 215 | with open(dst, 'w') as f: |
172 | f.write(json.dumps(resultsout, sort_keys=True, indent=4)) | 216 | f.write(json.dumps(resultsout, sort_keys=True, indent=1)) |
173 | for res2 in results[res]: | 217 | for res2 in results[res]: |
174 | if ptestlogs and 'result' in results[res][res2]: | 218 | if ptestlogs and 'result' in results[res][res2]: |
175 | seriesresults = results[res][res2]['result'] | 219 | seriesresults = results[res][res2]['result'] |
176 | rawlogs = ptestresult_get_rawlogs(seriesresults) | 220 | for logtype in rawlog_sections: |
177 | if rawlogs is not None: | 221 | logdata = generic_get_rawlogs(logtype, seriesresults) |
178 | with open(dst.replace(fn, "ptest-raw.log"), "w+") as f: | 222 | if logdata is not None: |
179 | f.write(rawlogs) | 223 | logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log") |
224 | with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f: | ||
225 | f.write(logdata) | ||
180 | if 'ptestresult.sections' in seriesresults: | 226 | if 'ptestresult.sections' in seriesresults: |
181 | for i in seriesresults['ptestresult.sections']: | 227 | for i in seriesresults['ptestresult.sections']: |
182 | sectionlog = ptestresult_get_log(seriesresults, i) | 228 | sectionlog = ptestresult_get_log(seriesresults, i) |
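The rounding rule introduced by trim_durations() is easy to verify in
isolation; a minimal standalone sketch (the input values are illustrative
only):

    # Durations above 1 keep three decimal places; durations below 1
    # keep four significant digits; exactly 1 passes through unchanged.
    def trim(duration):
        if duration > 1:
            return float("%.3f" % duration)
        elif duration < 1:
            return float("%.4g" % duration)
        return duration

    print(trim(123.456789))   # 123.457
    print(trim(0.000123456))  # 0.0001235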
diff --git a/scripts/lib/resulttool/store.py b/scripts/lib/resulttool/store.py index e0951f0a8f..b143334e69 100644 --- a/scripts/lib/resulttool/store.py +++ b/scripts/lib/resulttool/store.py | |||
@@ -65,18 +65,35 @@ def store(args, logger): | |||
65 | 65 | ||
66 | for r in revisions: | 66 | for r in revisions: |
67 | results = revisions[r] | 67 | results = revisions[r] |
68 | if args.revision and r[0] != args.revision: | ||
69 | logger.info('skipping %s as non-matching' % r[0]) | ||
70 | continue | ||
68 | keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} | 71 | keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} |
69 | subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"]) | 72 | subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"]) |
70 | resultutils.save_resultsdata(results, tempdir, ptestlogs=True) | 73 | resultutils.save_resultsdata(results, tempdir, ptestlogs=True) |
71 | 74 | ||
72 | logger.info('Storing test result into git repository %s' % args.git_dir) | 75 | logger.info('Storing test result into git repository %s' % args.git_dir) |
73 | 76 | ||
74 | gitarchive.gitarchive(tempdir, args.git_dir, False, False, | 77 | excludes = [] |
78 | if args.logfile_archive: | ||
79 | excludes = ['*.log', "*.log.zst"] | ||
80 | |||
81 | tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False, | ||
75 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", | 82 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", |
76 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", | 83 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", |
77 | 'Test run #{tag_number} of {branch}:{commit}', '', | 84 | 'Test run #{tag_number} of {branch}:{commit}', '', |
78 | [], [], False, keywords, logger) | 85 | excludes, [], False, keywords, logger) |
79 | 86 | ||
87 | if args.logfile_archive: | ||
88 | logdir = args.logfile_archive + "/" + tagname | ||
89 | shutil.copytree(tempdir, logdir) | ||
90 | os.chmod(logdir, 0o755) | ||
91 | for root, dirs, files in os.walk(logdir): | ||
92 | for name in files: | ||
93 | if not name.endswith(".log"): | ||
94 | continue | ||
95 | f = os.path.join(root, name) | ||
96 | subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True) | ||
80 | finally: | 97 | finally: |
81 | subprocess.check_call(["rm", "-rf", tempdir]) | 98 | subprocess.check_call(["rm", "-rf", tempdir]) |
82 | 99 | ||
@@ -102,3 +119,7 @@ def register_commands(subparsers): | |||
102 | help='add executed-by configuration to each result file') | 119 | help='add executed-by configuration to each result file') |
103 | parser_build.add_argument('-t', '--extra-test-env', default='', | 120 | parser_build.add_argument('-t', '--extra-test-env', default='', |
104 | help='add extra test environment data to each result file configuration') | 121 | help='add extra test environment data to each result file configuration') |
122 | parser_build.add_argument('-r', '--revision', default='', | ||
123 | help='only store data for the specified revision') | ||
124 | parser_build.add_argument('-l', '--logfile-archive', default='', | ||
125 | help='directory to separately archive log files along with a copy of the results') | ||
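The new --logfile-archive path in store() keeps an uncompressed copy of the
results tree next to the git archive and then compresses each log in place.
A minimal standalone sketch of that step, assuming zstd is installed (the
directory names are placeholders):

    import os
    import shutil
    import subprocess

    def archive_logs(tempdir, logdir):
        # Copy the full results tree aside, then zstd-compress every
        # .log file; --rm deletes the original after compression.
        shutil.copytree(tempdir, logdir)
        os.chmod(logdir, 0o755)
        for root, dirs, files in os.walk(logdir):
            for name in files:
                if name.endswith(".log"):
                    path = os.path.join(root, name)
                    subprocess.run(["zstd", path, "--rm"],
                                   check=True, capture_output=True)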
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py index f23e53cba9..32e749dbb1 100644 --- a/scripts/lib/scriptutils.py +++ b/scripts/lib/scriptutils.py | |||
@@ -179,8 +179,13 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
179 | f.write('SRCREV = "%s"\n' % srcrev) | 179 | f.write('SRCREV = "%s"\n' % srcrev) |
180 | f.write('PV = "0.0+"\n') | 180 | f.write('PV = "0.0+"\n') |
181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) | 181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) |
182 | f.write('UNPACKDIR = "%s"\n' % destdir) | ||
183 | |||
182 | # Set S out of the way so it doesn't get created under the workdir | 184 | # Set S out of the way so it doesn't get created under the workdir |
183 | f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) | 185 | s_dir = os.path.join(tmpdir, 'emptysrc') |
186 | bb.utils.mkdirhier(s_dir) | ||
187 | f.write('S = "%s"\n' % s_dir) | ||
188 | |||
184 | if not mirrors: | 189 | if not mirrors: |
185 | # We do not need PREMIRRORS since we are almost certainly | 190 | # We do not need PREMIRRORS since we are almost certainly |
186 | # fetching new source rather than something that has already | 191 | # fetching new source rather than something that has already |
@@ -232,10 +237,6 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
232 | if e.errno != errno.ENOTEMPTY: | 237 | if e.errno != errno.ENOTEMPTY: |
233 | raise | 238 | raise |
234 | 239 | ||
235 | bb.utils.mkdirhier(destdir) | ||
236 | for fn in os.listdir(tmpworkdir): | ||
237 | shutil.move(os.path.join(tmpworkdir, fn), destdir) | ||
238 | |||
239 | finally: | 240 | finally: |
240 | if not preserve_tmp: | 241 | if not preserve_tmp: |
241 | shutil.rmtree(tmpdir) | 242 | shutil.rmtree(tmpdir) |
@@ -271,12 +272,3 @@ def is_src_url(param): | |||
271 | return True | 272 | return True |
272 | return False | 273 | return False |
273 | 274 | ||
274 | def filter_src_subdirs(pth): | ||
275 | """ | ||
276 | Filter out subdirectories of initial unpacked source trees that we do not care about. | ||
277 | Used by devtool and recipetool. | ||
278 | """ | ||
279 | dirlist = os.listdir(pth) | ||
280 | filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa'] | ||
281 | dirlist = [x for x in dirlist if x not in filterout] | ||
282 | return dirlist | ||
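With this change the fetched source is unpacked straight into the caller's
destination via UNPACKDIR instead of being moved out of the workdir
afterwards. The temporary recipe fragment fetch_url() writes now looks
roughly like this (a sketch; all paths and the SRCREV are placeholders):

    SRCREV = "abcdef0123456789abcdef0123456789abcdef01"
    PV = "0.0+"
    WORKDIR = "/tmp/tmpXXXX/workdir"
    UNPACKDIR = "/path/to/destdir"
    # S is parked on a pre-created empty directory out of the way
    S = "/tmp/tmpXXXX/emptysrc"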
diff --git a/scripts/lib/wic/canned-wks/common.wks.inc b/scripts/lib/wic/canned-wks/common.wks.inc index 89880b417b..4a440ddafe 100644 --- a/scripts/lib/wic/canned-wks/common.wks.inc +++ b/scripts/lib/wic/canned-wks/common.wks.inc | |||
@@ -1,3 +1,3 @@ | |||
1 | # This file is included into 3 canned wks files from this directory | 1 | # This file is included into 3 canned wks files from this directory |
2 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 2 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
3 | part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 | 3 | part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 |
diff --git a/scripts/lib/wic/canned-wks/directdisk-gpt.wks b/scripts/lib/wic/canned-wks/directdisk-gpt.wks index 8d7d8de6ea..cb640056f1 100644 --- a/scripts/lib/wic/canned-wks/directdisk-gpt.wks +++ b/scripts/lib/wic/canned-wks/directdisk-gpt.wks | |||
@@ -3,7 +3,7 @@ | |||
3 | # can directly dd to boot media. | 3 | # can directly dd to boot media. |
4 | 4 | ||
5 | 5 | ||
6 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 6 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
8 | 8 | ||
9 | bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" | 9 | bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" |
diff --git a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks index f61d941d6d..4fd1999ffb 100644 --- a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks +++ b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks | |||
@@ -15,7 +15,7 @@ | |||
15 | # | 15 | # |
16 | # - or any combinations of -r and --rootfs command line options | 16 | # - or any combinations of -r and --rootfs command line options |
17 | 17 | ||
18 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 18 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
19 | part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 | 19 | part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 |
20 | part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 | 20 | part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 |
21 | 21 | ||
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in index 2fd286ff98..5211972955 100644 --- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in +++ b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in | |||
@@ -1,3 +1,3 @@ | |||
1 | bootloader --ptable gpt | 1 | bootloader --ptable gpt |
2 | part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.1 | 2 | part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.2 |
3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ | 3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ |
diff --git a/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in new file mode 100644 index 0000000000..cac0fa32cd --- /dev/null +++ b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in | |||
@@ -0,0 +1,3 @@ | |||
1 | bootloader --ptable gpt --timeout=5 | ||
2 | part /boot --source bootimg_efi --sourceparams="loader=${EFI_PROVIDER}" --label boot --active --align 1024 --use-uuid --part-name="ESP" --part-type=C12A7328-F81F-11D2-BA4B-00A0C93EC93B --overhead-factor=1 | ||
3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ | ||
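A hypothetical configuration fragment selecting the new canned UKI image
(the "uki" class check matches the IMAGE_CLASSES test added in
bootimg_efi.py below; the values are examples, not requirements):

    WKS_FILE = "efi-uki-bootdisk.wks.in"
    EFI_PROVIDER = "systemd-boot"
    IMAGE_CLASSES += "uki"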
diff --git a/scripts/lib/wic/canned-wks/mkefidisk.wks b/scripts/lib/wic/canned-wks/mkefidisk.wks index 9f534fe184..16dfe76dfe 100644 --- a/scripts/lib/wic/canned-wks/mkefidisk.wks +++ b/scripts/lib/wic/canned-wks/mkefidisk.wks | |||
@@ -2,10 +2,10 @@ | |||
2 | # long-description: Creates a partitioned EFI disk image that the user | 2 | # long-description: Creates a partitioned EFI disk image that the user |
3 | # can directly dd to boot media. | 3 | # can directly dd to boot media. |
4 | 4 | ||
5 | part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 | 5 | part /boot --source bootimg_efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 |
6 | 6 | ||
7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
8 | 8 | ||
9 | part swap --ondisk sda --size 44 --label swap1 --fstype=swap | 9 | part swap --ondisk sda --size 44 --label swap1 --fstype=swap |
10 | 10 | ||
11 | bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0" | 11 | bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=${KERNEL_CONSOLE} console=tty0" |
diff --git a/scripts/lib/wic/canned-wks/mkhybridiso.wks b/scripts/lib/wic/canned-wks/mkhybridiso.wks index 48c5ac4791..c3a030e5b4 100644 --- a/scripts/lib/wic/canned-wks/mkhybridiso.wks +++ b/scripts/lib/wic/canned-wks/mkhybridiso.wks | |||
@@ -2,6 +2,6 @@ | |||
2 | # long-description: Creates an EFI and legacy bootable hybrid ISO image | 2 | # long-description: Creates an EFI and legacy bootable hybrid ISO image |
3 | # which can be used on optical media as well as USB media. | 3 | # which can be used on optical media as well as USB media. |
4 | 4 | ||
5 | part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO | 5 | part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO |
6 | 6 | ||
7 | bootloader --timeout=15 --append="" | 7 | bootloader --timeout=15 --append="" |
diff --git a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks index 63bc4dab6a..f9f8044f7d 100644 --- a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks +++ b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks | |||
@@ -2,5 +2,5 @@ | |||
2 | # long-description: Creates a partitioned SD card image. Boot files | 2 | # long-description: Creates a partitioned SD card image. Boot files |
3 | # are located in the first vfat partition. | 3 | # are located in the first vfat partition. |
4 | 4 | ||
5 | part /boot --source bootimg-partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 | 5 | part /boot --source bootimg_partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 |
6 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 | 6 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 |
diff --git a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks index 95d7b97a60..3fb2c0e35f 100644 --- a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks +++ b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks | |||
@@ -2,7 +2,7 @@ | |||
2 | # long-description: Creates a partitioned EFI disk image that the user | 2 | # long-description: Creates a partitioned EFI disk image that the user |
3 | # can directly dd to boot media. The selected bootloader is systemd-boot. | 3 | # can directly dd to boot media. The selected bootloader is systemd-boot. |
4 | 4 | ||
5 | part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid | 5 | part /boot --source bootimg_efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid |
6 | 6 | ||
7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
8 | 8 | ||
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py index 674ccfc244..b9e60cbe4e 100644 --- a/scripts/lib/wic/engine.py +++ b/scripts/lib/wic/engine.py | |||
@@ -180,6 +180,8 @@ def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir, | |||
180 | os.makedirs(options.outdir) | 180 | os.makedirs(options.outdir) |
181 | 181 | ||
182 | pname = options.imager | 182 | pname = options.imager |
183 | # Don't support '-' in plugin names | ||
184 | pname = pname.replace("-", "_") | ||
183 | plugin_class = PluginMgr.get_plugins('imager').get(pname) | 185 | plugin_class = PluginMgr.get_plugins('imager').get(pname) |
184 | if not plugin_class: | 186 | if not plugin_class: |
185 | raise WicError('Unknown plugin: %s' % pname) | 187 | raise WicError('Unknown plugin: %s' % pname) |
@@ -232,6 +234,16 @@ class Disk: | |||
232 | self._psector_size = None | 234 | self._psector_size = None |
233 | self._ptable_format = None | 235 | self._ptable_format = None |
234 | 236 | ||
237 | # define sector size | ||
238 | sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE') | ||
239 | if sector_size_str is not None: | ||
240 | try: | ||
241 | self.sector_size = int(sector_size_str) | ||
242 | except ValueError: | ||
243 | self.sector_size = None | ||
244 | else: | ||
245 | self.sector_size = None | ||
246 | |||
235 | # find parted | 247 | # find parted |
236 | # read paths from $PATH environment variable | 248 | # read paths from $PATH environment variable |
237 | # if it fails, use hardcoded paths | 249 | # if it fails, use hardcoded paths |
@@ -258,7 +270,13 @@ class Disk: | |||
258 | def get_partitions(self): | 270 | def get_partitions(self): |
259 | if self._partitions is None: | 271 | if self._partitions is None: |
260 | self._partitions = OrderedDict() | 272 | self._partitions = OrderedDict() |
261 | out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) | 273 | |
274 | if self.sector_size is not None: | ||
275 | out = exec_cmd("export PARTED_SECTOR_SIZE=%d; %s -sm %s unit B print" % \ | ||
276 | (self.sector_size, self.parted, self.imagepath), True) | ||
277 | else: | ||
278 | out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) | ||
279 | |||
262 | parttype = namedtuple("Part", "pnum start end size fstype") | 280 | parttype = namedtuple("Part", "pnum start end size fstype") |
263 | splitted = out.splitlines() | 281 | splitted = out.splitlines() |
264 | # skip over possible errors in exec_cmd output | 282 | # skip over possible errors in exec_cmd output |
@@ -359,7 +377,7 @@ class Disk: | |||
359 | Remove files/dirs and their contents from the partition. | 377 | Remove files/dirs and their contents from the partition. |
360 | This only applies to ext* partition. | 378 | This only applies to ext* partition. |
361 | """ | 379 | """ |
362 | abs_path = re.sub('\/\/+', '/', path) | 380 | abs_path = re.sub(r'\/\/+', '/', path) |
363 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, | 381 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, |
364 | self._get_part_image(pnum), | 382 | self._get_part_image(pnum), |
365 | abs_path) | 383 | abs_path) |
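The dash-to-underscore mapping above (repeated for partition sources in
partition.py and for the bootloader source in direct.py) keeps existing
.wks files working now that plugin names double as Python module names; a
one-line illustration:

    pname = "bootimg-pcbios".replace("-", "_")   # -> "bootimg_pcbios"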
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py index 163535e431..2e3061f343 100644 --- a/scripts/lib/wic/help.py +++ b/scripts/lib/wic/help.py | |||
@@ -544,18 +544,18 @@ DESCRIPTION | |||
544 | the --source param given to that partition. For example, if the | 544 | the --source param given to that partition. For example, if the |
545 | partition is set up like this: | 545 | partition is set up like this: |
546 | 546 | ||
547 | part /boot --source bootimg-pcbios ... | 547 | part /boot --source bootimg_pcbios ... |
548 | 548 | ||
549 | then the methods defined as class members of the plugin having the | 549 | then the methods defined as class members of the plugin having the |
550 | matching bootimg-pcbios .name class member would be used. | 550 | matching bootimg_pcbios .name class member would be used. |
551 | 551 | ||
552 | To be more concrete, here's the plugin definition that would match | 552 | To be more concrete, here's the plugin definition that would match |
553 | a '--source bootimg-pcbios' usage, along with an example method | 553 | a '--source bootimg_pcbios' usage, along with an example method |
554 | that would be called by the wic implementation when it needed to | 554 | that would be called by the wic implementation when it needed to |
555 | invoke an implementation-specific partition-preparation function: | 555 | invoke an implementation-specific partition-preparation function: |
556 | 556 | ||
557 | class BootimgPcbiosPlugin(SourcePlugin): | 557 | class BootimgPcbiosPlugin(SourcePlugin): |
558 | name = 'bootimg-pcbios' | 558 | name = 'bootimg_pcbios' |
559 | 559 | ||
560 | @classmethod | 560 | @classmethod |
561 | def do_prepare_partition(self, part, ...) | 561 | def do_prepare_partition(self, part, ...) |
@@ -794,7 +794,7 @@ DESCRIPTION | |||
794 | 794 | ||
795 | Here is the content of test.wks: | 795 | Here is the content of test.wks: |
796 | 796 | ||
797 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 797 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
798 | part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 | 798 | part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 |
799 | 799 | ||
800 | bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" | 800 | bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" |
@@ -916,6 +916,10 @@ DESCRIPTION | |||
916 | will create an empty partition. The --size parameter has | 916 | will create an empty partition. The --size parameter has |
917 | to be used to specify the size of the empty partition. | 917 | to be used to specify the size of the empty partition. |
918 | 918 | ||
919 | --sourceparams: This option is specific to wic. Supply additional | ||
920 | parameters to the source plugin in | ||
921 | key1=value1,key2 format. | ||
922 | |||
919 | --ondisk or --ondrive: Forces the partition to be created on | 923 | --ondisk or --ondrive: Forces the partition to be created on |
920 | a particular disk. | 924 | a particular disk. |
921 | 925 | ||
@@ -932,6 +936,7 @@ DESCRIPTION | |||
932 | squashfs | 936 | squashfs |
933 | erofs | 937 | erofs |
934 | swap | 938 | swap |
939 | none | ||
935 | 940 | ||
936 | --fsoptions: Specifies a free-form string of options to be | 941 | --fsoptions: Specifies a free-form string of options to be |
937 | used when mounting the filesystem. This string | 942 | used when mounting the filesystem. This string |
@@ -965,6 +970,14 @@ DESCRIPTION | |||
965 | to start a partition on an x KBytes | 970 | to start a partition on an x KBytes |
966 | boundary. | 971 | boundary. |
967 | 972 | ||
973 | --offset: This option is specific to wic. It places the partition | ||
974 | at exactly the specified offset. If the partition cannot be | ||
975 | placed at the specified offset, the image build will fail. | ||
976 | Specify the offset as an integer value optionally followed | ||
977 | by one of the units s/S for 512-byte sectors, k/K for | ||
978 | kibibytes, M for mebibytes and G for gibibytes. The default | ||
979 | unit if none is given is k. | ||
980 | |||
968 | --no-table: This option is specific to wic. Space will be | 981 | --no-table: This option is specific to wic. Space will be |
969 | reserved for the partition and it will be | 982 | reserved for the partition and it will be |
970 | populated but it will not be added to the | 983 | populated but it will not be added to the |
@@ -1045,6 +1058,18 @@ DESCRIPTION | |||
1045 | not take effect when --mkfs-extraopts is used. This should be taken into | 1058 | not take effect when --mkfs-extraopts is used. This should be taken into |
1046 | account when using --mkfs-extraopts. | 1059 | account when using --mkfs-extraopts. |
1047 | 1060 | ||
1061 | --type: This option is specific to wic. Valid values are 'primary' | ||
1062 | and 'logical'. For msdos partition tables, this option specifies | ||
1063 | the partition type. | ||
1064 | |||
1065 | --hidden: This option is specific to wic. This option sets the | ||
1066 | RequiredPartition bit (bit 0) on GPT partitions. | ||
1067 | |||
1068 | --mbr: This option is specific to wic. This option is used with the | ||
1069 | gpt-hybrid partition table format, which uses both a GPT | ||
1070 | partition table and an MBR header. Partitions with this flag | ||
1071 | will be included in the MBR header. | ||
1072 | |||
1048 | * bootloader | 1073 | * bootloader |
1049 | 1074 | ||
1050 | This command allows the user to specify various bootloader | 1075 | This command allows the user to specify various bootloader |
@@ -1063,6 +1088,13 @@ DESCRIPTION | |||
1063 | file. Using this option will override any other | 1088 | file. Using this option will override any other |
1064 | bootloader option. | 1089 | bootloader option. |
1065 | 1090 | ||
1091 | --ptable: Specifies the partition table format. Valid values are | ||
1092 | 'msdos', 'gpt', 'gpt-hybrid'. | ||
1093 | |||
1094 | --source: Specifies the source plugin. If not specified, the | ||
1095 | --source value will be copied from the partition that has | ||
1096 | /boot as mountpoint. | ||
1097 | |||
1066 | Note that bootloader functionality and boot partitions are | 1098 | Note that bootloader functionality and boot partitions are |
1067 | implemented by the various --source plugins that implement | 1099 | implemented by the various --source plugins that implement |
1068 | bootloader functionality; the bootloader command essentially | 1100 | bootloader functionality; the bootloader command essentially |
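A hypothetical .wks line using the documented --offset syntax, placing the
boot partition exactly 8 MiB into the image (8192 with the default unit of
kibibytes):

    part /boot --source bootimg_pcbios --ondisk sda --label boot --active --offset 8192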
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py index 795707ec5d..b34691d313 100644 --- a/scripts/lib/wic/partition.py +++ b/scripts/lib/wic/partition.py | |||
@@ -164,6 +164,9 @@ class Partition(): | |||
164 | 164 | ||
165 | plugins = PluginMgr.get_plugins('source') | 165 | plugins = PluginMgr.get_plugins('source') |
166 | 166 | ||
167 | # Don't support '-' in plugin names | ||
168 | self.source = self.source.replace("-", "_") | ||
169 | |||
167 | if self.source not in plugins: | 170 | if self.source not in plugins: |
168 | raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" | 171 | raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" |
169 | "See 'wic list source-plugins' for a list of available" | 172 | "See 'wic list source-plugins' for a list of available" |
@@ -178,7 +181,7 @@ class Partition(): | |||
178 | splitted = self.sourceparams.split(',') | 181 | splitted = self.sourceparams.split(',') |
179 | srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par) | 182 | srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par) |
180 | 183 | ||
181 | plugin = PluginMgr.get_plugins('source')[self.source] | 184 | plugin = plugins[self.source] |
182 | plugin.do_configure_partition(self, srcparams_dict, creator, | 185 | plugin.do_configure_partition(self, srcparams_dict, creator, |
183 | cr_workdir, oe_builddir, bootimg_dir, | 186 | cr_workdir, oe_builddir, bootimg_dir, |
184 | kernel_dir, native_sysroot) | 187 | kernel_dir, native_sysroot) |
@@ -222,19 +225,19 @@ class Partition(): | |||
222 | if (pseudo_dir): | 225 | if (pseudo_dir): |
223 | # Canonicalize the ignore paths. This corresponds to | 226 | # Canonicalize the ignore paths. This corresponds to |
224 | # calling oe.path.canonicalize(), which is used in bitbake.conf. | 227 | # calling oe.path.canonicalize(), which is used in bitbake.conf. |
225 | ignore_paths = [rootfs] + (get_bitbake_var("PSEUDO_IGNORE_PATHS") or "").split(",") | 228 | include_paths = [rootfs_dir] + (get_bitbake_var("PSEUDO_INCLUDE_PATHS") or "").split(",") |
226 | canonical_paths = [] | 229 | canonical_paths = [] |
227 | for path in ignore_paths: | 230 | for path in include_paths: |
228 | if "$" not in path: | 231 | if "$" not in path: |
229 | trailing_slash = path.endswith("/") and "/" or "" | 232 | trailing_slash = path.endswith("/") and "/" or "" |
230 | canonical_paths.append(os.path.realpath(path) + trailing_slash) | 233 | canonical_paths.append(os.path.realpath(path) + trailing_slash) |
231 | ignore_paths = ",".join(canonical_paths) | 234 | include_paths = ",".join(canonical_paths) |
232 | 235 | ||
233 | pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix | 236 | pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix |
234 | pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir | 237 | pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir |
235 | pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir | 238 | pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir |
236 | pseudo += "export PSEUDO_NOSYMLINKEXP=1;" | 239 | pseudo += "export PSEUDO_NOSYMLINKEXP=1;" |
237 | pseudo += "export PSEUDO_IGNORE_PATHS=%s;" % ignore_paths | 240 | pseudo += "export PSEUDO_INCLUDE_PATHS=%s;" % include_paths |
238 | pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") | 241 | pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") |
239 | else: | 242 | else: |
240 | pseudo = None | 243 | pseudo = None |
@@ -244,7 +247,7 @@ class Partition(): | |||
244 | # from bitbake variable | 247 | # from bitbake variable |
245 | rsize_bb = get_bitbake_var('ROOTFS_SIZE') | 248 | rsize_bb = get_bitbake_var('ROOTFS_SIZE') |
246 | rdir = get_bitbake_var('IMAGE_ROOTFS') | 249 | rdir = get_bitbake_var('IMAGE_ROOTFS') |
247 | if rsize_bb and rdir == rootfs_dir: | 250 | if rsize_bb and (rdir == rootfs_dir or (rootfs_dir.split('/')[-2] == "tmp-wic" and rootfs_dir.split('/')[-1][:6] == "rootfs")): |
248 | # Bitbake variable ROOTFS_SIZE is calculated in | 251 | # Bitbake variable ROOTFS_SIZE is calculated in |
249 | # Image._get_rootfs_size method from meta/lib/oe/image.py | 252 | # Image._get_rootfs_size method from meta/lib/oe/image.py |
250 | # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, | 253 | # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, |
@@ -284,19 +287,8 @@ class Partition(): | |||
284 | 287 | ||
285 | extraopts = self.mkfs_extraopts or "-F -i 8192" | 288 | extraopts = self.mkfs_extraopts or "-F -i 8192" |
286 | 289 | ||
287 | if os.getenv('SOURCE_DATE_EPOCH'): | 290 | # use hash_seed to generate reproducible ext4 images |
288 | sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) | 291 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, pseudo) |
289 | if pseudo: | ||
290 | pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo) | ||
291 | else: | ||
292 | pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time | ||
293 | |||
294 | # Set hash_seed to generate deterministic directory indexes | ||
295 | namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460") | ||
296 | if self.fsuuid: | ||
297 | namespace = uuid.UUID(self.fsuuid) | ||
298 | hash_seed = str(uuid.uuid5(namespace, str(sde_time))) | ||
299 | extraopts += " -E hash_seed=%s" % hash_seed | ||
300 | 292 | ||
301 | label_str = "" | 293 | label_str = "" |
302 | if self.label: | 294 | if self.label: |
@@ -344,6 +336,23 @@ class Partition(): | |||
344 | 336 | ||
345 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 337 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
346 | 338 | ||
339 | def get_hash_seed_ext4(self, extraopts, pseudo): | ||
340 | if os.getenv('SOURCE_DATE_EPOCH'): | ||
341 | sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) | ||
342 | if pseudo: | ||
343 | pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo) | ||
344 | else: | ||
345 | pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time | ||
346 | |||
347 | # Set hash_seed to generate deterministic directory indexes | ||
348 | namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460") | ||
349 | if self.fsuuid: | ||
350 | namespace = uuid.UUID(self.fsuuid) | ||
351 | hash_seed = str(uuid.uuid5(namespace, str(sde_time))) | ||
352 | extraopts += " -E hash_seed=%s" % hash_seed | ||
353 | |||
354 | return (extraopts, pseudo) | ||
355 | |||
347 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, | 356 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, |
348 | native_sysroot, pseudo): | 357 | native_sysroot, pseudo): |
349 | """ | 358 | """ |
@@ -437,13 +446,16 @@ class Partition(): | |||
437 | 446 | ||
438 | extraopts = self.mkfs_extraopts or "-i 8192" | 447 | extraopts = self.mkfs_extraopts or "-i 8192" |
439 | 448 | ||
449 | # use hash_seed to generate reproducible ext4 images | ||
450 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, None) | ||
451 | |||
440 | label_str = "" | 452 | label_str = "" |
441 | if self.label: | 453 | if self.label: |
442 | label_str = "-L %s" % self.label | 454 | label_str = "-L %s" % self.label |
443 | 455 | ||
444 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ | 456 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ |
445 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) | 457 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) |
446 | exec_native_cmd(mkfs_cmd, native_sysroot) | 458 | exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) |
447 | 459 | ||
448 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 460 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
449 | 461 | ||
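The hash seed derivation factored out into get_hash_seed_ext4() is fully
deterministic: the seed is a UUIDv5 of SOURCE_DATE_EPOCH under a fixed (or
fsuuid-derived) namespace, so identical inputs always produce the same
ext4 directory indexes. A standalone sketch:

    import uuid

    def hash_seed(sde_time, fsuuid=None):
        # Same namespace constant as in partition.py
        namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
        if fsuuid:
            namespace = uuid.UUID(fsuuid)
        return str(uuid.uuid5(namespace, str(sde_time)))

    print(hash_seed(1700000000))  # same epoch -> same seed, every build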
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py index b64568339b..640da292d3 100644 --- a/scripts/lib/wic/pluginbase.py +++ b/scripts/lib/wic/pluginbase.py | |||
@@ -44,7 +44,7 @@ class PluginMgr: | |||
44 | path = os.path.join(layer_path, script_plugin_dir) | 44 | path = os.path.join(layer_path, script_plugin_dir) |
45 | path = os.path.abspath(os.path.expanduser(path)) | 45 | path = os.path.abspath(os.path.expanduser(path)) |
46 | if path not in cls._plugin_dirs and os.path.isdir(path): | 46 | if path not in cls._plugin_dirs and os.path.isdir(path): |
47 | cls._plugin_dirs.insert(0, path) | 47 | cls._plugin_dirs.append(path) |
48 | 48 | ||
49 | if ptype not in PLUGINS: | 49 | if ptype not in PLUGINS: |
50 | # load all ptype plugins | 50 | # load all ptype plugins |
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py index a1d152659b..6e1f1c8cba 100644 --- a/scripts/lib/wic/plugins/imager/direct.py +++ b/scripts/lib/wic/plugins/imager/direct.py | |||
@@ -203,6 +203,8 @@ class DirectPlugin(ImagerPlugin): | |||
203 | source_plugin = self.ks.bootloader.source | 203 | source_plugin = self.ks.bootloader.source |
204 | disk_name = self.parts[0].disk | 204 | disk_name = self.parts[0].disk |
205 | if source_plugin: | 205 | if source_plugin: |
206 | # Don't support '-' in plugin names | ||
207 | source_plugin = source_plugin.replace("-", "_") | ||
206 | plugin = PluginMgr.get_plugins('source')[source_plugin] | 208 | plugin = PluginMgr.get_plugins('source')[source_plugin] |
207 | plugin.do_install_disk(self._image, disk_name, self, self.workdir, | 209 | plugin.do_install_disk(self._image, disk_name, self, self.workdir, |
208 | self.oe_builddir, self.bootimg_dir, | 210 | self.oe_builddir, self.bootimg_dir, |
@@ -321,7 +323,15 @@ class PartitionedImage(): | |||
321 | self.partitions = partitions | 323 | self.partitions = partitions |
322 | self.partimages = [] | 324 | self.partimages = [] |
323 | # Size of a sector used in calculations | 325 | # Size of a sector used in calculations |
324 | self.sector_size = SECTOR_SIZE | 326 | sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE') |
327 | if sector_size_str is not None: | ||
328 | try: | ||
329 | self.sector_size = int(sector_size_str) | ||
330 | except ValueError: | ||
331 | self.sector_size = SECTOR_SIZE | ||
332 | else: | ||
333 | self.sector_size = SECTOR_SIZE | ||
334 | |||
325 | self.native_sysroot = native_sysroot | 335 | self.native_sysroot = native_sysroot |
326 | num_real_partitions = len([p for p in self.partitions if not p.no_table]) | 336 | num_real_partitions = len([p for p in self.partitions if not p.no_table]) |
327 | self.extra_space = extra_space | 337 | self.extra_space = extra_space |
@@ -508,7 +518,8 @@ class PartitionedImage(): | |||
508 | logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", | 518 | logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", |
509 | parttype, start, end, size) | 519 | parttype, start, end, size) |
510 | 520 | ||
511 | cmd = "parted -s %s unit s mkpart %s" % (device, parttype) | 521 | cmd = "export PARTED_SECTOR_SIZE=%d; parted -s %s unit s mkpart %s" % \ |
522 | (self.sector_size, device, parttype) | ||
512 | if fstype: | 523 | if fstype: |
513 | cmd += " %s" % fstype | 524 | cmd += " %s" % fstype |
514 | cmd += " %d %d" % (start, end) | 525 | cmd += " %d %d" % (start, end) |
@@ -527,8 +538,8 @@ class PartitionedImage(): | |||
527 | os.ftruncate(sparse.fileno(), min_size) | 538 | os.ftruncate(sparse.fileno(), min_size) |
528 | 539 | ||
529 | logger.debug("Initializing partition table for %s", device) | 540 | logger.debug("Initializing partition table for %s", device) |
530 | exec_native_cmd("parted -s %s mklabel %s" % (device, ptable_format), | 541 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s mklabel %s" % |
531 | self.native_sysroot) | 542 | (self.sector_size, device, ptable_format), self.native_sysroot) |
532 | 543 | ||
533 | def _write_disk_guid(self): | 544 | def _write_disk_guid(self): |
534 | if self.ptable_format in ('gpt', 'gpt-hybrid'): | 545 | if self.ptable_format in ('gpt', 'gpt-hybrid'): |
@@ -538,7 +549,8 @@ class PartitionedImage(): | |||
538 | self.disk_guid = uuid.uuid4() | 549 | self.disk_guid = uuid.uuid4() |
539 | 550 | ||
540 | logger.debug("Set disk guid %s", self.disk_guid) | 551 | logger.debug("Set disk guid %s", self.disk_guid) |
541 | sfdisk_cmd = "sfdisk --disk-id %s %s" % (self.path, self.disk_guid) | 552 | sfdisk_cmd = "sfdisk --sector-size %s --disk-id %s %s" % \ |
553 | (self.sector_size, self.path, self.disk_guid) | ||
542 | exec_native_cmd(sfdisk_cmd, self.native_sysroot) | 554 | exec_native_cmd(sfdisk_cmd, self.native_sysroot) |
543 | 555 | ||
544 | def create(self): | 556 | def create(self): |
@@ -613,45 +625,44 @@ class PartitionedImage(): | |||
613 | partition_label = part.part_name if part.part_name else part.label | 625 | partition_label = part.part_name if part.part_name else part.label |
614 | logger.debug("partition %d: set name to %s", | 626 | logger.debug("partition %d: set name to %s", |
615 | part.num, partition_label) | 627 | part.num, partition_label) |
616 | exec_native_cmd("sgdisk --change-name=%d:%s %s" % \ | 628 | exec_native_cmd("sfdisk --sector-size %s --part-label %s %d %s" % \ |
617 | (part.num, partition_label, | 629 | (self.sector_size, self.path, part.num, |
618 | self.path), self.native_sysroot) | 630 | partition_label), self.native_sysroot) |
619 | |||
620 | if part.part_type: | 631 | if part.part_type: |
621 | logger.debug("partition %d: set type UID to %s", | 632 | logger.debug("partition %d: set type UID to %s", |
622 | part.num, part.part_type) | 633 | part.num, part.part_type) |
623 | exec_native_cmd("sgdisk --typecode=%d:%s %s" % \ | 634 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %d %s" % \ |
624 | (part.num, part.part_type, | 635 | (self.sector_size, self.path, part.num, |
625 | self.path), self.native_sysroot) | 636 | part.part_type), self.native_sysroot) |
626 | 637 | ||
627 | if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"): | 638 | if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"): |
628 | logger.debug("partition %d: set UUID to %s", | 639 | logger.debug("partition %d: set UUID to %s", |
629 | part.num, part.uuid) | 640 | part.num, part.uuid) |
630 | exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \ | 641 | exec_native_cmd("sfdisk --sector-size %s --part-uuid %s %d %s" % \ |
631 | (part.num, part.uuid, self.path), | 642 | (self.sector_size, self.path, part.num, part.uuid), |
632 | self.native_sysroot) | 643 | self.native_sysroot) |
633 | 644 | ||
634 | if part.active: | 645 | if part.active: |
635 | flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot" | 646 | flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot" |
636 | logger.debug("Set '%s' flag for partition '%s' on disk '%s'", | 647 | logger.debug("Set '%s' flag for partition '%s' on disk '%s'", |
637 | flag_name, part.num, self.path) | 648 | flag_name, part.num, self.path) |
638 | exec_native_cmd("parted -s %s set %d %s on" % \ | 649 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \ |
639 | (self.path, part.num, flag_name), | 650 | (self.sector_size, self.path, part.num, flag_name), |
640 | self.native_sysroot) | 651 | self.native_sysroot) |
641 | if self.ptable_format == 'gpt-hybrid' and part.mbr: | 652 | if self.ptable_format == 'gpt-hybrid' and part.mbr: |
642 | exec_native_cmd("parted -s %s set %d %s on" % \ | 653 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \ |
643 | (mbr_path, hybrid_mbr_part_num, "boot"), | 654 | (self.sector_size, mbr_path, hybrid_mbr_part_num, "boot"), |
644 | self.native_sysroot) | 655 | self.native_sysroot) |
645 | if part.system_id: | 656 | if part.system_id: |
646 | exec_native_cmd("sfdisk --part-type %s %s %s" % \ | 657 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %s %s" % \ |
647 | (self.path, part.num, part.system_id), | 658 | (self.sector_size, self.path, part.num, part.system_id), |
648 | self.native_sysroot) | 659 | self.native_sysroot) |
649 | 660 | ||
650 | if part.hidden and self.ptable_format == "gpt": | 661 | if part.hidden and self.ptable_format == "gpt": |
651 | logger.debug("Set hidden attribute for partition '%s' on disk '%s'", | 662 | logger.debug("Set hidden attribute for partition '%s' on disk '%s'", |
652 | part.num, self.path) | 663 | part.num, self.path) |
653 | exec_native_cmd("sfdisk --part-attrs %s %s RequiredPartition" % \ | 664 | exec_native_cmd("sfdisk --sector-size %s --part-attrs %s %s RequiredPartition" % \ |
654 | (self.path, part.num), | 665 | (self.sector_size, self.path, part.num), |
655 | self.native_sysroot) | 666 | self.native_sysroot) |
656 | 667 | ||
657 | if self.ptable_format == "gpt-hybrid": | 668 | if self.ptable_format == "gpt-hybrid": |
@@ -664,7 +675,8 @@ class PartitionedImage(): | |||
664 | # create with an arbitrary type, then change it to the correct type | 675 | # create with an arbitrary type, then change it to the correct type |
665 | # with sfdisk | 676 | # with sfdisk |
666 | self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD) | 677 | self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD) |
667 | exec_native_cmd("sfdisk --part-type %s %d 0xee" % (mbr_path, hybrid_mbr_part_num), | 678 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %d 0xee" % \ |
679 | (self.sector_size, mbr_path, hybrid_mbr_part_num), | ||
668 | self.native_sysroot) | 680 | self.native_sysroot) |
669 | 681 | ||
670 | # Copy hybrid MBR | 682 | # Copy hybrid MBR |
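Both Disk (engine.py) and PartitionedImage (above) now resolve the sector
size the same way: WIC_SECTOR_SIZE overrides the default when it parses as
an integer, and the result is exported as PARTED_SECTOR_SIZE for parted
and passed as --sector-size to sfdisk. A condensed sketch of the fallback
logic (get_bitbake_var is the real helper supplying the string):

    SECTOR_SIZE = 512

    def effective_sector_size(sector_size_str):
        # Fall back to the default on unset or non-integer values
        if sector_size_str is not None:
            try:
                return int(sector_size_str)
            except ValueError:
                pass
        return SECTOR_SIZE

Note that in Disk the fallback is None rather than 512, in which case the
plain parted invocation without PARTED_SECTOR_SIZE is used.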
diff --git a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py index 5bd7390680..4279ddded8 100644 --- a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py +++ b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py | |||
@@ -13,7 +13,7 @@ | |||
13 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | 13 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
14 | # | 14 | # |
15 | # DESCRIPTION | 15 | # DESCRIPTION |
16 | # This implements the 'bootimg-biosplusefi' source plugin class for 'wic' | 16 | # This implements the 'bootimg_biosplusefi' source plugin class for 'wic' |
17 | # | 17 | # |
18 | # AUTHORS | 18 | # AUTHORS |
19 | # William Bourque <wbourque [at) gmail.com> | 19 | # William Bourque <wbourque [at) gmail.com> |
@@ -34,7 +34,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
34 | 34 | ||
35 | Note it is possible to create an image that can boot from both | 35 | Note it is possible to create an image that can boot from both |
36 | legacy BIOS and EFI by defining two partitions: one with arg | 36 | legacy BIOS and EFI by defining two partitions: one with arg |
37 | --source bootimg-efi and another one with --source bootimg-pcbios. | 37 | --source bootimg_efi and another one with --source bootimg_pcbios. |
38 | However, this method has the obvious downside that it requires TWO | 38 | However, this method has the obvious downside that it requires TWO |
39 | partitions to be created on the storage device. | 39 | partitions to be created on the storage device. |
40 | Both partitions will also be marked as "bootable" which does not work on | 40 | Both partitions will also be marked as "bootable" which does not work on |
@@ -45,7 +45,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
45 | the first partition will be duplicated into the second, even though it | 45 | the first partition will be duplicated into the second, even though it |
46 | will not be used at all. | 46 | will not be used at all. |
47 | 47 | ||
48 | Also, unlike "isoimage-isohybrid" that also does BIOS and EFI, this plugin | 48 | Also, unlike "isoimage_isohybrid" that also does BIOS and EFI, this plugin |
49 | allows you to have more than only a single rootfs partitions and does | 49 | allows you to have more than only a single rootfs partitions and does |
50 | not turn the rootfs into an initramfs RAM image. | 50 | not turn the rootfs into an initramfs RAM image. |
51 | 51 | ||
@@ -53,32 +53,32 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
53 | does not have the limitations listed above. | 53 | does not have the limitations listed above. |
54 | 54 | ||
55 | The plugin is made so it tries not to reimplement what's already | 55 | The plugin is made so it tries not to reimplement what's already |
56 | been done in other plugins; as such it imports "bootimg-pcbios" | 56 | been done in other plugins; as such it imports "bootimg_pcbios" |
57 | and "bootimg-efi". | 57 | and "bootimg_efi". |
58 | Plugin "bootimg-pcbios" is used to generate legacy BIOS boot. | 58 | Plugin "bootimg_pcbios" is used to generate legacy BIOS boot. |
59 | Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it | 59 | Plugin "bootimg_efi" is used to generate the UEFI boot. Note that it |
60 | requires a --sourceparams argument to know which loader to use; refer | 60 | requires a --sourceparams argument to know which loader to use; refer |
61 | to "bootimg-efi" code/documentation for the list of loader. | 61 | to "bootimg_efi" code/documentation for the list of loader. |
62 | 62 | ||
63 | Imports are handled with "SourceFileLoader" from importlib as it is | 63 | Imports are handled with "SourceFileLoader" from importlib as it is |
64 | otherwise very difficult to import modules that have a hyphen "-" in their | 64 | otherwise very difficult to import modules that have a hyphen "-" in their |
65 | filename. | 65 | filename. |
66 | The SourcePlugin() methods used in the plugins (do_install_disk, | 66 | The SourcePlugin() methods used in the plugins (do_install_disk, |
67 | do_configure_partition, do_prepare_partition) are then called on both, | 67 | do_configure_partition, do_prepare_partition) are then called on both, |
68 | beginning with "bootimg-efi". | 68 | beginning with "bootimg_efi". |
69 | 69 | ||
70 | Plugin options, such as "--sourceparams", can still be passed to a | 70 | Plugin options, such as "--sourceparams", can still be passed to a |
71 | plugin, as long as they do not cause issues in the other plugin. | 71 | plugin, as long as they do not cause issues in the other plugin. |
72 | 72 | ||
73 | Example wic configuration: | 73 | Example wic configuration: |
74 | part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\ | 74 | part /boot --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\\ |
75 | --ondisk sda --label os_boot --active --align 1024 --use-uuid | 75 | --ondisk sda --label os_boot --active --align 1024 --use-uuid |
76 | """ | 76 | """ |
77 | 77 | ||
78 | name = 'bootimg-biosplusefi' | 78 | name = 'bootimg_biosplusefi' |
79 | 79 | ||
80 | __PCBIOS_MODULE_NAME = "bootimg-pcbios" | 80 | __PCBIOS_MODULE_NAME = "bootimg_pcbios" |
81 | __EFI_MODULE_NAME = "bootimg-efi" | 81 | __EFI_MODULE_NAME = "bootimg_efi" |
82 | 82 | ||
83 | __imgEFIObj = None | 83 | __imgEFIObj = None |
84 | __imgBiosObj = None | 84 | __imgBiosObj = None |
@@ -100,7 +100,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
100 | 100 | ||
101 | """ | 101 | """ |
102 | 102 | ||
103 | # Import bootimg-pcbios (class name "BootimgPcbiosPlugin") | 103 | # Import bootimg_pcbios (class name "BootimgPcbiosPlugin") |
104 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), | 104 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), |
105 | cls.__PCBIOS_MODULE_NAME + ".py") | 105 | cls.__PCBIOS_MODULE_NAME + ".py") |
106 | loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) | 106 | loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) |
@@ -108,7 +108,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
108 | loader.exec_module(mod) | 108 | loader.exec_module(mod) |
109 | cls.__imgBiosObj = mod.BootimgPcbiosPlugin() | 109 | cls.__imgBiosObj = mod.BootimgPcbiosPlugin() |
110 | 110 | ||
111 | # Import bootimg-efi (class name "BootimgEFIPlugin") | 111 | # Import bootimg_efi (class name "BootimgEFIPlugin") |
112 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), | 112 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), |
113 | cls.__EFI_MODULE_NAME + ".py") | 113 | cls.__EFI_MODULE_NAME + ".py") |
114 | loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) | 114 | loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) |
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg_efi.py index 13a9cddf4e..cf16705a28 100644 --- a/scripts/lib/wic/plugins/source/bootimg-efi.py +++ b/scripts/lib/wic/plugins/source/bootimg_efi.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'bootimg-efi' source plugin class for 'wic' | 7 | # This implements the 'bootimg_efi' source plugin class for 'wic' |
8 | # | 8 | # |
9 | # AUTHORS | 9 | # AUTHORS |
10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> | 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> |
@@ -32,7 +32,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
32 | This plugin supports GRUB 2 and systemd-boot bootloaders. | 32 | This plugin supports GRUB 2 and systemd-boot bootloaders. |
33 | """ | 33 | """ |
34 | 34 | ||
35 | name = 'bootimg-efi' | 35 | name = 'bootimg_efi' |
36 | 36 | ||
37 | @classmethod | 37 | @classmethod |
38 | def _copy_additional_files(cls, hdddir, initrd, dtb): | 38 | def _copy_additional_files(cls, hdddir, initrd, dtb): |
@@ -43,16 +43,18 @@ class BootimgEFIPlugin(SourcePlugin): | |||
43 | if initrd: | 43 | if initrd: |
44 | initrds = initrd.split(';') | 44 | initrds = initrd.split(';') |
45 | for rd in initrds: | 45 | for rd in initrds: |
46 | cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir) | 46 | cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, rd, hdddir) |
47 | exec_cmd(cp_cmd, True) | 47 | out = exec_cmd(cp_cmd, True) |
48 | logger.debug("initrd files:\n%s" % (out)) | ||
48 | else: | 49 | else: |
49 | logger.debug("Ignoring missing initrd") | 50 | logger.debug("Ignoring missing initrd") |
50 | 51 | ||
51 | if dtb: | 52 | if dtb: |
52 | if ';' in dtb: | 53 | if ';' in dtb: |
53 | raise WicError("Only one DTB supported, exiting") | 54 | raise WicError("Only one DTB supported, exiting") |
54 | cp_cmd = "cp %s/%s %s" % (bootimg_dir, dtb, hdddir) | 55 | cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, dtb, hdddir) |
55 | exec_cmd(cp_cmd, True) | 56 | out = exec_cmd(cp_cmd, True) |
57 | logger.debug("dtb files:\n%s" % (out)) | ||
56 | 58 | ||
57 | @classmethod | 59 | @classmethod |
58 | def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): | 60 | def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): |
@@ -123,8 +125,16 @@ class BootimgEFIPlugin(SourcePlugin): | |||
123 | @classmethod | 125 | @classmethod |
124 | def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): | 126 | def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): |
125 | """ | 127 | """ |
126 | Create loader-specific systemd-boot/gummiboot config | 128 | Create loader-specific systemd-boot/gummiboot config. Unified Kernel Image (uki) |
129 | support is done in the image recipe with uki.bbclass; only the systemd-boot loader | ||
130 | config and the ESP partition structure are created here. | ||
127 | """ | 131 | """ |
132 | # detect uki.bbclass usage | ||
133 | image_classes = get_bitbake_var("IMAGE_CLASSES").split() | ||
134 | unified_image = False | ||
135 | if "uki" in image_classes: | ||
136 | unified_image = True | ||
137 | |||
128 | install_cmd = "install -d %s/loader" % hdddir | 138 | install_cmd = "install -d %s/loader" % hdddir |
129 | exec_cmd(install_cmd) | 139 | exec_cmd(install_cmd) |
130 | 140 | ||
@@ -132,28 +142,26 @@ class BootimgEFIPlugin(SourcePlugin): | |||
132 | exec_cmd(install_cmd) | 142 | exec_cmd(install_cmd) |
133 | 143 | ||
134 | bootloader = creator.ks.bootloader | 144 | bootloader = creator.ks.bootloader |
135 | |||
136 | unified_image = source_params.get('create-unified-kernel-image') == "true" | ||
137 | |||
138 | loader_conf = "" | 145 | loader_conf = "" |
139 | if not unified_image: | ||
140 | loader_conf += "default boot\n" | ||
141 | loader_conf += "timeout %d\n" % bootloader.timeout | ||
142 | 146 | ||
143 | initrd = source_params.get('initrd') | 147 | # 5 seconds is a sensible default timeout |
144 | dtb = source_params.get('dtb') | 148 | loader_conf += "timeout %d\n" % (bootloader.timeout or 5) |
145 | |||
146 | if not unified_image: | ||
147 | cls._copy_additional_files(hdddir, initrd, dtb) | ||
148 | 149 | ||
149 | logger.debug("Writing systemd-boot config " | 150 | logger.debug("Writing systemd-boot config " |
150 | "%s/hdd/boot/loader/loader.conf", cr_workdir) | 151 | "%s/hdd/boot/loader/loader.conf", cr_workdir) |
151 | cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") | 152 | cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") |
152 | cfg.write(loader_conf) | 153 | cfg.write(loader_conf) |
154 | logger.debug("loader.conf:\n%s" % (loader_conf)) | ||
153 | cfg.close() | 155 | cfg.close() |
154 | 156 | ||
157 | initrd = source_params.get('initrd') | ||
158 | dtb = source_params.get('dtb') | ||
159 | if not unified_image: | ||
160 | cls._copy_additional_files(hdddir, initrd, dtb) | ||
161 | |||
155 | configfile = creator.ks.bootloader.configfile | 162 | configfile = creator.ks.bootloader.configfile |
156 | custom_cfg = None | 163 | custom_cfg = None |
164 | boot_conf = "" | ||
157 | if configfile: | 165 | if configfile: |
158 | custom_cfg = get_custom_config(configfile) | 166 | custom_cfg = get_custom_config(configfile) |
159 | if custom_cfg: | 167 | if custom_cfg: |
@@ -164,8 +172,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
164 | else: | 172 | else: |
165 | raise WicError("configfile is specified but failed to " | 173 | raise WicError("configfile is specified but failed to " |
166 | "get it from %s.", configfile) | 174 | "get it from %s.", configfile) |
167 | 175 | else: | |
168 | if not custom_cfg: | ||
169 | # Create systemd-boot configuration using parameters from wks file | 176 | # Create systemd-boot configuration using parameters from wks file |
170 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | 177 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") |
171 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": | 178 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": |
@@ -175,7 +182,6 @@ class BootimgEFIPlugin(SourcePlugin): | |||
175 | 182 | ||
176 | title = source_params.get('title') | 183 | title = source_params.get('title') |
177 | 184 | ||
178 | boot_conf = "" | ||
179 | boot_conf += "title %s\n" % (title if title else "boot") | 185 | boot_conf += "title %s\n" % (title if title else "boot") |
180 | boot_conf += "linux /%s\n" % kernel | 186 | boot_conf += "linux /%s\n" % kernel |
181 | 187 | ||
@@ -200,6 +206,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
200 | "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) | 206 | "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) |
201 | cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") | 207 | cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") |
202 | cfg.write(boot_conf) | 208 | cfg.write(boot_conf) |
209 | logger.debug("boot.conf:\n%s" % (boot_conf)) | ||
203 | cfg.close() | 210 | cfg.close() |
204 | 211 | ||
205 | 212 | ||
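The boot entry itself follows the Boot Loader Specification type #1 format. A hedged sketch of just the fields visible in this hunk (kernel cmdline options are appended further down, outside the quoted context):

    def make_boot_entry(kernel, title=None):
        # minimal systemd-boot entry: a title plus the kernel to load
        conf = "title %s\n" % (title if title else "boot")
        conf += "linux /%s\n" % kernel
        return conf

    print(make_boot_entry("bzImage"))  # -> title boot / linux /bzImage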
@@ -223,9 +230,9 @@ class BootimgEFIPlugin(SourcePlugin): | |||
223 | elif source_params['loader'] == 'uefi-kernel': | 230 | elif source_params['loader'] == 'uefi-kernel': |
224 | pass | 231 | pass |
225 | else: | 232 | else: |
226 | raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader']) | 233 | raise WicError("unrecognized bootimg_efi loader: %s" % source_params['loader']) |
227 | except KeyError: | 234 | except KeyError: |
228 | raise WicError("bootimg-efi requires a loader, none specified") | 235 | raise WicError("bootimg_efi requires a loader, none specified") |
229 | 236 | ||
230 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: | 237 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: |
231 | logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') | 238 | logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') |
@@ -245,7 +252,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
245 | 252 | ||
246 | # list of tuples (src_name, dst_name) | 253 | # list of tuples (src_name, dst_name) |
247 | deploy_files = [] | 254 | deploy_files = [] |
248 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | 255 | for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files): |
249 | if ';' in src_entry: | 256 | if ';' in src_entry: |
250 | dst_entry = tuple(src_entry.split(';')) | 257 | dst_entry = tuple(src_entry.split(';')) |
251 | if not dst_entry[0] or not dst_entry[1]: | 258 | if not dst_entry[0] or not dst_entry[1]: |
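The widened character class now also accepts '+' in IMAGE_EFI_BOOT_FILES entries. A runnable check of the tokenizer with hypothetical entry names:

    import re

    boot_files = "grub.cfg;EFI/BOOT/grub.cfg bzImage kernel+fdt.itb"
    for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files):
        if ';' in src_entry:
            dst_entry = tuple(src_entry.split(';'))  # explicit src;dst rename
        else:
            dst_entry = (src_entry, src_entry)       # install under same name
        print(dst_entry)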
@@ -304,134 +311,43 @@ class BootimgEFIPlugin(SourcePlugin): | |||
304 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) | 311 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) |
305 | 312 | ||
306 | if source_params.get('create-unified-kernel-image') == "true": | 313 | if source_params.get('create-unified-kernel-image') == "true": |
307 | initrd = source_params.get('initrd') | 314 | raise WicError("create-unified-kernel-image is no longer supported. Please use uki.bbclass.") |
308 | if not initrd: | ||
309 | raise WicError("initrd= must be specified when create-unified-kernel-image=true, exiting") | ||
310 | |||
311 | deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") | ||
312 | efi_stub = glob("%s/%s" % (deploy_dir, "linux*.efi.stub")) | ||
313 | if len(efi_stub) == 0: | ||
314 | raise WicError("Unified Kernel Image EFI stub not found, exiting") | ||
315 | efi_stub = efi_stub[0] | ||
316 | |||
317 | with tempfile.TemporaryDirectory() as tmp_dir: | ||
318 | label = source_params.get('label') | ||
319 | label_conf = "root=%s" % creator.rootdev | ||
320 | if label: | ||
321 | label_conf = "LABEL=%s" % label | ||
322 | |||
323 | bootloader = creator.ks.bootloader | ||
324 | cmdline = open("%s/cmdline" % tmp_dir, "w") | ||
325 | cmdline.write("%s %s" % (label_conf, bootloader.append)) | ||
326 | cmdline.close() | ||
327 | 315 | ||
328 | initrds = initrd.split(';') | 316 | if source_params.get('install-kernel-into-boot-dir') != 'false': |
329 | initrd = open("%s/initrd" % tmp_dir, "wb") | 317 | install_cmd = "install -v -p -m 0644 %s/%s %s/%s" % \ |
330 | for f in initrds: | 318 | (staging_kernel_dir, kernel, hdddir, kernel) |
331 | with open("%s/%s" % (deploy_dir, f), 'rb') as in_file: | 319 | out = exec_cmd(install_cmd) |
332 | shutil.copyfileobj(in_file, initrd) | 320 | logger.debug("Installed kernel files:\n%s" % out) |
333 | initrd.close() | ||
334 | |||
335 | # Searched by systemd-boot: | ||
336 | # https://systemd.io/BOOT_LOADER_SPECIFICATION/#type-2-efi-unified-kernel-images | ||
337 | install_cmd = "install -d %s/EFI/Linux" % hdddir | ||
338 | exec_cmd(install_cmd) | ||
339 | |||
340 | staging_dir_host = get_bitbake_var("STAGING_DIR_HOST") | ||
341 | target_sys = get_bitbake_var("TARGET_SYS") | ||
342 | |||
343 | objdump_cmd = "%s-objdump" % target_sys | ||
344 | objdump_cmd += " -p %s" % efi_stub | ||
345 | objdump_cmd += " | awk '{ if ($1 == \"SectionAlignment\"){print $2} }'" | ||
346 | |||
347 | ret, align_str = exec_native_cmd(objdump_cmd, native_sysroot) | ||
348 | align = int(align_str, 16) | ||
349 | |||
350 | objdump_cmd = "%s-objdump" % target_sys | ||
351 | objdump_cmd += " -h %s | tail -2" % efi_stub | ||
352 | ret, output = exec_native_cmd(objdump_cmd, native_sysroot) | ||
353 | |||
354 | offset = int(output.split()[2], 16) + int(output.split()[3], 16) | ||
355 | |||
356 | osrel_off = offset + align - offset % align | ||
357 | osrel_path = "%s/usr/lib/os-release" % staging_dir_host | ||
358 | osrel_sz = os.stat(osrel_path).st_size | ||
359 | |||
360 | cmdline_off = osrel_off + osrel_sz | ||
361 | cmdline_off = cmdline_off + align - cmdline_off % align | ||
362 | cmdline_sz = os.stat(cmdline.name).st_size | ||
363 | |||
364 | dtb_off = cmdline_off + cmdline_sz | ||
365 | dtb_off = dtb_off + align - dtb_off % align | ||
366 | |||
367 | dtb = source_params.get('dtb') | ||
368 | if dtb: | ||
369 | if ';' in dtb: | ||
370 | raise WicError("Only one DTB supported, exiting") | ||
371 | dtb_path = "%s/%s" % (deploy_dir, dtb) | ||
372 | dtb_params = '--add-section .dtb=%s --change-section-vma .dtb=0x%x' % \ | ||
373 | (dtb_path, dtb_off) | ||
374 | linux_off = dtb_off + os.stat(dtb_path).st_size | ||
375 | linux_off = linux_off + align - linux_off % align | ||
376 | else: | ||
377 | dtb_params = '' | ||
378 | linux_off = dtb_off | ||
379 | |||
380 | linux_path = "%s/%s" % (staging_kernel_dir, kernel) | ||
381 | linux_sz = os.stat(linux_path).st_size | ||
382 | |||
383 | initrd_off = linux_off + linux_sz | ||
384 | initrd_off = initrd_off + align - initrd_off % align | ||
385 | |||
386 | # https://www.freedesktop.org/software/systemd/man/systemd-stub.html | ||
387 | objcopy_cmd = "%s-objcopy" % target_sys | ||
388 | objcopy_cmd += " --enable-deterministic-archives" | ||
389 | objcopy_cmd += " --preserve-dates" | ||
390 | objcopy_cmd += " --add-section .osrel=%s" % osrel_path | ||
391 | objcopy_cmd += " --change-section-vma .osrel=0x%x" % osrel_off | ||
392 | objcopy_cmd += " --add-section .cmdline=%s" % cmdline.name | ||
393 | objcopy_cmd += " --change-section-vma .cmdline=0x%x" % cmdline_off | ||
394 | objcopy_cmd += dtb_params | ||
395 | objcopy_cmd += " --add-section .linux=%s" % linux_path | ||
396 | objcopy_cmd += " --change-section-vma .linux=0x%x" % linux_off | ||
397 | objcopy_cmd += " --add-section .initrd=%s" % initrd.name | ||
398 | objcopy_cmd += " --change-section-vma .initrd=0x%x" % initrd_off | ||
399 | objcopy_cmd += " %s %s/EFI/Linux/linux.efi" % (efi_stub, hdddir) | ||
400 | |||
401 | exec_native_cmd(objcopy_cmd, native_sysroot) | ||
402 | else: | ||
403 | if source_params.get('install-kernel-into-boot-dir') != 'false': | ||
404 | install_cmd = "install -m 0644 %s/%s %s/%s" % \ | ||
405 | (staging_kernel_dir, kernel, hdddir, kernel) | ||
406 | exec_cmd(install_cmd) | ||
407 | 321 | ||
408 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): | 322 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): |
409 | for src_path, dst_path in cls.install_task: | 323 | for src_path, dst_path in cls.install_task: |
410 | install_cmd = "install -m 0644 -D %s %s" \ | 324 | install_cmd = "install -v -p -m 0644 -D %s %s" \ |
411 | % (os.path.join(kernel_dir, src_path), | 325 | % (os.path.join(kernel_dir, src_path), |
412 | os.path.join(hdddir, dst_path)) | 326 | os.path.join(hdddir, dst_path)) |
413 | exec_cmd(install_cmd) | 327 | out = exec_cmd(install_cmd) |
328 | logger.debug("Installed IMAGE_EFI_BOOT_FILES:\n%s" % out) | ||
414 | 329 | ||
415 | try: | 330 | try: |
416 | if source_params['loader'] == 'grub-efi': | 331 | if source_params['loader'] == 'grub-efi': |
417 | shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, | 332 | shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, |
418 | "%s/grub.cfg" % cr_workdir) | 333 | "%s/grub.cfg" % cr_workdir) |
419 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: | 334 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: |
420 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) | 335 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) |
421 | exec_cmd(cp_cmd, True) | 336 | exec_cmd(cp_cmd, True) |
422 | shutil.move("%s/grub.cfg" % cr_workdir, | 337 | shutil.move("%s/grub.cfg" % cr_workdir, |
423 | "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) | 338 | "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) |
424 | elif source_params['loader'] == 'systemd-boot': | 339 | elif source_params['loader'] == 'systemd-boot': |
425 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: | 340 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: |
426 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) | 341 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) |
427 | exec_cmd(cp_cmd, True) | 342 | out = exec_cmd(cp_cmd, True) |
343 | logger.debug("systemd-boot files:\n%s" % out) | ||
428 | elif source_params['loader'] == 'uefi-kernel': | 344 | elif source_params['loader'] == 'uefi-kernel': |
429 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | 345 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") |
430 | if not kernel: | 346 | if not kernel: |
431 | raise WicError("Empty KERNEL_IMAGETYPE %s\n" % target) | 347 | raise WicError("Empty KERNEL_IMAGETYPE") |
432 | target = get_bitbake_var("TARGET_SYS") | 348 | target = get_bitbake_var("TARGET_SYS") |
433 | if not target: | 349 | if not target: |
434 | raise WicError("Unknown arch (TARGET_SYS) %s\n" % target) | 350 | raise WicError("Empty TARGET_SYS") |
435 | 351 | ||
436 | if re.match("x86_64", target): | 352 | if re.match("x86_64", target): |
437 | kernel_efi_image = "bootx64.efi" | 353 | kernel_efi_image = "bootx64.efi" |
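Only the x86_64 arm of the TARGET_SYS-to-loader-name mapping is visible in this hunk. A sketch of the whole table under the standard UEFI removable-media naming; the non-x86_64 entries are assumptions, not part of the quoted diff:

    import re

    def kernel_efi_image(target):
        # default UEFI fallback loader names (\EFI\BOOT\boot*.efi)
        table = [("x86_64", "bootx64.efi"),
                 ("i.86", "bootia32.efi"),    # assumed
                 ("aarch64", "bootaa64.efi"), # assumed
                 ("arm", "bootarm.efi")]      # assumed
        for pattern, name in table:
            if re.match(pattern, target):
                return name
        raise ValueError("UEFI stub kernel is incompatible with target %s" % target)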
@@ -445,23 +361,33 @@ class BootimgEFIPlugin(SourcePlugin): | |||
445 | raise WicError("UEFI stub kernel is incompatible with target %s" % target) | 361 | raise WicError("UEFI stub kernel is incompatible with target %s" % target) |
446 | 362 | ||
447 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]: | 363 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]: |
448 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image) | 364 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image) |
449 | exec_cmd(cp_cmd, True) | 365 | out = exec_cmd(cp_cmd, True) |
366 | logger.debug("uefi-kernel files:\n%s" % out) | ||
450 | else: | 367 | else: |
451 | raise WicError("unrecognized bootimg-efi loader: %s" % | 368 | raise WicError("unrecognized bootimg_efi loader: %s" % |
452 | source_params['loader']) | 369 | source_params['loader']) |
370 | |||
371 | # must have installed at least one EFI bootloader | ||
372 | out = glob(os.path.join(hdddir, 'EFI', 'BOOT', 'boot*.efi')) | ||
373 | logger.debug("Installed EFI loader files:\n%s" % out) | ||
374 | if not out: | ||
375 | raise WicError("No EFI loaders installed to ESP partition. Check that grub-efi, systemd-boot or similar is installed.") | ||
376 | |||
453 | except KeyError: | 377 | except KeyError: |
454 | raise WicError("bootimg-efi requires a loader, none specified") | 378 | raise WicError("bootimg_efi requires a loader, none specified") |
455 | 379 | ||
456 | startup = os.path.join(kernel_dir, "startup.nsh") | 380 | startup = os.path.join(kernel_dir, "startup.nsh") |
457 | if os.path.exists(startup): | 381 | if os.path.exists(startup): |
458 | cp_cmd = "cp %s %s/" % (startup, hdddir) | 382 | cp_cmd = "cp -v -p %s %s/" % (startup, hdddir) |
459 | exec_cmd(cp_cmd, True) | 383 | out = exec_cmd(cp_cmd, True) |
384 | logger.debug("startup files:\n%s" % out) | ||
460 | 385 | ||
461 | for paths in part.include_path or []: | 386 | for paths in part.include_path or []: |
462 | for path in paths: | 387 | for path in paths: |
463 | cp_cmd = "cp -r %s %s/" % (path, hdddir) | 388 | cp_cmd = "cp -v -p -r %s %s/" % (path, hdddir) |
464 | exec_cmd(cp_cmd, True) | 389 | out = exec_cmd(cp_cmd, True) |
390 | logger.debug("include_path files:\n%s" % out) | ||
465 | 391 | ||
466 | du_cmd = "du -bks %s" % hdddir | 392 | du_cmd = "du -bks %s" % hdddir |
467 | out = exec_cmd(du_cmd) | 393 | out = exec_cmd(du_cmd) |
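The glob guard added earlier in this hunk is the key behavioural change: the build now fails early when nothing bootable landed in the ESP. A standalone sketch, with RuntimeError standing in for WicError:

    import os
    from glob import glob

    def check_efi_loader(hdddir):
        # UEFI firmware falls back to EFI/BOOT/boot*.efi, so at least one
        # such loader must exist for the image to boot
        loaders = glob(os.path.join(hdddir, 'EFI', 'BOOT', 'boot*.efi'))
        if not loaders:
            raise RuntimeError("No EFI loaders installed to ESP partition. "
                               "Check that grub-efi, systemd-boot or similar is installed.")
        return loaders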
@@ -489,12 +415,14 @@ class BootimgEFIPlugin(SourcePlugin): | |||
489 | 415 | ||
490 | label = part.label if part.label else "ESP" | 416 | label = part.label if part.label else "ESP" |
491 | 417 | ||
492 | dosfs_cmd = "mkdosfs -n %s -i %s -C %s %d" % \ | 418 | dosfs_cmd = "mkdosfs -v -n %s -i %s -C %s %d" % \ |
493 | (label, part.fsuuid, bootimg, blocks) | 419 | (label, part.fsuuid, bootimg, blocks) |
494 | exec_native_cmd(dosfs_cmd, native_sysroot) | 420 | out = exec_native_cmd(dosfs_cmd, native_sysroot) |
421 | logger.debug("mkdosfs:\n%s" % (str(out))) | ||
495 | 422 | ||
496 | mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) | 423 | mcopy_cmd = "mcopy -v -p -i %s -s %s/* ::/" % (bootimg, hdddir) |
497 | exec_native_cmd(mcopy_cmd, native_sysroot) | 424 | out = exec_native_cmd(mcopy_cmd, native_sysroot) |
425 | logger.debug("mcopy:\n%s" % (str(out))) | ||
498 | 426 | ||
499 | chmod_cmd = "chmod 644 %s" % bootimg | 427 | chmod_cmd = "chmod 644 %s" % bootimg |
500 | exec_cmd(chmod_cmd) | 428 | exec_cmd(chmod_cmd) |
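The ESP image itself is built without loop-mounting anything: mkdosfs creates the filesystem file and mcopy populates it via mtools. A sketch of the two steps with output captured for the new debug logging; subprocess stands in for wic's exec_native_cmd helper:

    import subprocess

    def build_esp(bootimg, hdddir, label, fsuuid, blocks):
        # create a FAT filesystem image of the requested size ...
        dosfs_cmd = "mkdosfs -v -n %s -i %s -C %s %d" % (label, fsuuid, bootimg, blocks)
        out = subprocess.check_output(dosfs_cmd, shell=True, text=True)
        # ... then copy the staged boot tree into it (no mount needed)
        mcopy_cmd = "mcopy -v -p -i %s -s %s/* ::/" % (bootimg, hdddir)
        out += subprocess.check_output(mcopy_cmd, shell=True, text=True)
        return out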
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg_partition.py index 1071d1af3f..cc121a78f0 100644 --- a/scripts/lib/wic/plugins/source/bootimg-partition.py +++ b/scripts/lib/wic/plugins/source/bootimg_partition.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'bootimg-partition' source plugin class for | 7 | # This implements the 'bootimg_partition' source plugin class for |
8 | # 'wic'. The plugin creates an image of boot partition, copying over | 8 | # 'wic'. The plugin creates an image of boot partition, copying over |
9 | # files listed in IMAGE_BOOT_FILES bitbake variable. | 9 | # files listed in IMAGE_BOOT_FILES bitbake variable. |
10 | # | 10 | # |
@@ -16,7 +16,7 @@ import logging | |||
16 | import os | 16 | import os |
17 | import re | 17 | import re |
18 | 18 | ||
19 | from glob import glob | 19 | from oe.bootfiles import get_boot_files |
20 | 20 | ||
21 | from wic import WicError | 21 | from wic import WicError |
22 | from wic.engine import get_custom_config | 22 | from wic.engine import get_custom_config |
@@ -31,7 +31,7 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
31 | listed in IMAGE_BOOT_FILES bitbake variable. | 31 | listed in IMAGE_BOOT_FILES bitbake variable. |
32 | """ | 32 | """ |
33 | 33 | ||
34 | name = 'bootimg-partition' | 34 | name = 'bootimg_partition' |
35 | image_boot_files_var_name = 'IMAGE_BOOT_FILES' | 35 | image_boot_files_var_name = 'IMAGE_BOOT_FILES' |
36 | 36 | ||
37 | @classmethod | 37 | @classmethod |
@@ -66,42 +66,7 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
66 | 66 | ||
67 | logger.debug('Boot files: %s', boot_files) | 67 | logger.debug('Boot files: %s', boot_files) |
68 | 68 | ||
69 | # list of tuples (src_name, dst_name) | 69 | cls.install_task = get_boot_files(kernel_dir, boot_files) |
70 | deploy_files = [] | ||
71 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | ||
72 | if ';' in src_entry: | ||
73 | dst_entry = tuple(src_entry.split(';')) | ||
74 | if not dst_entry[0] or not dst_entry[1]: | ||
75 | raise WicError('Malformed boot file entry: %s' % src_entry) | ||
76 | else: | ||
77 | dst_entry = (src_entry, src_entry) | ||
78 | |||
79 | logger.debug('Destination entry: %r', dst_entry) | ||
80 | deploy_files.append(dst_entry) | ||
81 | |||
82 | cls.install_task = []; | ||
83 | for deploy_entry in deploy_files: | ||
84 | src, dst = deploy_entry | ||
85 | if '*' in src: | ||
86 | # by default install files under their basename | ||
87 | entry_name_fn = os.path.basename | ||
88 | if dst != src: | ||
89 | # unless a target name was given, then treat name | ||
90 | # as a directory and append a basename | ||
91 | entry_name_fn = lambda name: \ | ||
92 | os.path.join(dst, | ||
93 | os.path.basename(name)) | ||
94 | |||
95 | srcs = glob(os.path.join(kernel_dir, src)) | ||
96 | |||
97 | logger.debug('Globbed sources: %s', ', '.join(srcs)) | ||
98 | for entry in srcs: | ||
99 | src = os.path.relpath(entry, kernel_dir) | ||
100 | entry_dst_name = entry_name_fn(entry) | ||
101 | cls.install_task.append((src, entry_dst_name)) | ||
102 | else: | ||
103 | cls.install_task.append((src, dst)) | ||
104 | |||
105 | if source_params.get('loader') != "u-boot": | 70 | if source_params.get('loader') != "u-boot": |
106 | return | 71 | return |
107 | 72 | ||
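The inline parser removed above now lives in oe.bootfiles.get_boot_files(), shared with bootimg_efi. Its behaviour, reconstructed from the deleted lines (ValueError stands in for WicError; the shared helper may differ in detail):

    import os
    import re
    from glob import glob

    def get_boot_files(kernel_dir, boot_files):
        # expand IMAGE_BOOT_FILES entries into (src, dst) install tuples
        install_task = []
        for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
            if ';' in src_entry:
                src, dst = src_entry.split(';', 1)
                if not src or not dst:
                    raise ValueError('Malformed boot file entry: %s' % src_entry)
            else:
                src = dst = src_entry
            if '*' in src:
                # globs install under their basename; a differing dst is
                # treated as a directory to prepend
                for entry in glob(os.path.join(kernel_dir, src)):
                    rel = os.path.relpath(entry, kernel_dir)
                    name = os.path.basename(entry)
                    install_task.append((rel, os.path.join(dst, name) if dst != src else name))
            else:
                install_task.append((src, dst))
        return install_task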
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg_pcbios.py index a207a83530..21f41e00bb 100644 --- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py +++ b/scripts/lib/wic/plugins/source/bootimg_pcbios.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'bootimg-pcbios' source plugin class for 'wic' | 7 | # This implements the 'bootimg_pcbios' source plugin class for 'wic' |
8 | # | 8 | # |
9 | # AUTHORS | 9 | # AUTHORS |
10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> | 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> |
@@ -27,7 +27,7 @@ class BootimgPcbiosPlugin(SourcePlugin): | |||
27 | Create MBR boot partition and install syslinux on it. | 27 | Create MBR boot partition and install syslinux on it. |
28 | """ | 28 | """ |
29 | 29 | ||
30 | name = 'bootimg-pcbios' | 30 | name = 'bootimg_pcbios' |
31 | 31 | ||
32 | @classmethod | 32 | @classmethod |
33 | def _get_bootimg_dir(cls, bootimg_dir, dirname): | 33 | def _get_bootimg_dir(cls, bootimg_dir, dirname): |
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py index 607356ad13..5d42eb5d3e 100644 --- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py +++ b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'isoimage-isohybrid' source plugin class for 'wic' | 7 | # This implements the 'isoimage_isohybrid' source plugin class for 'wic' |
8 | # | 8 | # |
9 | # AUTHORS | 9 | # AUTHORS |
10 | # Mihaly Varga <mihaly.varga (at] ni.com> | 10 | # Mihaly Varga <mihaly.varga (at] ni.com> |
@@ -35,7 +35,7 @@ class IsoImagePlugin(SourcePlugin): | |||
35 | bootloader files. | 35 | bootloader files. |
36 | 36 | ||
37 | Example kickstart file: | 37 | Example kickstart file: |
38 | part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi, \\ | 38 | part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi, \\ |
39 | image_name= IsoImage" --ondisk cd --label LIVECD | 39 | image_name= IsoImage" --ondisk cd --label LIVECD |
40 | bootloader --timeout=10 --append=" " | 40 | bootloader --timeout=10 --append=" " |
41 | 41 | ||
@@ -45,7 +45,7 @@ class IsoImagePlugin(SourcePlugin): | |||
45 | extension added by direct imager plugin) and a file named IsoImage-cd.iso | 45 | extension added by direct imager plugin) and a file named IsoImage-cd.iso |
46 | """ | 46 | """ |
47 | 47 | ||
48 | name = 'isoimage-isohybrid' | 48 | name = 'isoimage_isohybrid' |
49 | 49 | ||
50 | @classmethod | 50 | @classmethod |
51 | def do_configure_syslinux(cls, creator, cr_workdir): | 51 | def do_configure_syslinux(cls, creator, cr_workdir): |
@@ -340,10 +340,10 @@ class IsoImagePlugin(SourcePlugin): | |||
340 | cls.do_configure_grubefi(part, creator, target_dir) | 340 | cls.do_configure_grubefi(part, creator, target_dir) |
341 | 341 | ||
342 | else: | 342 | else: |
343 | raise WicError("unrecognized bootimg-efi loader: %s" % | 343 | raise WicError("unrecognized bootimg_efi loader: %s" % |
344 | source_params['loader']) | 344 | source_params['loader']) |
345 | except KeyError: | 345 | except KeyError: |
346 | raise WicError("bootimg-efi requires a loader, none specified") | 346 | raise WicError("bootimg_efi requires a loader, none specified") |
347 | 347 | ||
348 | # Create efi.img that contains bootloader files for EFI booting | 348 | # Create efi.img that contains bootloader files for EFI booting |
349 | # if ISODIR didn't exist or didn't contain it | 349 | # if ISODIR didn't exist or didn't contain it |
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py index c990143c0d..06fce06bb1 100644 --- a/scripts/lib/wic/plugins/source/rootfs.py +++ b/scripts/lib/wic/plugins/source/rootfs.py | |||
@@ -41,7 +41,7 @@ class RootfsPlugin(SourcePlugin): | |||
41 | # Disallow climbing outside of parent directory using '..', | 41 | # Disallow climbing outside of parent directory using '..', |
42 | # because doing so could be quite disastrous (we will delete the | 42 | # because doing so could be quite disastrous (we will delete the |
43 | # directory, or modify a directory outside OpenEmbedded). | 43 | # directory, or modify a directory outside OpenEmbedded). |
44 | full_path = os.path.realpath(os.path.join(rootfs_dir, path)) | 44 | full_path = os.path.abspath(os.path.join(rootfs_dir, path)) |
45 | if not full_path.startswith(os.path.realpath(rootfs_dir)): | 45 | if not full_path.startswith(os.path.realpath(rootfs_dir)): |
46 | logger.error("%s: Must point inside the rootfs: %s" % (cmd, path)) | 46 | logger.error("%s: Must point inside the rootfs: %s" % (cmd, path)) |
47 | sys.exit(1) | 47 | sys.exit(1) |
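On the realpath-to-abspath change: abspath still normalizes '..' components, so directory-climbing is caught, but it no longer resolves symlinks in the supplied path itself, presumably avoiding false rejections when an absolute symlink inside the image resolves to a host path outside the rootfs; only the rootfs root is still canonicalized. A sketch of the check in isolation, with an exception in place of logger.error/sys.exit:

    import os

    def resolve_inside_rootfs(rootfs_dir, path, cmd):
        # normalize '..' without following symlinks in `path`; only the
        # rootfs root itself is canonicalized for the containment test
        full_path = os.path.abspath(os.path.join(rootfs_dir, path))
        if not full_path.startswith(os.path.realpath(rootfs_dir)):
            raise ValueError("%s: Must point inside the rootfs: %s" % (cmd, path))
        return full_path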