diff options
Diffstat (limited to 'meta/lib/oeqa/selftest/cases')
73 files changed, 7203 insertions, 1193 deletions
diff --git a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py index f7c356ad09..2c9584d329 100644 --- a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py +++ b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -6,8 +8,8 @@ import os | |||
6 | import shutil | 8 | import shutil |
7 | 9 | ||
8 | import oeqa.utils.ftools as ftools | 10 | import oeqa.utils.ftools as ftools |
9 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer | 11 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var |
10 | from oeqa.selftest.cases.sstate import SStateBase | 12 | from oeqa.selftest.cases.sstatetests import SStateBase |
11 | 13 | ||
12 | 14 | ||
13 | class RebuildFromSState(SStateBase): | 15 | class RebuildFromSState(SStateBase): |
@@ -90,7 +92,7 @@ class RebuildFromSState(SStateBase): | |||
90 | self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate))) | 92 | self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate))) |
91 | 93 | ||
92 | def test_sstate_relocation(self): | 94 | def test_sstate_relocation(self): |
93 | self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=True, rebuild_dependencies=True) | 95 | self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=True, rebuild_dependencies=True) |
94 | 96 | ||
95 | def test_sstate_rebuild(self): | 97 | def test_sstate_rebuild(self): |
96 | self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=False, rebuild_dependencies=True) | 98 | self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=False, rebuild_dependencies=True) |
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py index ddd08ecf84..3cb888c506 100644 --- a/meta/lib/oeqa/selftest/cases/archiver.py +++ b/meta/lib/oeqa/selftest/cases/archiver.py | |||
@@ -1,9 +1,12 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | import os | 7 | import os |
6 | import glob | 8 | import glob |
9 | import re | ||
7 | from oeqa.utils.commands import bitbake, get_bb_vars | 10 | from oeqa.utils.commands import bitbake, get_bb_vars |
8 | from oeqa.selftest.case import OESelftestTestCase | 11 | from oeqa.selftest.case import OESelftestTestCase |
9 | 12 | ||
@@ -35,11 +38,11 @@ class Archiver(OESelftestTestCase): | |||
35 | src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) | 38 | src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) |
36 | 39 | ||
37 | # Check that include_recipe was included | 40 | # Check that include_recipe was included |
38 | included_present = len(glob.glob(src_path + '/%s-*' % include_recipe)) | 41 | included_present = len(glob.glob(src_path + '/%s-*/*' % include_recipe)) |
39 | self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe) | 42 | self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe) |
40 | 43 | ||
41 | # Check that exclude_recipe was excluded | 44 | # Check that exclude_recipe was excluded |
42 | excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe)) | 45 | excluded_present = len(glob.glob(src_path + '/%s-*/*' % exclude_recipe)) |
43 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe) | 46 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe) |
44 | 47 | ||
45 | def test_archiver_filters_by_type(self): | 48 | def test_archiver_filters_by_type(self): |
@@ -67,11 +70,11 @@ class Archiver(OESelftestTestCase): | |||
67 | src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) | 70 | src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) |
68 | 71 | ||
69 | # Check that target_recipe was included | 72 | # Check that target_recipe was included |
70 | included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipe)) | 73 | included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipe)) |
71 | self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe) | 74 | self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe) |
72 | 75 | ||
73 | # Check that native_recipe was excluded | 76 | # Check that native_recipe was excluded |
74 | excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipe)) | 77 | excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipe)) |
75 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe) | 78 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe) |
76 | 79 | ||
77 | def test_archiver_filters_by_type_and_name(self): | 80 | def test_archiver_filters_by_type_and_name(self): |
@@ -104,20 +107,51 @@ class Archiver(OESelftestTestCase): | |||
104 | src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) | 107 | src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) |
105 | 108 | ||
106 | # Check that target_recipe[0] and native_recipes[1] were included | 109 | # Check that target_recipe[0] and native_recipes[1] were included |
107 | included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[0])) | 110 | included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[0])) |
108 | self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0]) | 111 | self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0]) |
109 | 112 | ||
110 | included_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[1])) | 113 | included_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[1])) |
111 | self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1]) | 114 | self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1]) |
112 | 115 | ||
113 | # Check that native_recipes[0] and target_recipes[1] were excluded | 116 | # Check that native_recipes[0] and target_recipes[1] were excluded |
114 | excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[0])) | 117 | excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[0])) |
115 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0]) | 118 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0]) |
116 | 119 | ||
117 | excluded_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[1])) | 120 | excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1])) |
118 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1]) | 121 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1]) |
119 | 122 | ||
123 | def test_archiver_multiconfig_shared_unpack_and_patch(self): | ||
124 | """ | ||
125 | Test that shared recipes in original mode with diff enabled works in multiconfig, | ||
126 | otherwise it will not build when using the same TMP dir. | ||
127 | """ | ||
128 | |||
129 | features = 'BBMULTICONFIG = "mc1 mc2"\n' | ||
130 | features += 'INHERIT += "archiver"\n' | ||
131 | features += 'ARCHIVER_MODE[src] = "original"\n' | ||
132 | features += 'ARCHIVER_MODE[diff] = "1"\n' | ||
133 | self.write_config(features) | ||
134 | |||
135 | # We can use any machine in multiconfig as long as they are different | ||
136 | self.write_config('MACHINE = "qemuarm"\n', 'mc1') | ||
137 | self.write_config('MACHINE = "qemux86"\n', 'mc2') | ||
138 | |||
139 | task = 'do_unpack_and_patch' | ||
140 | # Use gcc-source as it is a shared recipe (appends the pv to the pn) | ||
141 | pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV'] | ||
142 | |||
143 | # Generate the tasks signatures | ||
144 | bitbake('mc:mc1:%s mc:mc2:%s -c %s -S lockedsigs' % (pn, pn, task)) | ||
120 | 145 | ||
146 | # Check the tasks signatures | ||
147 | # To be machine agnostic the tasks needs to generate the same signature for each machine | ||
148 | locked_sigs_inc = "%s/locked-sigs.inc" % self.builddir | ||
149 | locked_sigs = open(locked_sigs_inc).read() | ||
150 | task_sigs = re.findall(r"%s:%s:.*" % (pn, task), locked_sigs) | ||
151 | uniq_sigs = set(task_sigs) | ||
152 | self.assertFalse(len(uniq_sigs) - 1, \ | ||
153 | 'The task "%s" of the recipe "%s" has different signatures in "%s" for each machine in multiconfig' \ | ||
154 | % (task, pn, locked_sigs_inc)) | ||
121 | 155 | ||
122 | def test_archiver_srpm_mode(self): | 156 | def test_archiver_srpm_mode(self): |
123 | """ | 157 | """ |
@@ -163,21 +197,21 @@ class Archiver(OESelftestTestCase): | |||
163 | Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`. | 197 | Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`. |
164 | """ | 198 | """ |
165 | 199 | ||
166 | self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-patched.tar.gz') | 200 | self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-patched.tar.xz') |
167 | 201 | ||
168 | def test_archiver_mode_configured(self): | 202 | def test_archiver_mode_configured(self): |
169 | """ | 203 | """ |
170 | Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`. | 204 | Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`. |
171 | """ | 205 | """ |
172 | 206 | ||
173 | self._test_archiver_mode('configured', 'selftest-ed-native-1.14.1-r0-configured.tar.gz') | 207 | self._test_archiver_mode('configured', 'selftest-ed-native-1.14.1-r0-configured.tar.xz') |
174 | 208 | ||
175 | def test_archiver_mode_recipe(self): | 209 | def test_archiver_mode_recipe(self): |
176 | """ | 210 | """ |
177 | Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`. | 211 | Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`. |
178 | """ | 212 | """ |
179 | 213 | ||
180 | self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-recipe.tar.gz', | 214 | self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-recipe.tar.xz', |
181 | 'ARCHIVER_MODE[recipe] = "1"\n') | 215 | 'ARCHIVER_MODE[recipe] = "1"\n') |
182 | 216 | ||
183 | def test_archiver_mode_diff(self): | 217 | def test_archiver_mode_diff(self): |
diff --git a/meta/lib/oeqa/selftest/cases/baremetal.py b/meta/lib/oeqa/selftest/cases/baremetal.py new file mode 100644 index 0000000000..cadaea2f1a --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/baremetal.py | |||
@@ -0,0 +1,14 @@ | |||
1 | |||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # SPDX-License-Identifier: MIT | ||
6 | # | ||
7 | |||
8 | from oeqa.selftest.case import OESelftestTestCase | ||
9 | from oeqa.utils.commands import bitbake | ||
10 | |||
11 | class BaremetalTest(OESelftestTestCase): | ||
12 | def test_baremetal(self): | ||
13 | self.write_config('TCLIBC = "baremetal"') | ||
14 | bitbake('baremetal-helloworld') | ||
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py index f131d9856c..695d17377d 100644 --- a/meta/lib/oeqa/selftest/cases/bblayers.py +++ b/meta/lib/oeqa/selftest/cases/bblayers.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -6,12 +8,23 @@ import os | |||
6 | import re | 8 | import re |
7 | 9 | ||
8 | import oeqa.utils.ftools as ftools | 10 | import oeqa.utils.ftools as ftools |
9 | from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars | 11 | from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake |
10 | 12 | ||
11 | from oeqa.selftest.case import OESelftestTestCase | 13 | from oeqa.selftest.case import OESelftestTestCase |
12 | 14 | ||
13 | class BitbakeLayers(OESelftestTestCase): | 15 | class BitbakeLayers(OESelftestTestCase): |
14 | 16 | ||
17 | @classmethod | ||
18 | def setUpClass(cls): | ||
19 | super(BitbakeLayers, cls).setUpClass() | ||
20 | bitbake("python3-jsonschema-native") | ||
21 | bitbake("-c addto_recipe_sysroot python3-jsonschema-native") | ||
22 | |||
23 | def test_bitbakelayers_layerindexshowdepends(self): | ||
24 | result = runCmd('bitbake-layers layerindex-show-depends meta-poky') | ||
25 | find_in_contents = re.search("openembedded-core", result.output) | ||
26 | self.assertTrue(find_in_contents, msg = "openembedded-core should have been listed at this step. bitbake-layers layerindex-show-depends meta-poky output: %s" % result.output) | ||
27 | |||
15 | def test_bitbakelayers_showcrossdepends(self): | 28 | def test_bitbakelayers_showcrossdepends(self): |
16 | result = runCmd('bitbake-layers show-cross-depends') | 29 | result = runCmd('bitbake-layers show-cross-depends') |
17 | self.assertIn('aspell', result.output) | 30 | self.assertIn('aspell', result.output) |
@@ -41,7 +54,7 @@ class BitbakeLayers(OESelftestTestCase): | |||
41 | bb_file = os.path.join(testoutdir, recipe_path, recipe_file) | 54 | bb_file = os.path.join(testoutdir, recipe_path, recipe_file) |
42 | self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.") | 55 | self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.") |
43 | contents = ftools.read_file(bb_file) | 56 | contents = ftools.read_file(bb_file) |
44 | find_in_contents = re.search("##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents) | 57 | find_in_contents = re.search(r"##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents) |
45 | self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output) | 58 | self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output) |
46 | 59 | ||
47 | def test_bitbakelayers_add_remove(self): | 60 | def test_bitbakelayers_add_remove(self): |
@@ -72,8 +85,9 @@ class BitbakeLayers(OESelftestTestCase): | |||
72 | result = runCmd('bitbake-layers show-recipes -i image') | 85 | result = runCmd('bitbake-layers show-recipes -i image') |
73 | self.assertIn('core-image-minimal', result.output) | 86 | self.assertIn('core-image-minimal', result.output) |
74 | self.assertNotIn('mtd-utils:', result.output) | 87 | self.assertNotIn('mtd-utils:', result.output) |
75 | result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig') | 88 | result = runCmd('bitbake-layers show-recipes -i meson,pkgconfig') |
76 | self.assertIn('libproxy:', result.output) | 89 | self.assertIn('libproxy:', result.output) |
90 | result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig') | ||
77 | self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either | 91 | self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either |
78 | self.assertNotIn('wget:', result.output) # doesn't inherit cmake | 92 | self.assertNotIn('wget:', result.output) # doesn't inherit cmake |
79 | self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig | 93 | self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig |
@@ -106,6 +120,11 @@ class BitbakeLayers(OESelftestTestCase): | |||
106 | 120 | ||
107 | self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority)) | 121 | self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority)) |
108 | 122 | ||
123 | result = runCmd('bitbake-layers save-build-conf {} {}'.format(layerpath, "buildconf-1")) | ||
124 | for f in ('local.conf.sample', 'bblayers.conf.sample', 'conf-summary.txt', 'conf-notes.txt'): | ||
125 | fullpath = os.path.join(layerpath, "conf", "templates", "buildconf-1", f) | ||
126 | self.assertTrue(os.path.exists(fullpath), "Template configuration file {} not found".format(fullpath)) | ||
127 | |||
109 | def get_recipe_basename(self, recipe): | 128 | def get_recipe_basename(self, recipe): |
110 | recipe_file = "" | 129 | recipe_file = "" |
111 | result = runCmd("bitbake-layers show-recipes -f %s" % recipe) | 130 | result = runCmd("bitbake-layers show-recipes -f %s" % recipe) |
@@ -116,3 +135,108 @@ class BitbakeLayers(OESelftestTestCase): | |||
116 | 135 | ||
117 | self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe) | 136 | self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe) |
118 | return os.path.basename(recipe_file) | 137 | return os.path.basename(recipe_file) |
138 | |||
139 | def validate_layersjson(self, json): | ||
140 | python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'nativepython3') | ||
141 | jsonvalidator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'jsonschema') | ||
142 | jsonschema = os.path.join(get_bb_var('COREBASE'), 'meta/files/layers.schema.json') | ||
143 | result = runCmd("{} {} -i {} {}".format(python, jsonvalidator, json, jsonschema)) | ||
144 | |||
145 | def test_validate_examplelayersjson(self): | ||
146 | json = os.path.join(get_bb_var('COREBASE'), "meta/files/layers.example.json") | ||
147 | self.validate_layersjson(json) | ||
148 | |||
149 | def test_bitbakelayers_setup(self): | ||
150 | result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path)) | ||
151 | jsonfile = os.path.join(self.testlayer_path, "setup-layers.json") | ||
152 | self.validate_layersjson(jsonfile) | ||
153 | |||
154 | # The revision-under-test may not necessarily be available on the remote server, | ||
155 | # so replace it with a revision that has a yocto-4.1 tag. | ||
156 | import json | ||
157 | with open(jsonfile) as f: | ||
158 | data = json.load(f) | ||
159 | for s in data['sources']: | ||
160 | data['sources'][s]['git-remote']['rev'] = '5200799866b92259e855051112520006e1aaaac0' | ||
161 | with open(jsonfile, 'w') as f: | ||
162 | json.dump(data, f) | ||
163 | |||
164 | testcheckoutdir = os.path.join(self.builddir, 'test-layer-checkout') | ||
165 | result = runCmd('{}/setup-layers --destdir {}'.format(self.testlayer_path, testcheckoutdir)) | ||
166 | layers_json = os.path.join(testcheckoutdir, ".oe-layers.json") | ||
167 | self.assertTrue(os.path.exists(layers_json), "File {} not found in test layer checkout".format(layers_json)) | ||
168 | |||
169 | # As setup-layers checkout out an old revision of poky, there is no setup-build symlink, | ||
170 | # and we need to run oe-setup-build directly from the current poky tree under test | ||
171 | oe_setup_build = os.path.join(get_bb_var('COREBASE'), 'scripts/oe-setup-build') | ||
172 | oe_setup_build_l = os.path.join(testcheckoutdir, 'setup-build') | ||
173 | os.symlink(oe_setup_build,oe_setup_build_l) | ||
174 | |||
175 | cmd = '{} --layerlist {} list -v'.format(oe_setup_build_l, layers_json) | ||
176 | result = runCmd(cmd) | ||
177 | cond = "conf/templates/default" in result.output | ||
178 | self.assertTrue(cond, "Incorrect output from {}: {}".format(cmd, result.output)) | ||
179 | |||
180 | # rather than hardcode the build setup cmdline here, let's actually run what the tool suggests to the user | ||
181 | conf = None | ||
182 | if 'poky-default' in result.output: | ||
183 | conf = 'poky-default' | ||
184 | elif 'meta-default' in result.output: | ||
185 | conf = 'meta-default' | ||
186 | self.assertIsNotNone(conf, "Could not find the configuration to set up a build in the output: {}".format(result.output)) | ||
187 | |||
188 | cmd = '{} --layerlist {} setup -c {} --no-shell'.format(oe_setup_build_l, layers_json, conf) | ||
189 | result = runCmd(cmd) | ||
190 | |||
191 | def test_bitbakelayers_updatelayer(self): | ||
192 | result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path)) | ||
193 | jsonfile = os.path.join(self.testlayer_path, "setup-layers.json") | ||
194 | self.validate_layersjson(jsonfile) | ||
195 | |||
196 | import json | ||
197 | with open(jsonfile) as f: | ||
198 | data = json.load(f) | ||
199 | repos = [] | ||
200 | for s in data['sources']: | ||
201 | repos.append(s) | ||
202 | |||
203 | self.assertTrue(len(repos) > 1, "Not enough repositories available") | ||
204 | self.validate_layersjson(jsonfile) | ||
205 | |||
206 | test_ref_1 = 'ref_1' | ||
207 | test_ref_2 = 'ref_2' | ||
208 | |||
209 | # Create a new layers setup using custom references | ||
210 | result = runCmd('bitbake-layers create-layers-setup --use-custom-reference {first_repo}:{test_ref} --use-custom-reference {second_repo}:{test_ref} {path}' | ||
211 | .format(first_repo=repos[0], second_repo=repos[1], test_ref=test_ref_1, path=self.testlayer_path)) | ||
212 | self.validate_layersjson(jsonfile) | ||
213 | |||
214 | with open(jsonfile) as f: | ||
215 | data = json.load(f) | ||
216 | first_rev_1 = data['sources'][repos[0]]['git-remote']['rev'] | ||
217 | first_desc_1 = data['sources'][repos[0]]['git-remote']['describe'] | ||
218 | second_rev_1 = data['sources'][repos[1]]['git-remote']['rev'] | ||
219 | second_desc_1 = data['sources'][repos[1]]['git-remote']['describe'] | ||
220 | |||
221 | self.assertEqual(first_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(first_rev_1)) | ||
222 | self.assertEqual(first_desc_1, '', "Describe not cleared: '{}'".format(first_desc_1)) | ||
223 | self.assertEqual(second_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(second_rev_1)) | ||
224 | self.assertEqual(second_desc_1, '', "Describe not cleared: '{}'".format(second_desc_1)) | ||
225 | |||
226 | # Update one of the repositories in the layers setup using a different custom reference | ||
227 | # This should only update the selected repository, everything else should remain as is | ||
228 | result = runCmd('bitbake-layers create-layers-setup --update --use-custom-reference {first_repo}:{test_ref} {path}' | ||
229 | .format(first_repo=repos[0], test_ref=test_ref_2, path=self.testlayer_path)) | ||
230 | self.validate_layersjson(jsonfile) | ||
231 | |||
232 | with open(jsonfile) as f: | ||
233 | data = json.load(f) | ||
234 | first_rev_2 = data['sources'][repos[0]]['git-remote']['rev'] | ||
235 | first_desc_2 = data['sources'][repos[0]]['git-remote']['describe'] | ||
236 | second_rev_2 = data['sources'][repos[1]]['git-remote']['rev'] | ||
237 | second_desc_2 = data['sources'][repos[1]]['git-remote']['describe'] | ||
238 | |||
239 | self.assertEqual(first_rev_2, test_ref_2, "Revision not set correctly: '{}'".format(first_rev_2)) | ||
240 | self.assertEqual(first_desc_2, '', "Describe not cleared: '{}'".format(first_desc_2)) | ||
241 | self.assertEqual(second_rev_2, second_rev_1, "Revision should not be updated: '{}'".format(second_rev_2)) | ||
242 | self.assertEqual(second_desc_2, second_desc_1, "Describe should not be updated: '{}'".format(second_desc_2)) | ||
diff --git a/meta/lib/oeqa/selftest/cases/bblock.py b/meta/lib/oeqa/selftest/cases/bblock.py new file mode 100644 index 0000000000..2b62d2a0aa --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/bblock.py | |||
@@ -0,0 +1,203 @@ | |||
1 | # | ||
2 | # Copyright (c) 2023 BayLibre, SAS | ||
3 | # Author: Julien Stepahn <jstephan@baylibre.com> | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | import os | ||
9 | import re | ||
10 | import bb.tinfoil | ||
11 | |||
12 | import oeqa.utils.ftools as ftools | ||
13 | from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake | ||
14 | |||
15 | from oeqa.selftest.case import OESelftestTestCase | ||
16 | |||
17 | |||
18 | class BBLock(OESelftestTestCase): | ||
19 | @classmethod | ||
20 | def setUpClass(cls): | ||
21 | super(BBLock, cls).setUpClass() | ||
22 | cls.lockfile = cls.builddir + "/conf/bblock.conf" | ||
23 | |||
24 | def unlock_recipes(self, recipes=None, tasks=None): | ||
25 | cmd = "bblock -r " | ||
26 | if recipes: | ||
27 | cmd += " ".join(recipes) | ||
28 | if tasks: | ||
29 | cmd += " -t " + ",".join(tasks) | ||
30 | result = runCmd(cmd) | ||
31 | |||
32 | if recipes: | ||
33 | # ensure all signatures are removed from lockfile | ||
34 | contents = ftools.read_file(self.lockfile) | ||
35 | for recipe in recipes: | ||
36 | for task in tasks: | ||
37 | find_in_contents = re.search( | ||
38 | 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task), | ||
39 | contents, | ||
40 | ) | ||
41 | self.assertFalse( | ||
42 | find_in_contents, | ||
43 | msg="%s:%s should not be present into bblock.conf anymore" | ||
44 | % (recipe, task), | ||
45 | ) | ||
46 | self.assertExists(self.lockfile) | ||
47 | else: | ||
48 | self.assertNotExists(self.lockfile) | ||
49 | |||
50 | def lock_recipes(self, recipes, tasks=None): | ||
51 | cmd = "bblock " + " ".join(recipes) | ||
52 | if tasks: | ||
53 | cmd += " -t " + ",".join(tasks) | ||
54 | |||
55 | result = runCmd(cmd) | ||
56 | |||
57 | self.assertExists(self.lockfile) | ||
58 | |||
59 | # ensure all signatures are added to lockfile | ||
60 | contents = ftools.read_file(self.lockfile) | ||
61 | for recipe in recipes: | ||
62 | if tasks: | ||
63 | for task in tasks: | ||
64 | find_in_contents = re.search( | ||
65 | 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task), | ||
66 | contents, | ||
67 | ) | ||
68 | self.assertTrue( | ||
69 | find_in_contents, | ||
70 | msg="%s:%s was not added into bblock.conf. bblock output: %s" | ||
71 | % (recipe, task, result.output), | ||
72 | ) | ||
73 | |||
74 | def modify_tasks(self, recipes, tasks): | ||
75 | task_append = "" | ||
76 | for recipe in recipes: | ||
77 | bb_vars = get_bb_vars(["PV"], recipe) | ||
78 | recipe_pv = bb_vars["PV"] | ||
79 | recipe_append_file = recipe + "_" + recipe_pv + ".bbappend" | ||
80 | |||
81 | os.mkdir(os.path.join(self.testlayer_path, "recipes-test", recipe)) | ||
82 | recipe_append_path = os.path.join( | ||
83 | self.testlayer_path, "recipes-test", recipe, recipe_append_file | ||
84 | ) | ||
85 | |||
86 | for task in tasks: | ||
87 | task_append += "%s:append() {\n#modify task hash \n}\n" % task | ||
88 | ftools.write_file(recipe_append_path, task_append) | ||
89 | self.add_command_to_tearDown( | ||
90 | "rm -rf %s" % os.path.join(self.testlayer_path, "recipes-test", recipe) | ||
91 | ) | ||
92 | |||
93 | def test_lock_single_recipe_single_task(self): | ||
94 | recipes = ["quilt"] | ||
95 | tasks = ["do_compile"] | ||
96 | self._run_test(recipes, tasks) | ||
97 | |||
98 | def test_lock_single_recipe_multiple_tasks(self): | ||
99 | recipes = ["quilt"] | ||
100 | tasks = ["do_compile", "do_install"] | ||
101 | self._run_test(recipes, tasks) | ||
102 | |||
103 | def test_lock_single_recipe_all_tasks(self): | ||
104 | recipes = ["quilt"] | ||
105 | self._run_test(recipes, None) | ||
106 | |||
107 | def test_lock_multiple_recipe_single_task(self): | ||
108 | recipes = ["quilt", "bc"] | ||
109 | tasks = ["do_compile"] | ||
110 | self._run_test(recipes, tasks) | ||
111 | |||
112 | def test_lock_architecture_specific(self): | ||
113 | # unlock all recipes and ensure no bblock.conf file exist | ||
114 | self.unlock_recipes() | ||
115 | |||
116 | recipes = ["quilt"] | ||
117 | tasks = ["do_compile"] | ||
118 | |||
119 | # lock quilt's do_compile task for another machine | ||
120 | if self.td["MACHINE"] == "qemux86-64": | ||
121 | machine = "qemuarm" | ||
122 | else: | ||
123 | machine = "qemux86-64" | ||
124 | |||
125 | self.write_config('MACHINE = "%s"\n' % machine) | ||
126 | |||
127 | self.lock_recipes(recipes, tasks) | ||
128 | |||
129 | self.write_config('MACHINE = "%s"\n' % self.td["MACHINE"]) | ||
130 | # modify quilt's do_compile task | ||
131 | self.modify_tasks(recipes, tasks) | ||
132 | |||
133 | # build quilt using the default machine | ||
134 | # No Note/Warning should be emitted since sig is locked for another machine | ||
135 | # (quilt package is architecture dependant) | ||
136 | info_message = "NOTE: The following recipes have locked tasks: " + recipes[0] | ||
137 | warn_message = "The %s:%s sig is computed to be" % (recipes[0], tasks[0]) | ||
138 | result = bitbake(recipes[0] + " -n") | ||
139 | self.assertNotIn(info_message, result.output) | ||
140 | self.assertNotIn(warn_message, result.output) | ||
141 | |||
142 | # unlock all recipes | ||
143 | self.unlock_recipes() | ||
144 | |||
145 | def _run_test(self, recipes, tasks=None): | ||
146 | # unlock all recipes and ensure no bblock.conf file exist | ||
147 | self.unlock_recipes() | ||
148 | |||
149 | self.write_config('BB_SIGNATURE_HANDLER = "OEBasicHash"') | ||
150 | |||
151 | # lock tasks for recipes | ||
152 | result = self.lock_recipes(recipes, tasks) | ||
153 | |||
154 | if not tasks: | ||
155 | tasks = [] | ||
156 | result = bitbake("-c listtasks " + recipes[0]) | ||
157 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
158 | tinfoil.prepare(config_only=False, quiet=2) | ||
159 | d = tinfoil.parse_recipe(recipes[0]) | ||
160 | |||
161 | for line in result.output.splitlines(): | ||
162 | if line.startswith("do_"): | ||
163 | task = line.split()[0] | ||
164 | if "setscene" in task: | ||
165 | continue | ||
166 | if d.getVarFlag(task, "nostamp"): | ||
167 | continue | ||
168 | tasks.append(task) | ||
169 | |||
170 | # build recipes. At this stage we should have a Note about recipes | ||
171 | # having locked task's sig, but no warning since sig still match | ||
172 | info_message = "NOTE: The following recipes have locked tasks: " + " ".join( | ||
173 | recipes | ||
174 | ) | ||
175 | for recipe in recipes: | ||
176 | result = bitbake(recipe + " -n") | ||
177 | self.assertIn(info_message, result.output) | ||
178 | for task in tasks: | ||
179 | warn_message = "The %s:%s sig is computed to be" % (recipe, task) | ||
180 | self.assertNotIn(warn_message, result.output) | ||
181 | |||
182 | # modify all tasks that are locked to trigger a sig change then build the recipes | ||
183 | # at this stage we should have a Note as before, but also a Warning for all | ||
184 | # locked tasks indicating the sig mismatch | ||
185 | self.modify_tasks(recipes, tasks) | ||
186 | for recipe in recipes: | ||
187 | result = bitbake(recipe + " -n") | ||
188 | self.assertIn(info_message, result.output) | ||
189 | for task in tasks: | ||
190 | warn_message = "The %s:%s sig is computed to be" % (recipe, task) | ||
191 | self.assertIn(warn_message, result.output) | ||
192 | |||
193 | # unlock all tasks and rebuild, no more Note/Warning should remain | ||
194 | self.unlock_recipes(recipes, tasks) | ||
195 | for recipe in recipes: | ||
196 | result = bitbake(recipe + " -n") | ||
197 | self.assertNotIn(info_message, result.output) | ||
198 | for task in tasks: | ||
199 | warn_message = "The %s:%s sig is computed to be" % (recipe, task) | ||
200 | self.assertNotIn(warn_message, result.output) | ||
201 | |||
202 | # unlock all recipes | ||
203 | self.unlock_recipes() | ||
diff --git a/meta/lib/oeqa/selftest/cases/bblogging.py b/meta/lib/oeqa/selftest/cases/bblogging.py new file mode 100644 index 0000000000..040c6db089 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/bblogging.py | |||
@@ -0,0 +1,182 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | |||
8 | from oeqa.selftest.case import OESelftestTestCase | ||
9 | from oeqa.utils.commands import bitbake | ||
10 | |||
class BitBakeLogging(OESelftestTestCase):
    """Verify that task stdout/stderr appears (or is suppressed) in the
    bitbake console output for each combination of BBINCLUDELOGS and the
    -v (verbose) command-line option, across shell tasks, python tasks,
    exec_func-invoked functions, sys.exit() and bb.fatal() failures.

    All tests drive the 'logging-test' recipe, whose tasks deliberately
    fail after emitting known marker strings.
    """

    def assertCount(self, item, entry, count):
        """Assert that *entry* occurs exactly *count* times within *item*."""
        self.assertEqual(item.count(entry), count, msg="Output:\n'''\n%s\n'''\ndoesn't contain %d copies of:\n'''\n%s\n'''\n" % (item, count, entry))

    def _run_logging_task(self, task, includelogs, verbose):
        """Force-run *task* of the logging-test recipe and return the result.

        includelogs: if True set BBINCLUDELOGS = "yes", else "" (no logs).
        verbose:     if True pass -v to bitbake.

        Every invocation is expected to fail, so this also asserts that a
        failure logfile was reported before handing the result back for the
        caller's output-content assertions.
        """
        self.write_config('BBINCLUDELOGS = "%s"' % ("yes" if includelogs else ""))
        cmd = "logging-test -c %s -f" % task
        if verbose:
            cmd += " -v"
        result = bitbake(cmd, ignore_status=True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        return result

    def test_shell_loggingA(self):
        # no logs, no verbose
        result = self._run_logging_task("shelltest", includelogs=False, verbose=False)
        self.assertNotIn("This is shell stdout", result.output)
        self.assertNotIn("This is shell stderr", result.output)

    def test_shell_loggingB(self):
        # logs, no verbose
        result = self._run_logging_task("shelltest", includelogs=True, verbose=False)
        self.assertCount(result.output, "This is shell stdout", 1)
        self.assertCount(result.output, "This is shell stderr", 1)

    def test_shell_loggingC(self):
        # no logs, verbose
        result = self._run_logging_task("shelltest", includelogs=False, verbose=True)
        # two copies due to set +x
        self.assertCount(result.output, "This is shell stdout", 2)
        self.assertCount(result.output, "This is shell stderr", 2)

    def test_shell_loggingD(self):
        # logs, verbose
        result = self._run_logging_task("shelltest", includelogs=True, verbose=True)
        # two copies due to set +x
        self.assertCount(result.output, "This is shell stdout", 2)
        self.assertCount(result.output, "This is shell stderr", 2)

    def test_python_exec_func_shell_loggingA(self):
        # no logs, no verbose
        result = self._run_logging_task("pythontest_exec_func_shell", includelogs=False, verbose=False)
        self.assertNotIn("This is shell stdout", result.output)
        self.assertNotIn("This is shell stderr", result.output)

    def test_python_exec_func_shell_loggingB(self):
        # logs, no verbose
        result = self._run_logging_task("pythontest_exec_func_shell", includelogs=True, verbose=False)
        self.assertCount(result.output, "This is shell stdout", 1)
        self.assertCount(result.output, "This is shell stderr", 1)

    def test_python_exec_func_shell_loggingC(self):
        # no logs, verbose
        result = self._run_logging_task("pythontest_exec_func_shell", includelogs=False, verbose=True)
        # two copies due to set +x
        self.assertCount(result.output, "This is shell stdout", 2)
        self.assertCount(result.output, "This is shell stderr", 2)

    def test_python_exec_func_shell_loggingD(self):
        # logs, verbose
        result = self._run_logging_task("pythontest_exec_func_shell", includelogs=True, verbose=True)
        # two copies due to set +x
        self.assertCount(result.output, "This is shell stdout", 2)
        self.assertCount(result.output, "This is shell stderr", 2)

    def test_python_exit_loggingA(self):
        # no logs, no verbose
        result = self._run_logging_task("pythontest_exit", includelogs=False, verbose=False)
        self.assertNotIn("This is python stdout", result.output)

    def test_python_exit_loggingB(self):
        # logs, no verbose
        result = self._run_logging_task("pythontest_exit", includelogs=True, verbose=False)
        # A sys.exit() should include the output
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exit_loggingC(self):
        # no logs, verbose
        result = self._run_logging_task("pythontest_exit", includelogs=False, verbose=True)
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exit_loggingD(self):
        # logs, verbose
        result = self._run_logging_task("pythontest_exit", includelogs=True, verbose=True)
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exec_func_python_loggingA(self):
        # no logs, no verbose
        result = self._run_logging_task("pythontest_exec_func_python", includelogs=False, verbose=False)
        self.assertNotIn("This is python stdout", result.output)

    def test_python_exec_func_python_loggingB(self):
        # logs, no verbose
        result = self._run_logging_task("pythontest_exec_func_python", includelogs=True, verbose=False)
        # A sys.exit() should include the output
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exec_func_python_loggingC(self):
        # no logs, verbose
        result = self._run_logging_task("pythontest_exec_func_python", includelogs=False, verbose=True)
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exec_func_python_loggingD(self):
        # logs, verbose
        result = self._run_logging_task("pythontest_exec_func_python", includelogs=True, verbose=True)
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_fatal_loggingA(self):
        # no logs, no verbose
        result = self._run_logging_task("pythontest_fatal", includelogs=False, verbose=False)
        self.assertNotIn("This is python fatal test stdout", result.output)
        self.assertCount(result.output, "This is a fatal error", 1)

    def test_python_fatal_loggingB(self):
        # logs, no verbose
        result = self._run_logging_task("pythontest_fatal", includelogs=True, verbose=False)
        # A bb.fatal() should not include the output
        self.assertNotIn("This is python fatal test stdout", result.output)
        self.assertCount(result.output, "This is a fatal error", 1)

    def test_python_fatal_loggingC(self):
        # no logs, verbose
        result = self._run_logging_task("pythontest_fatal", includelogs=False, verbose=True)
        self.assertCount(result.output, "This is python fatal test stdout", 1)
        self.assertCount(result.output, "This is a fatal error", 1)

    def test_python_fatal_loggingD(self):
        # logs, verbose
        result = self._run_logging_task("pythontest_fatal", includelogs=True, verbose=True)
        self.assertCount(result.output, "This is python fatal test stdout", 1)
        self.assertCount(result.output, "This is a fatal error", 1)
182 | |||
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py index 79390acc0d..98e9f81661 100644 --- a/meta/lib/oeqa/selftest/cases/bbtests.py +++ b/meta/lib/oeqa/selftest/cases/bbtests.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -39,7 +41,7 @@ class BitbakeTests(OESelftestTestCase): | |||
39 | 41 | ||
40 | def test_event_handler(self): | 42 | def test_event_handler(self): |
41 | self.write_config("INHERIT += \"test_events\"") | 43 | self.write_config("INHERIT += \"test_events\"") |
42 | result = bitbake('m4-native') | 44 | result = bitbake('selftest-hello-native') |
43 | find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output) | 45 | find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output) |
44 | find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output) | 46 | find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output) |
45 | self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output) | 47 | self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output) |
@@ -47,11 +49,11 @@ class BitbakeTests(OESelftestTestCase): | |||
47 | self.assertNotIn('Test for bb.event.InvalidEvent', result.output) | 49 | self.assertNotIn('Test for bb.event.InvalidEvent', result.output) |
48 | 50 | ||
49 | def test_local_sstate(self): | 51 | def test_local_sstate(self): |
50 | bitbake('m4-native') | 52 | bitbake('selftest-hello-native') |
51 | bitbake('m4-native -cclean') | 53 | bitbake('selftest-hello-native -cclean') |
52 | result = bitbake('m4-native') | 54 | result = bitbake('selftest-hello-native') |
53 | find_setscene = re.search("m4-native.*do_.*_setscene", result.output) | 55 | find_setscene = re.search("selftest-hello-native.*do_.*_setscene", result.output) |
54 | self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output ) | 56 | self.assertTrue(find_setscene, msg = "No \"selftest-hello-native.*do_.*_setscene\" message found during bitbake selftest-hello-native. bitbake output: %s" % result.output ) |
55 | 57 | ||
56 | def test_bitbake_invalid_recipe(self): | 58 | def test_bitbake_invalid_recipe(self): |
57 | result = bitbake('-b asdf', ignore_status=True) | 59 | result = bitbake('-b asdf', ignore_status=True) |
@@ -63,15 +65,15 @@ class BitbakeTests(OESelftestTestCase): | |||
63 | 65 | ||
64 | def test_warnings_errors(self): | 66 | def test_warnings_errors(self): |
65 | result = bitbake('-b asdf', ignore_status=True) | 67 | result = bitbake('-b asdf', ignore_status=True) |
66 | find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages* shown", result.output) | 68 | find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages*", result.output) |
67 | find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages* shown", result.output) | 69 | find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages*", result.output) |
68 | self.assertTrue(find_warnings, msg="Did not find the mumber of warnings at the end of the build:\n" + result.output) | 70 | self.assertTrue(find_warnings, msg="Did not find the mumber of warnings at the end of the build:\n" + result.output) |
69 | self.assertTrue(find_errors, msg="Did not find the mumber of errors at the end of the build:\n" + result.output) | 71 | self.assertTrue(find_errors, msg="Did not find the mumber of errors at the end of the build:\n" + result.output) |
70 | 72 | ||
71 | def test_invalid_patch(self): | 73 | def test_invalid_patch(self): |
72 | # This patch should fail to apply. | 74 | # This patch should fail to apply. |
73 | self.write_recipeinc('man-db', 'FILESEXTRAPATHS_prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"') | 75 | self.write_recipeinc('man-db', 'FILESEXTRAPATHS:prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"') |
74 | self.write_config("INHERIT_remove = \"report-error\"") | 76 | self.write_config("INHERIT:remove = \"report-error\"") |
75 | result = bitbake('man-db -c patch', ignore_status=True) | 77 | result = bitbake('man-db -c patch', ignore_status=True) |
76 | self.delete_recipeinc('man-db') | 78 | self.delete_recipeinc('man-db') |
77 | bitbake('-cclean man-db') | 79 | bitbake('-cclean man-db') |
@@ -83,8 +85,10 @@ class BitbakeTests(OESelftestTestCase): | |||
83 | 85 | ||
84 | def test_force_task_1(self): | 86 | def test_force_task_1(self): |
85 | # test 1 from bug 5875 | 87 | # test 1 from bug 5875 |
88 | import uuid | ||
86 | test_recipe = 'zlib' | 89 | test_recipe = 'zlib' |
87 | test_data = "Microsoft Made No Profit From Anyone's Zunes Yo" | 90 | # Need to use uuid otherwise hash equivlance would change the workflow |
91 | test_data = "Microsoft Made No Profit From Anyone's Zunes Yo %s" % uuid.uuid1() | ||
88 | bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe) | 92 | bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe) |
89 | image_dir = bb_vars['D'] | 93 | image_dir = bb_vars['D'] |
90 | pkgsplit_dir = bb_vars['PKGDEST'] | 94 | pkgsplit_dir = bb_vars['PKGDEST'] |
@@ -139,19 +143,14 @@ class BitbakeTests(OESelftestTestCase): | |||
139 | self.write_recipeinc('man-db', data) | 143 | self.write_recipeinc('man-db', data) |
140 | self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" | 144 | self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" |
141 | SSTATE_DIR = \"${TOPDIR}/download-selftest\" | 145 | SSTATE_DIR = \"${TOPDIR}/download-selftest\" |
142 | INHERIT_remove = \"report-error\" | 146 | INHERIT:remove = \"report-error\" |
143 | """) | 147 | """) |
144 | self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) | 148 | self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) |
145 | 149 | ||
146 | bitbake('-ccleanall man-db') | ||
147 | result = bitbake('-c fetch man-db', ignore_status=True) | 150 | result = bitbake('-c fetch man-db', ignore_status=True) |
148 | bitbake('-ccleanall man-db') | ||
149 | self.delete_recipeinc('man-db') | 151 | self.delete_recipeinc('man-db') |
150 | self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output) | 152 | self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output) |
151 | self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output) | 153 | self.assertIn('Unable to get checksum for man-db SRC_URI entry invalid: file could not be found', result.output) |
152 | line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.') | ||
153 | self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \ | ||
154 | doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output) | ||
155 | 154 | ||
156 | def test_rename_downloaded_file(self): | 155 | def test_rename_downloaded_file(self): |
157 | # TODO unique dldir instead of using cleanall | 156 | # TODO unique dldir instead of using cleanall |
@@ -161,7 +160,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\" | |||
161 | """) | 160 | """) |
162 | self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) | 161 | self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) |
163 | 162 | ||
164 | data = 'SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"' | 163 | data = 'SRC_URI = "https://downloads.yoctoproject.org/mirror/sources/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"' |
165 | self.write_recipeinc('aspell', data) | 164 | self.write_recipeinc('aspell', data) |
166 | result = bitbake('-f -c fetch aspell', ignore_status=True) | 165 | result = bitbake('-f -c fetch aspell', ignore_status=True) |
167 | self.delete_recipeinc('aspell') | 166 | self.delete_recipeinc('aspell') |
@@ -176,7 +175,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\" | |||
176 | self.assertIn('localconf', result.output) | 175 | self.assertIn('localconf', result.output) |
177 | 176 | ||
178 | def test_dry_run(self): | 177 | def test_dry_run(self): |
179 | result = runCmd('bitbake -n m4-native') | 178 | result = runCmd('bitbake -n selftest-hello-native') |
180 | self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output) | 179 | self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output) |
181 | 180 | ||
182 | def test_just_parse(self): | 181 | def test_just_parse(self): |
@@ -189,6 +188,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\" | |||
189 | self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output) | 188 | self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output) |
190 | 189 | ||
191 | def test_prefile(self): | 190 | def test_prefile(self): |
191 | # Test when the prefile does not exist | ||
192 | result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True) | ||
193 | self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified prefile didn't exist: %s" % result.output) | ||
194 | # Test when the prefile exists | ||
192 | preconf = os.path.join(self.builddir, 'conf/prefile.conf') | 195 | preconf = os.path.join(self.builddir, 'conf/prefile.conf') |
193 | self.track_for_cleanup(preconf) | 196 | self.track_for_cleanup(preconf) |
194 | ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"") | 197 | ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"") |
@@ -199,6 +202,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\" | |||
199 | self.assertIn('localconf', result.output) | 202 | self.assertIn('localconf', result.output) |
200 | 203 | ||
201 | def test_postfile(self): | 204 | def test_postfile(self): |
205 | # Test when the postfile does not exist | ||
206 | result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True) | ||
207 | self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified postfile didn't exist: %s" % result.output) | ||
208 | # Test when the postfile exists | ||
202 | postconf = os.path.join(self.builddir, 'conf/postfile.conf') | 209 | postconf = os.path.join(self.builddir, 'conf/postfile.conf') |
203 | self.track_for_cleanup(postconf) | 210 | self.track_for_cleanup(postconf) |
204 | ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"") | 211 | ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"") |
@@ -213,7 +220,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\" | |||
213 | def test_continue(self): | 220 | def test_continue(self): |
214 | self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" | 221 | self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" |
215 | SSTATE_DIR = \"${TOPDIR}/download-selftest\" | 222 | SSTATE_DIR = \"${TOPDIR}/download-selftest\" |
216 | INHERIT_remove = \"report-error\" | 223 | INHERIT:remove = \"report-error\" |
217 | """) | 224 | """) |
218 | self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) | 225 | self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) |
219 | self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" ) | 226 | self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" ) |
@@ -225,16 +232,21 @@ INHERIT_remove = \"report-error\" | |||
225 | self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output) | 232 | self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output) |
226 | 233 | ||
227 | def test_non_gplv3(self): | 234 | def test_non_gplv3(self): |
228 | self.write_config('INCOMPATIBLE_LICENSE = "GPLv3"') | 235 | self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later" |
236 | require conf/distro/include/no-gplv3.inc | ||
237 | ''') | ||
229 | result = bitbake('selftest-ed', ignore_status=True) | 238 | result = bitbake('selftest-ed', ignore_status=True) |
230 | self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output)) | 239 | self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output)) |
231 | lic_dir = get_bb_var('LICENSE_DIRECTORY') | 240 | lic_dir = get_bb_var('LICENSE_DIRECTORY') |
232 | self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv3'))) | 241 | arch = get_bb_var('SSTATE_PKGARCH') |
233 | self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv2'))) | 242 | filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later') |
243 | self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename) | ||
244 | filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-or-later') | ||
245 | self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename) | ||
234 | 246 | ||
235 | def test_setscene_only(self): | 247 | def test_setscene_only(self): |
236 | """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)""" | 248 | """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)""" |
237 | test_recipe = 'ed' | 249 | test_recipe = 'selftest-hello-native' |
238 | 250 | ||
239 | bitbake(test_recipe) | 251 | bitbake(test_recipe) |
240 | bitbake('-c clean %s' % test_recipe) | 252 | bitbake('-c clean %s' % test_recipe) |
@@ -247,7 +259,7 @@ INHERIT_remove = \"report-error\" | |||
247 | 'Executed tasks were: %s' % (task, str(tasks))) | 259 | 'Executed tasks were: %s' % (task, str(tasks))) |
248 | 260 | ||
249 | def test_skip_setscene(self): | 261 | def test_skip_setscene(self): |
250 | test_recipe = 'ed' | 262 | test_recipe = 'selftest-hello-native' |
251 | 263 | ||
252 | bitbake(test_recipe) | 264 | bitbake(test_recipe) |
253 | bitbake('-c clean %s' % test_recipe) | 265 | bitbake('-c clean %s' % test_recipe) |
@@ -298,3 +310,68 @@ INHERIT_remove = \"report-error\" | |||
298 | 310 | ||
299 | test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe) | 311 | test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe) |
300 | self.assertEqual(expected_recipe_summary, test_recipe_summary_after) | 312 | self.assertEqual(expected_recipe_summary, test_recipe_summary_after) |
313 | |||
314 | def test_git_patchtool(self): | ||
315 | """ PATCHTOOL=git should work with non-git sources like tarballs | ||
316 | test recipe for the test must NOT contain a git:// repository in SRC_URI | ||
317 | """ | ||
318 | test_recipe = "man-db" | ||
319 | self.write_recipeinc(test_recipe, 'PATCHTOOL=\"git\"') | ||
320 | src = get_bb_var("SRC_URI",test_recipe) | ||
321 | gitscm = re.search("git://", src) | ||
322 | self.assertFalse(gitscm, "test_git_patchtool pre-condition failed: {} test recipe contains git repo!".format(test_recipe)) | ||
323 | result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False) | ||
324 | fatal = re.search("fatal: not a git repository (or any of the parent directories)", result.output) | ||
325 | self.assertFalse(fatal, "Failed to patch using PATCHTOOL=\"git\"") | ||
326 | self.delete_recipeinc(test_recipe) | ||
327 | bitbake('-cclean {}'.format(test_recipe)) | ||
328 | |||
329 | def test_git_patchtool2(self): | ||
330 | """ Test if PATCHTOOL=git works with git repo and doesn't reinitialize it | ||
331 | """ | ||
332 | test_recipe = "gitrepotest" | ||
333 | src = get_bb_var("SRC_URI",test_recipe) | ||
334 | gitscm = re.search("git://", src) | ||
335 | self.assertTrue(gitscm, "test_git_patchtool pre-condition failed: {} test recipe doesn't contains git repo!".format(test_recipe)) | ||
336 | result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False) | ||
337 | srcdir = get_bb_var('S', test_recipe) | ||
338 | result = runCmd("git log", cwd = srcdir) | ||
339 | self.assertFalse("bitbake_patching_started" in result.output, msg = "Repository has been reinitialized. {}".format(srcdir)) | ||
340 | self.delete_recipeinc(test_recipe) | ||
341 | bitbake('-cclean {}'.format(test_recipe)) | ||
342 | |||
343 | |||
344 | def test_git_unpack_nonetwork(self): | ||
345 | """ | ||
346 | Test that a recipe with a floating tag that needs to be resolved upstream doesn't | ||
347 | access the network in a patch task run in a separate builld invocation | ||
348 | """ | ||
349 | |||
350 | # Enable the recipe to float using a distro override | ||
351 | self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"") | ||
352 | |||
353 | bitbake('gitunpackoffline -c fetch') | ||
354 | bitbake('gitunpackoffline -c patch') | ||
355 | |||
356 | def test_git_unpack_nonetwork_fail(self): | ||
357 | """ | ||
358 | Test that a recipe with a floating tag which doesn't call get_srcrev() in the fetcher | ||
359 | raises an error when the fetcher is called. | ||
360 | """ | ||
361 | |||
362 | # Enable the recipe to float using a distro override | ||
363 | self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"") | ||
364 | |||
365 | result = bitbake('gitunpackoffline-fail -c fetch', ignore_status=True) | ||
366 | self.assertTrue(re.search("Recipe uses a floating tag/branch .* for repo .* without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev()", result.output), msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output) | ||
367 | |||
368 | def test_unexpanded_variable_in_path(self): | ||
369 | """ | ||
370 | Test that bitbake fails if directory contains unexpanded bitbake variable in the name | ||
371 | """ | ||
372 | recipe_name = "gitunpackoffline" | ||
373 | self.write_config('PV:pn-gitunpackoffline:append = "+${UNDEFVAL}"') | ||
374 | result = bitbake('{}'.format(recipe_name), ignore_status=True) | ||
375 | self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path") | ||
376 | self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution", | ||
377 | result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output) | ||
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py index 821f52f5a8..1688eabe4e 100644 --- a/meta/lib/oeqa/selftest/cases/binutils.py +++ b/meta/lib/oeqa/selftest/cases/binutils.py | |||
@@ -1,12 +1,14 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
1 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | ||
2 | import os | 6 | import os |
3 | import sys | 7 | import time |
4 | import re | ||
5 | import logging | ||
6 | from oeqa.core.decorator import OETestTag | 8 | from oeqa.core.decorator import OETestTag |
7 | from oeqa.core.case import OEPTestResultTestCase | 9 | from oeqa.core.case import OEPTestResultTestCase |
8 | from oeqa.selftest.case import OESelftestTestCase | 10 | from oeqa.selftest.case import OESelftestTestCase |
9 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars | 11 | from oeqa.utils.commands import bitbake, get_bb_vars |
10 | 12 | ||
11 | def parse_values(content): | 13 | def parse_values(content): |
12 | for i in content: | 14 | for i in content: |
@@ -35,15 +37,19 @@ class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase): | |||
35 | bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe) | 37 | bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe) |
36 | builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"] | 38 | builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"] |
37 | 39 | ||
40 | start_time = time.time() | ||
41 | |||
38 | bitbake("{0} -c check".format(recipe)) | 42 | bitbake("{0} -c check".format(recipe)) |
39 | 43 | ||
44 | end_time = time.time() | ||
45 | |||
40 | sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite)) | 46 | sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite)) |
41 | if not os.path.exists(sumspath): | 47 | if not os.path.exists(sumspath): |
42 | sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite)) | 48 | sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite)) |
43 | logpath = os.path.splitext(sumspath)[0] + ".log" | 49 | logpath = os.path.splitext(sumspath)[0] + ".log" |
44 | 50 | ||
45 | ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite | 51 | ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite |
46 | self.ptest_section(ptestsuite, logfile = logpath) | 52 | self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath) |
47 | with open(sumspath, "r") as f: | 53 | with open(sumspath, "r") as f: |
48 | for test, result in parse_values(f): | 54 | for test, result in parse_values(f): |
49 | self.ptest_result(ptestsuite, test, result) | 55 | self.ptest_result(ptestsuite, test, result) |
diff --git a/meta/lib/oeqa/selftest/cases/buildhistory.py b/meta/lib/oeqa/selftest/cases/buildhistory.py index d865da6252..2d55994916 100644 --- a/meta/lib/oeqa/selftest/cases/buildhistory.py +++ b/meta/lib/oeqa/selftest/cases/buildhistory.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py index 3495bee986..31dafaa9c5 100644 --- a/meta/lib/oeqa/selftest/cases/buildoptions.py +++ b/meta/lib/oeqa/selftest/cases/buildoptions.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -9,8 +11,10 @@ import shutil | |||
9 | import tempfile | 11 | import tempfile |
10 | from oeqa.selftest.case import OESelftestTestCase | 12 | from oeqa.selftest.case import OESelftestTestCase |
11 | from oeqa.selftest.cases.buildhistory import BuildhistoryBase | 13 | from oeqa.selftest.cases.buildhistory import BuildhistoryBase |
12 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars | 14 | from oeqa.core.decorator.data import skipIfMachine |
15 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars | ||
13 | import oeqa.utils.ftools as ftools | 16 | import oeqa.utils.ftools as ftools |
17 | from oeqa.core.decorator import OETestTag | ||
14 | 18 | ||
15 | class ImageOptionsTests(OESelftestTestCase): | 19 | class ImageOptionsTests(OESelftestTestCase): |
16 | 20 | ||
@@ -50,23 +54,23 @@ class ImageOptionsTests(OESelftestTestCase): | |||
50 | def test_read_only_image(self): | 54 | def test_read_only_image(self): |
51 | distro_features = get_bb_var('DISTRO_FEATURES') | 55 | distro_features = get_bb_var('DISTRO_FEATURES') |
52 | if not ('x11' in distro_features and 'opengl' in distro_features): | 56 | if not ('x11' in distro_features and 'opengl' in distro_features): |
53 | self.skipTest('core-image-sato requires x11 and opengl in distro features') | 57 | self.skipTest('core-image-sato/weston requires x11 and opengl in distro features') |
54 | self.write_config('IMAGE_FEATURES += "read-only-rootfs"') | 58 | self.write_config('IMAGE_FEATURES += "read-only-rootfs"') |
55 | bitbake("core-image-sato") | 59 | bitbake("core-image-sato core-image-weston") |
56 | # do_image will fail if there are any pending postinsts | 60 | # do_image will fail if there are any pending postinsts |
57 | 61 | ||
58 | class DiskMonTest(OESelftestTestCase): | 62 | class DiskMonTest(OESelftestTestCase): |
59 | 63 | ||
60 | def test_stoptask_behavior(self): | 64 | def test_stoptask_behavior(self): |
61 | self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"') | 65 | self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"') |
62 | res = bitbake("delay -c delay", ignore_status = True) | 66 | res = bitbake("delay -c delay", ignore_status = True) |
63 | self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output) | 67 | self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output) |
64 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) | 68 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) |
65 | self.write_config('BB_DISKMON_DIRS = "ABORT,${TMPDIR},100000G,100K"') | 69 | self.write_config('BB_DISKMON_DIRS = "HALT,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"') |
66 | res = bitbake("delay -c delay", ignore_status = True) | 70 | res = bitbake("delay -c delay", ignore_status = True) |
67 | self.assertTrue('ERROR: Immediately abort since the disk space monitor action is "ABORT"!' in res.output, "Tasks should have been aborted immediatelly. Disk monitor is set to ABORT: %s" % res.output) | 71 | self.assertTrue('ERROR: Immediately halt since the disk space monitor action is "HALT"!' in res.output, "Tasks should have been halted immediately. Disk monitor is set to HALT: %s" % res.output) |
68 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) | 72 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) |
69 | self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"') | 73 | self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"') |
70 | res = bitbake("delay -c delay") | 74 | res = bitbake("delay -c delay") |
71 | self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output) | 75 | self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output) |
72 | 76 | ||
@@ -78,9 +82,9 @@ class SanityOptionsTest(OESelftestTestCase): | |||
78 | 82 | ||
79 | def test_options_warnqa_errorqa_switch(self): | 83 | def test_options_warnqa_errorqa_switch(self): |
80 | 84 | ||
81 | self.write_config("INHERIT_remove = \"report-error\"") | 85 | self.write_config("INHERIT:remove = \"report-error\"") |
82 | if "packages-list" not in get_bb_var("ERROR_QA"): | 86 | if "packages-list" not in get_bb_var("ERROR_QA"): |
83 | self.append_config("ERROR_QA_append = \" packages-list\"") | 87 | self.append_config("ERROR_QA:append = \" packages-list\"") |
84 | 88 | ||
85 | self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') | 89 | self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') |
86 | self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme') | 90 | self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme') |
@@ -90,8 +94,8 @@ class SanityOptionsTest(OESelftestTestCase): | |||
90 | self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) | 94 | self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) |
91 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) | 95 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) |
92 | self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') | 96 | self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') |
93 | self.append_config('ERROR_QA_remove = "packages-list"') | 97 | self.append_config('ERROR_QA:remove = "packages-list"') |
94 | self.append_config('WARN_QA_append = " packages-list"') | 98 | self.append_config('WARN_QA:append = " packages-list"') |
95 | res = bitbake("xcursor-transparent-theme -f -c package") | 99 | res = bitbake("xcursor-transparent-theme -f -c package") |
96 | self.delete_recipeinc('xcursor-transparent-theme') | 100 | self.delete_recipeinc('xcursor-transparent-theme') |
97 | line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.") | 101 | line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.") |
@@ -148,19 +152,48 @@ class BuildhistoryTests(BuildhistoryBase): | |||
148 | self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True) | 152 | self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True) |
149 | self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error) | 153 | self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error) |
150 | 154 | ||
155 | def test_fileinfo(self): | ||
156 | self.config_buildhistory() | ||
157 | bitbake('hicolor-icon-theme') | ||
158 | history_dir = get_bb_var('BUILDHISTORY_DIR_PACKAGE', 'hicolor-icon-theme') | ||
159 | self.assertTrue(os.path.isdir(history_dir), 'buildhistory dir was not created.') | ||
160 | |||
161 | def load_bh(f): | ||
162 | d = {} | ||
163 | for line in open(f): | ||
164 | split = [s.strip() for s in line.split('=', 1)] | ||
165 | if len(split) > 1: | ||
166 | d[split[0]] = split[1] | ||
167 | return d | ||
168 | |||
169 | data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme', 'latest')) | ||
170 | self.assertIn('FILELIST', data) | ||
171 | self.assertEqual(data['FILELIST'], '/usr/share/icons/hicolor/index.theme') | ||
172 | self.assertGreater(int(data['PKGSIZE']), 0) | ||
173 | |||
174 | data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest')) | ||
175 | if 'FILELIST' in data: | ||
176 | self.assertEqual(data['FILELIST'], '') | ||
177 | self.assertEqual(int(data['PKGSIZE']), 0) | ||
178 | |||
151 | class ArchiverTest(OESelftestTestCase): | 179 | class ArchiverTest(OESelftestTestCase): |
152 | def test_arch_work_dir_and_export_source(self): | 180 | def test_arch_work_dir_and_export_source(self): |
153 | """ | 181 | """ |
154 | Test for archiving the work directory and exporting the source files. | 182 | Test for archiving the work directory and exporting the source files. |
155 | """ | 183 | """ |
156 | self.write_config("INHERIT += \"archiver\"\nARCHIVER_MODE[src] = \"original\"\nARCHIVER_MODE[srpm] = \"1\"") | 184 | self.write_config(""" |
185 | INHERIT += "archiver" | ||
186 | PACKAGE_CLASSES = "package_rpm" | ||
187 | ARCHIVER_MODE[src] = "original" | ||
188 | ARCHIVER_MODE[srpm] = "1" | ||
189 | """) | ||
157 | res = bitbake("xcursor-transparent-theme", ignore_status=True) | 190 | res = bitbake("xcursor-transparent-theme", ignore_status=True) |
158 | self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output) | 191 | self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output) |
159 | deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC') | 192 | deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC') |
160 | pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*") | 193 | pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*") |
161 | src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm" | 194 | src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm" |
162 | tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.gz" | 195 | tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.xz" |
163 | self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.gz files under %s/allarch*/xcursor*" % deploy_dir_src) | 196 | self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.xz files under %s/allarch*/xcursor*" % deploy_dir_src) |
164 | 197 | ||
165 | class ToolchainOptions(OESelftestTestCase): | 198 | class ToolchainOptions(OESelftestTestCase): |
166 | def test_toolchain_fortran(self): | 199 | def test_toolchain_fortran(self): |
@@ -168,10 +201,11 @@ class ToolchainOptions(OESelftestTestCase): | |||
168 | Test that Fortran works by building a Hello, World binary. | 201 | Test that Fortran works by building a Hello, World binary. |
169 | """ | 202 | """ |
170 | 203 | ||
171 | features = 'FORTRAN_forcevariable = ",fortran"\n' | 204 | features = 'FORTRAN:forcevariable = ",fortran"\n' |
172 | self.write_config(features) | 205 | self.write_config(features) |
173 | bitbake('fortran-helloworld') | 206 | bitbake('fortran-helloworld') |
174 | 207 | ||
208 | @OETestTag("yocto-mirrors") | ||
175 | class SourceMirroring(OESelftestTestCase): | 209 | class SourceMirroring(OESelftestTestCase): |
176 | # Can we download everything from the Yocto Sources Mirror over http only | 210 | # Can we download everything from the Yocto Sources Mirror over http only |
177 | def test_yocto_source_mirror(self): | 211 | def test_yocto_source_mirror(self): |
@@ -197,3 +231,9 @@ PREMIRRORS = "\\ | |||
197 | 231 | ||
198 | bitbake("world --runall fetch") | 232 | bitbake("world --runall fetch") |
199 | 233 | ||
234 | |||
235 | class Poisoning(OESelftestTestCase): | ||
236 | def test_poisoning(self): | ||
237 | res = bitbake("poison", ignore_status=True) | ||
238 | self.assertNotEqual(res.status, 0) | ||
239 | self.assertTrue("is unsafe for cross-compilation" in res.output) | ||
diff --git a/meta/lib/oeqa/selftest/cases/c_cpp.py b/meta/lib/oeqa/selftest/cases/c_cpp.py new file mode 100644 index 0000000000..9a70ce29f5 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/c_cpp.py | |||
@@ -0,0 +1,60 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | from oeqa.selftest.case import OESelftestTestCase | ||
8 | from oeqa.core.decorator.data import skipIfNotQemuUsermode | ||
9 | from oeqa.utils.commands import bitbake | ||
10 | |||
11 | |||
12 | class CCppTests(OESelftestTestCase): | ||
13 | |||
14 | @skipIfNotQemuUsermode() | ||
15 | def _qemu_usermode(self, recipe_name): | ||
16 | self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name) | ||
17 | bitbake("%s -c run_tests" % recipe_name) | ||
18 | |||
19 | @skipIfNotQemuUsermode() | ||
20 | def _qemu_usermode_failing(self, recipe_name): | ||
21 | config = 'PACKAGECONFIG:pn-%s = "failing_test"' % recipe_name | ||
22 | self.write_config(config) | ||
23 | self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name) | ||
24 | result = bitbake("%s -c run_tests" % recipe_name, ignore_status=True) | ||
25 | self.assertNotEqual(0, result.status, "command: %s is expected to fail but passed, status: %s, output: %s, error: %s" % ( | ||
26 | result.command, result.status, result.output, result.error)) | ||
27 | |||
28 | |||
29 | class CMakeTests(CCppTests): | ||
30 | def test_cmake_qemu(self): | ||
31 | """Test for cmake-qemu.bbclass good case | ||
32 | |||
33 | compile the cmake-example and verify the CTests pass in qemu-user. | ||
34 | qemu-user is configured by CMAKE_CROSSCOMPILING_EMULATOR. | ||
35 | """ | ||
36 | self._qemu_usermode("cmake-example") | ||
37 | |||
38 | def test_cmake_qemu_failing(self): | ||
39 | """Test for cmake-qemu.bbclass bad case | ||
40 | |||
41 | Break the comparison in the test code and verify the CTests do not pass. | ||
42 | """ | ||
43 | self._qemu_usermode_failing("cmake-example") | ||
44 | |||
45 | |||
46 | class MesonTests(CCppTests): | ||
47 | def test_meson_qemu(self): | ||
48 | """Test the qemu-user feature of the meson.bbclass good case | ||
49 | |||
50 | compile the meson-example and verify the Unit Test pass in qemu-user. | ||
51 | qemu-user is configured by meson's exe_wrapper option. | ||
52 | """ | ||
53 | self._qemu_usermode("meson-example") | ||
54 | |||
55 | def test_meson_qemu_failing(self): | ||
56 | """Test the qemu-user feature of the meson.bbclass bad case | ||
57 | |||
58 | Break the comparison in the test code and verify the Unit Test does not pass in qemu-user. | ||
59 | """ | ||
60 | self._qemu_usermode_failing("meson-example") | ||
diff --git a/meta/lib/oeqa/selftest/cases/containerimage.py b/meta/lib/oeqa/selftest/cases/containerimage.py index 79cc8a0f2e..23c0a1408a 100644 --- a/meta/lib/oeqa/selftest/cases/containerimage.py +++ b/meta/lib/oeqa/selftest/cases/containerimage.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -13,7 +15,7 @@ from oeqa.utils.commands import bitbake, get_bb_vars, runCmd | |||
13 | # The only package added to the image is container_image_testpkg, which | 15 | # The only package added to the image is container_image_testpkg, which |
14 | # contains one file. However, due to some other things not cleaning up during | 16 | # contains one file. However, due to some other things not cleaning up during |
15 | # rootfs creation, there is some cruft. Ideally bugs will be filed and the | 17 | # rootfs creation, there is some cruft. Ideally bugs will be filed and the |
16 | # cruft removed, but for now we whitelist some known set. | 18 | # cruft removed, but for now we ignore some known set. |
17 | # | 19 | # |
18 | # Also for performance reasons we're only checking the cruft when using ipk. | 20 | # Also for performance reasons we're only checking the cruft when using ipk. |
19 | # When using deb, and rpm it is a bit different and we could test all | 21 | # When using deb, and rpm it is a bit different and we could test all |
@@ -22,7 +24,7 @@ from oeqa.utils.commands import bitbake, get_bb_vars, runCmd | |||
22 | # | 24 | # |
23 | class ContainerImageTests(OESelftestTestCase): | 25 | class ContainerImageTests(OESelftestTestCase): |
24 | 26 | ||
25 | # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that | 27 | # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that |
26 | # the conversion type bar gets added as a dep as well | 28 | # the conversion type bar gets added as a dep as well |
27 | def test_expected_files(self): | 29 | def test_expected_files(self): |
28 | 30 | ||
@@ -43,7 +45,7 @@ PACKAGE_CLASSES = "package_ipk" | |||
43 | IMAGE_FEATURES = "" | 45 | IMAGE_FEATURES = "" |
44 | IMAGE_BUILDINFO_FILE = "" | 46 | IMAGE_BUILDINFO_FILE = "" |
45 | INIT_MANAGER = "sysvinit" | 47 | INIT_MANAGER = "sysvinit" |
46 | IMAGE_INSTALL_remove = "ssh-pregen-hostkeys" | 48 | IMAGE_INSTALL:remove = "ssh-pregen-hostkeys" |
47 | 49 | ||
48 | """) | 50 | """) |
49 | 51 | ||
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py index 3f343a2841..60cecd1328 100644 --- a/meta/lib/oeqa/selftest/cases/cve_check.py +++ b/meta/lib/oeqa/selftest/cases/cve_check.py | |||
@@ -1,9 +1,19 @@ | |||
1 | from oe.cve_check import Version | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | import json | ||
8 | import os | ||
2 | from oeqa.selftest.case import OESelftestTestCase | 9 | from oeqa.selftest.case import OESelftestTestCase |
10 | from oeqa.utils.commands import bitbake, get_bb_vars | ||
3 | 11 | ||
4 | class CVECheck(OESelftestTestCase): | 12 | class CVECheck(OESelftestTestCase): |
5 | 13 | ||
6 | def test_version_compare(self): | 14 | def test_version_compare(self): |
15 | from oe.cve_check import Version | ||
16 | |||
7 | result = Version("100") > Version("99") | 17 | result = Version("100") > Version("99") |
8 | self.assertTrue( result, msg="Failed to compare version '100' > '99'") | 18 | self.assertTrue( result, msg="Failed to compare version '100' > '99'") |
9 | result = Version("2.3.1") > Version("2.2.3") | 19 | result = Version("2.3.1") > Version("2.2.3") |
@@ -34,3 +44,199 @@ class CVECheck(OESelftestTestCase): | |||
34 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'") | 44 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'") |
35 | result = Version("1.0b","alphabetical") > Version("1.0","alphabetical") | 45 | result = Version("1.0b","alphabetical") > Version("1.0","alphabetical") |
36 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'") | 46 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'") |
47 | |||
48 | # consider the trailing "p" and "patch" as patched released when comparing | ||
49 | result = Version("1.0","patch") < Version("1.0p1","patch") | ||
50 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0' < '1.0p1'") | ||
51 | result = Version("1.0p2","patch") > Version("1.0p1","patch") | ||
52 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0p2' > '1.0p1'") | ||
53 | result = Version("1.0_patch2","patch") < Version("1.0_patch3","patch") | ||
54 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'") | ||
55 | |||
56 | |||
57 | def test_convert_cve_version(self): | ||
58 | from oe.cve_check import convert_cve_version | ||
59 | |||
60 | # Default format | ||
61 | self.assertEqual(convert_cve_version("8.3"), "8.3") | ||
62 | self.assertEqual(convert_cve_version(""), "") | ||
63 | |||
64 | # OpenSSL format version | ||
65 | self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t") | ||
66 | |||
67 | # OpenSSH format | ||
68 | self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1") | ||
69 | self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22") | ||
70 | |||
71 | # Linux kernel format | ||
72 | self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8") | ||
73 | self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31") | ||
74 | |||
75 | |||
76 | def test_recipe_report_json(self): | ||
77 | config = """ | ||
78 | INHERIT += "cve-check" | ||
79 | CVE_CHECK_FORMAT_JSON = "1" | ||
80 | """ | ||
81 | self.write_config(config) | ||
82 | |||
83 | vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
84 | summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
85 | recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json") | ||
86 | |||
87 | try: | ||
88 | os.remove(summary_json) | ||
89 | os.remove(recipe_json) | ||
90 | except FileNotFoundError: | ||
91 | pass | ||
92 | |||
93 | bitbake("m4-native -c cve_check") | ||
94 | |||
95 | def check_m4_json(filename): | ||
96 | with open(filename) as f: | ||
97 | report = json.load(f) | ||
98 | self.assertEqual(report["version"], "1") | ||
99 | self.assertEqual(len(report["package"]), 1) | ||
100 | package = report["package"][0] | ||
101 | self.assertEqual(package["name"], "m4-native") | ||
102 | found_cves = { issue["id"]: issue["status"] for issue in package["issue"]} | ||
103 | self.assertIn("CVE-2008-1687", found_cves) | ||
104 | self.assertEqual(found_cves["CVE-2008-1687"], "Patched") | ||
105 | |||
106 | self.assertExists(summary_json) | ||
107 | check_m4_json(summary_json) | ||
108 | self.assertExists(recipe_json) | ||
109 | check_m4_json(recipe_json) | ||
110 | |||
111 | |||
112 | def test_image_json(self): | ||
113 | config = """ | ||
114 | INHERIT += "cve-check" | ||
115 | CVE_CHECK_FORMAT_JSON = "1" | ||
116 | """ | ||
117 | self.write_config(config) | ||
118 | |||
119 | vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
120 | report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
121 | print(report_json) | ||
122 | try: | ||
123 | os.remove(report_json) | ||
124 | except FileNotFoundError: | ||
125 | pass | ||
126 | |||
127 | bitbake("core-image-minimal-initramfs") | ||
128 | self.assertExists(report_json) | ||
129 | |||
130 | # Check that the summary report lists at least one package | ||
131 | with open(report_json) as f: | ||
132 | report = json.load(f) | ||
133 | self.assertEqual(report["version"], "1") | ||
134 | self.assertGreater(len(report["package"]), 1) | ||
135 | |||
136 | # Check that a random recipe wrote a recipe report to deploy/cve/ | ||
137 | recipename = report["package"][0]["name"] | ||
138 | recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json") | ||
139 | self.assertExists(recipe_report) | ||
140 | with open(recipe_report) as f: | ||
141 | report = json.load(f) | ||
142 | self.assertEqual(report["version"], "1") | ||
143 | self.assertEqual(len(report["package"]), 1) | ||
144 | self.assertEqual(report["package"][0]["name"], recipename) | ||
145 | |||
146 | |||
147 | def test_recipe_report_json_unpatched(self): | ||
148 | config = """ | ||
149 | INHERIT += "cve-check" | ||
150 | CVE_CHECK_FORMAT_JSON = "1" | ||
151 | CVE_CHECK_REPORT_PATCHED = "0" | ||
152 | """ | ||
153 | self.write_config(config) | ||
154 | |||
155 | vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
156 | summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
157 | recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json") | ||
158 | |||
159 | try: | ||
160 | os.remove(summary_json) | ||
161 | os.remove(recipe_json) | ||
162 | except FileNotFoundError: | ||
163 | pass | ||
164 | |||
165 | bitbake("m4-native -c cve_check") | ||
166 | |||
167 | def check_m4_json(filename): | ||
168 | with open(filename) as f: | ||
169 | report = json.load(f) | ||
170 | self.assertEqual(report["version"], "1") | ||
171 | self.assertEqual(len(report["package"]), 1) | ||
172 | package = report["package"][0] | ||
173 | self.assertEqual(package["name"], "m4-native") | ||
174 | #m4 had only Patched CVEs, so the issues array will be empty | ||
175 | self.assertEqual(package["issue"], []) | ||
176 | |||
177 | self.assertExists(summary_json) | ||
178 | check_m4_json(summary_json) | ||
179 | self.assertExists(recipe_json) | ||
180 | check_m4_json(recipe_json) | ||
181 | |||
182 | |||
183 | def test_recipe_report_json_ignored(self): | ||
184 | config = """ | ||
185 | INHERIT += "cve-check" | ||
186 | CVE_CHECK_FORMAT_JSON = "1" | ||
187 | CVE_CHECK_REPORT_PATCHED = "1" | ||
188 | """ | ||
189 | self.write_config(config) | ||
190 | |||
191 | vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
192 | summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
193 | recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json") | ||
194 | |||
195 | try: | ||
196 | os.remove(summary_json) | ||
197 | os.remove(recipe_json) | ||
198 | except FileNotFoundError: | ||
199 | pass | ||
200 | |||
201 | bitbake("logrotate -c cve_check") | ||
202 | |||
203 | def check_m4_json(filename): | ||
204 | with open(filename) as f: | ||
205 | report = json.load(f) | ||
206 | self.assertEqual(report["version"], "1") | ||
207 | self.assertEqual(len(report["package"]), 1) | ||
208 | package = report["package"][0] | ||
209 | self.assertEqual(package["name"], "logrotate") | ||
210 | found_cves = {} | ||
211 | for issue in package["issue"]: | ||
212 | found_cves[issue["id"]] = { | ||
213 | "status" : issue["status"], | ||
214 | "detail" : issue["detail"] if "detail" in issue else "", | ||
215 | "description" : issue["description"] if "description" in issue else "" | ||
216 | } | ||
217 | # m4 CVE should not be in logrotate | ||
218 | self.assertNotIn("CVE-2008-1687", found_cves) | ||
219 | # logrotate has both Patched and Ignored CVEs | ||
220 | self.assertIn("CVE-2011-1098", found_cves) | ||
221 | self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched") | ||
222 | self.assertEqual(len(found_cves["CVE-2011-1098"]["detail"]), 0) | ||
223 | self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0) | ||
224 | detail = "not-applicable-platform" | ||
225 | description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used" | ||
226 | self.assertIn("CVE-2011-1548", found_cves) | ||
227 | self.assertEqual(found_cves["CVE-2011-1548"]["status"], "Ignored") | ||
228 | self.assertEqual(found_cves["CVE-2011-1548"]["detail"], detail) | ||
229 | self.assertEqual(found_cves["CVE-2011-1548"]["description"], description) | ||
230 | self.assertIn("CVE-2011-1549", found_cves) | ||
231 | self.assertEqual(found_cves["CVE-2011-1549"]["status"], "Ignored") | ||
232 | self.assertEqual(found_cves["CVE-2011-1549"]["detail"], detail) | ||
233 | self.assertEqual(found_cves["CVE-2011-1549"]["description"], description) | ||
234 | self.assertIn("CVE-2011-1550", found_cves) | ||
235 | self.assertEqual(found_cves["CVE-2011-1550"]["status"], "Ignored") | ||
236 | self.assertEqual(found_cves["CVE-2011-1550"]["detail"], detail) | ||
237 | self.assertEqual(found_cves["CVE-2011-1550"]["description"], description) | ||
238 | |||
239 | self.assertExists(summary_json) | ||
240 | check_m4_json(summary_json) | ||
241 | self.assertExists(recipe_json) | ||
242 | check_m4_json(recipe_json) | ||
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py new file mode 100644 index 0000000000..505b4be837 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/debuginfod.py | |||
@@ -0,0 +1,158 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | import os | ||
7 | import socketserver | ||
8 | import subprocess | ||
9 | import time | ||
10 | import urllib | ||
11 | import pathlib | ||
12 | |||
13 | from oeqa.core.decorator import OETestTag | ||
14 | from oeqa.selftest.case import OESelftestTestCase | ||
15 | from oeqa.utils.commands import bitbake, get_bb_var, runqemu | ||
16 | |||
17 | |||
18 | class Debuginfod(OESelftestTestCase): | ||
19 | |||
20 | def wait_for_debuginfod(self, port): | ||
21 | """ | ||
22 | debuginfod takes time to scan the packages and requesting too early may | ||
23 | result in a test failure if the right packages haven't been scanned yet. | ||
24 | |||
25 | Request the metrics endpoint periodically and wait for there to be no | ||
26 | busy scanning threads. | ||
27 | |||
28 | Returns if debuginfod is ready, raises an exception if not within the | ||
29 | timeout. | ||
30 | """ | ||
31 | |||
32 | # Wait two minutes | ||
33 | countdown = 24 | ||
34 | delay = 5 | ||
35 | latest = None | ||
36 | |||
37 | while countdown: | ||
38 | self.logger.info("waiting...") | ||
39 | time.sleep(delay) | ||
40 | |||
41 | self.logger.info("polling server") | ||
42 | if self.debuginfod.poll(): | ||
43 | self.logger.info("server dead") | ||
44 | self.debuginfod.communicate() | ||
45 | self.fail("debuginfod terminated unexpectedly") | ||
46 | self.logger.info("server alive") | ||
47 | |||
48 | try: | ||
49 | with urllib.request.urlopen("http://localhost:%d/metrics" % port, timeout=10) as f: | ||
50 | for line in f.read().decode("ascii").splitlines(): | ||
51 | key, value = line.rsplit(" ", 1) | ||
52 | if key == "thread_busy{role=\"scan\"}": | ||
53 | latest = int(value) | ||
54 | self.logger.info("Waiting for %d scan jobs to finish" % latest) | ||
55 | if latest == 0: | ||
56 | return | ||
57 | except urllib.error.URLError as e: | ||
58 | # TODO: how to catch just timeouts? | ||
59 | self.logger.error(e) | ||
60 | |||
61 | countdown -= 1 | ||
62 | |||
63 | raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest) | ||
64 | |||
65 | def start_debuginfod(self): | ||
66 | # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot | ||
67 | |||
68 | # Save some useful paths for later | ||
69 | native_sysroot = pathlib.Path(get_bb_var("RECIPE_SYSROOT_NATIVE", "elfutils-native")) | ||
70 | native_bindir = native_sysroot / "usr" / "bin" | ||
71 | self.debuginfod = native_bindir / "debuginfod" | ||
72 | self.debuginfod_find = native_bindir / "debuginfod-find" | ||
73 | |||
74 | cmd = [ | ||
75 | self.debuginfod, | ||
76 | "--verbose", | ||
77 | # In-memory database, this is a one-shot test | ||
78 | "--database=:memory:", | ||
79 | # Don't use all the host cores | ||
80 | "--concurrency=8", | ||
81 | "--connection-pool=8", | ||
82 | # Disable rescanning, this is a one-shot test | ||
83 | "--rescan-time=0", | ||
84 | "--groom-time=0", | ||
85 | get_bb_var("DEPLOY_DIR"), | ||
86 | ] | ||
87 | |||
88 | format = get_bb_var("PACKAGE_CLASSES").split()[0] | ||
89 | if format == "package_deb": | ||
90 | cmd.append("--scan-deb-dir") | ||
91 | elif format == "package_ipk": | ||
92 | cmd.append("--scan-deb-dir") | ||
93 | elif format == "package_rpm": | ||
94 | cmd.append("--scan-rpm-dir") | ||
95 | else: | ||
96 | self.fail("Unknown package class %s" % format) | ||
97 | |||
98 | # Find a free port. Racey but the window is small. | ||
99 | with socketserver.TCPServer(("localhost", 0), None) as s: | ||
100 | self.port = s.server_address[1] | ||
101 | cmd.append("--port=%d" % self.port) | ||
102 | |||
103 | self.logger.info(f"Starting server {cmd}") | ||
104 | self.debuginfod = subprocess.Popen(cmd, env={}) | ||
105 | self.wait_for_debuginfod(self.port) | ||
106 | |||
107 | |||
108 | def test_debuginfod_native(self): | ||
109 | """ | ||
110 | Test debuginfod outside of qemu, by building a package and looking up a | ||
111 | binary's debuginfo using elfutils-native. | ||
112 | """ | ||
113 | |||
114 | self.write_config(""" | ||
115 | TMPDIR = "${TOPDIR}/tmp-debuginfod" | ||
116 | DISTRO_FEATURES:append = " debuginfod" | ||
117 | """) | ||
118 | bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package") | ||
119 | |||
120 | try: | ||
121 | self.start_debuginfod() | ||
122 | |||
123 | env = os.environ.copy() | ||
124 | env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port | ||
125 | |||
126 | pkgs = pathlib.Path(get_bb_var("PKGDEST", "xz")) | ||
127 | cmd = (self.debuginfod_find, "debuginfo", pkgs / "xz" / "usr" / "bin" / "xz.xz") | ||
128 | self.logger.info(f"Starting client {cmd}") | ||
129 | output = subprocess.check_output(cmd, env=env, text=True) | ||
130 | # This should be more comprehensive | ||
131 | self.assertIn("/.cache/debuginfod_client/", output) | ||
132 | finally: | ||
133 | self.debuginfod.kill() | ||
134 | |||
135 | @OETestTag("runqemu") | ||
136 | def test_debuginfod_qemu(self): | ||
137 | """ | ||
138 | Test debuginfod-find inside a qemu, talking to a debuginfod on the host. | ||
139 | """ | ||
140 | |||
141 | self.write_config(""" | ||
142 | TMPDIR = "${TOPDIR}/tmp-debuginfod" | ||
143 | DISTRO_FEATURES:append = " debuginfod" | ||
144 | CORE_IMAGE_EXTRA_INSTALL += "elfutils xz" | ||
145 | """) | ||
146 | bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot") | ||
147 | |||
148 | try: | ||
149 | self.start_debuginfod() | ||
150 | |||
151 | with runqemu("core-image-minimal", runqemuparams="nographic") as qemu: | ||
152 | cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port) | ||
153 | self.logger.info(f"Starting client {cmd}") | ||
154 | status, output = qemu.run_serial(cmd) | ||
155 | # This should be more comprehensive | ||
156 | self.assertIn("/.cache/debuginfod_client/", output) | ||
157 | finally: | ||
158 | self.debuginfod.kill() | ||
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py index 3385546e8e..51949e3c93 100644 --- a/meta/lib/oeqa/selftest/cases/devtool.py +++ b/meta/lib/oeqa/selftest/cases/devtool.py | |||
@@ -1,18 +1,23 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
7 | import errno | ||
5 | import os | 8 | import os |
6 | import re | 9 | import re |
7 | import shutil | 10 | import shutil |
8 | import tempfile | 11 | import tempfile |
9 | import glob | 12 | import glob |
10 | import fnmatch | 13 | import fnmatch |
14 | import unittest | ||
15 | import json | ||
11 | 16 | ||
12 | import oeqa.utils.ftools as ftools | ||
13 | from oeqa.selftest.case import OESelftestTestCase | 17 | from oeqa.selftest.case import OESelftestTestCase |
14 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer | 18 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer |
15 | from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer | 19 | from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer |
20 | from oeqa.core.decorator import OETestTag | ||
16 | 21 | ||
17 | oldmetapath = None | 22 | oldmetapath = None |
18 | 23 | ||
@@ -24,6 +29,9 @@ def setUpModule(): | |||
24 | corecopydir = os.path.join(templayerdir, 'core-copy') | 29 | corecopydir = os.path.join(templayerdir, 'core-copy') |
25 | bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf') | 30 | bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf') |
26 | edited_layers = [] | 31 | edited_layers = [] |
32 | # make sure user doesn't have a local workspace | ||
33 | result = runCmd('bitbake-layers show-layers') | ||
34 | assert "workspacelayer" not in result.output, "Devtool test suite cannot be run with a local workspace directory" | ||
27 | 35 | ||
28 | # We need to take a copy of the meta layer so we can modify it and not | 36 | # We need to take a copy of the meta layer so we can modify it and not |
29 | # have any races against other tests that might be running in parallel | 37 | # have any races against other tests that might be running in parallel |
@@ -38,10 +46,17 @@ def setUpModule(): | |||
38 | canonical_layerpath = os.path.realpath(canonical_layerpath) + '/' | 46 | canonical_layerpath = os.path.realpath(canonical_layerpath) + '/' |
39 | edited_layers.append(layerpath) | 47 | edited_layers.append(layerpath) |
40 | oldmetapath = os.path.realpath(layerpath) | 48 | oldmetapath = os.path.realpath(layerpath) |
49 | |||
50 | # when downloading poky from tar.gz some tests will be skipped (BUG 12389) | ||
51 | try: | ||
52 | runCmd('git rev-parse --is-inside-work-tree', cwd=canonical_layerpath) | ||
53 | except: | ||
54 | raise unittest.SkipTest("devtool tests require folder to be a git repo") | ||
55 | |||
41 | result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath) | 56 | result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath) |
42 | oldreporoot = result.output.rstrip() | 57 | oldreporoot = result.output.rstrip() |
43 | newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot)) | 58 | newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot)) |
44 | runCmd('git clone %s %s' % (oldreporoot, corecopydir), cwd=templayerdir) | 59 | runCmd('git clone file://%s %s' % (oldreporoot, corecopydir), cwd=templayerdir) |
45 | # Now we need to copy any modified files | 60 | # Now we need to copy any modified files |
46 | # You might ask "why not just copy the entire tree instead of | 61 | # You might ask "why not just copy the entire tree instead of |
47 | # cloning and doing this?" - well, the problem with that is | 62 | # cloning and doing this?" - well, the problem with that is |
@@ -80,32 +95,15 @@ def tearDownModule(): | |||
80 | bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb) | 95 | bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb) |
81 | shutil.rmtree(templayerdir) | 96 | shutil.rmtree(templayerdir) |
82 | 97 | ||
83 | class DevtoolBase(OESelftestTestCase): | 98 | class DevtoolTestCase(OESelftestTestCase): |
84 | |||
85 | @classmethod | ||
86 | def setUpClass(cls): | ||
87 | super(DevtoolBase, cls).setUpClass() | ||
88 | bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR']) | ||
89 | cls.original_sstate = bb_vars['SSTATE_DIR'] | ||
90 | cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool') | ||
91 | cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate | ||
92 | cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n' | ||
93 | % cls.original_sstate) | ||
94 | |||
95 | @classmethod | ||
96 | def tearDownClass(cls): | ||
97 | cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate) | ||
98 | runCmd('rm -rf %s' % cls.devtool_sstate) | ||
99 | super(DevtoolBase, cls).tearDownClass() | ||
100 | 99 | ||
101 | def setUp(self): | 100 | def setUp(self): |
102 | """Test case setup function""" | 101 | """Test case setup function""" |
103 | super(DevtoolBase, self).setUp() | 102 | super(DevtoolTestCase, self).setUp() |
104 | self.workspacedir = os.path.join(self.builddir, 'workspace') | 103 | self.workspacedir = os.path.join(self.builddir, 'workspace') |
105 | self.assertTrue(not os.path.exists(self.workspacedir), | 104 | self.assertTrue(not os.path.exists(self.workspacedir), |
106 | 'This test cannot be run with a workspace directory ' | 105 | 'This test cannot be run with a workspace directory ' |
107 | 'under the build directory') | 106 | 'under the build directory') |
108 | self.append_config(self.sstate_conf) | ||
109 | 107 | ||
110 | def _check_src_repo(self, repo_dir): | 108 | def _check_src_repo(self, repo_dir): |
111 | """Check srctree git repository""" | 109 | """Check srctree git repository""" |
@@ -235,6 +233,100 @@ class DevtoolBase(OESelftestTestCase): | |||
235 | filelist.append(' '.join(splitline)) | 233 | filelist.append(' '.join(splitline)) |
236 | return filelist | 234 | return filelist |
237 | 235 | ||
236 | def _check_diff(self, diffoutput, addlines, removelines): | ||
237 | """Check output from 'git diff' matches expectation""" | ||
238 | remaining_addlines = addlines[:] | ||
239 | remaining_removelines = removelines[:] | ||
240 | for line in diffoutput.splitlines(): | ||
241 | if line.startswith('+++') or line.startswith('---'): | ||
242 | continue | ||
243 | elif line.startswith('+'): | ||
244 | matched = False | ||
245 | for item in addlines: | ||
246 | if re.match(item, line[1:].strip()): | ||
247 | matched = True | ||
248 | remaining_addlines.remove(item) | ||
249 | break | ||
250 | self.assertTrue(matched, 'Unexpected diff add line: %s' % line) | ||
251 | elif line.startswith('-'): | ||
252 | matched = False | ||
253 | for item in removelines: | ||
254 | if re.match(item, line[1:].strip()): | ||
255 | matched = True | ||
256 | remaining_removelines.remove(item) | ||
257 | break | ||
258 | self.assertTrue(matched, 'Unexpected diff remove line: %s' % line) | ||
259 | if remaining_addlines: | ||
260 | self.fail('Expected added lines not found: %s' % remaining_addlines) | ||
261 | if remaining_removelines: | ||
262 | self.fail('Expected removed lines not found: %s' % remaining_removelines) | ||
263 | |||
264 | def _check_runqemu_prerequisites(self): | ||
265 | """Check runqemu is available | ||
266 | |||
267 | Whilst some tests would seemingly be better placed as a runtime test, | ||
268 | unfortunately the runtime tests run under bitbake and you can't run | ||
269 | devtool within bitbake (since devtool needs to run bitbake itself). | ||
270 | Additionally we are testing build-time functionality as well, so | ||
271 | really this has to be done as an oe-selftest test. | ||
272 | """ | ||
273 | machine = get_bb_var('MACHINE') | ||
274 | if not machine.startswith('qemu'): | ||
275 | self.skipTest('This test only works with qemu machines') | ||
276 | if not os.path.exists('/etc/runqemu-nosudo'): | ||
277 | self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test') | ||
278 | result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True) | ||
279 | if result.status != 0: | ||
280 | result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True) | ||
281 | if result.status != 0: | ||
282 | self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output) | ||
283 | for line in result.output.splitlines(): | ||
284 | if line.startswith('tap'): | ||
285 | break | ||
286 | else: | ||
287 | self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test') | ||
288 | |||
289 | def _test_devtool_add_git_url(self, git_url, version, pn, resulting_src_uri): | ||
290 | self.track_for_cleanup(self.workspacedir) | ||
291 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
292 | result = runCmd('devtool add --version %s %s %s' % (version, pn, git_url)) | ||
293 | self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created') | ||
294 | # Check the recipe name is correct | ||
295 | recipefile = get_bb_var('FILE', pn) | ||
296 | self.assertIn('%s_git.bb' % pn, recipefile, 'Recipe file incorrectly named') | ||
297 | self.assertIn(recipefile, result.output) | ||
298 | # Test devtool status | ||
299 | result = runCmd('devtool status') | ||
300 | self.assertIn(pn, result.output) | ||
301 | self.assertIn(recipefile, result.output) | ||
302 | checkvars = {} | ||
303 | checkvars['SRC_URI'] = resulting_src_uri | ||
304 | self._test_recipe_contents(recipefile, checkvars, []) | ||
305 | |||
306 | class DevtoolBase(DevtoolTestCase): | ||
307 | |||
308 | @classmethod | ||
309 | def setUpClass(cls): | ||
310 | super(DevtoolBase, cls).setUpClass() | ||
311 | bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR']) | ||
312 | cls.original_sstate = bb_vars['SSTATE_DIR'] | ||
313 | cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool') | ||
314 | cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate | ||
315 | cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n' | ||
316 | % cls.original_sstate) | ||
317 | cls.sstate_conf += ('BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"\n') | ||
318 | |||
319 | @classmethod | ||
320 | def tearDownClass(cls): | ||
321 | cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate) | ||
322 | runCmd('rm -rf %s' % cls.devtool_sstate) | ||
323 | super(DevtoolBase, cls).tearDownClass() | ||
324 | |||
325 | def setUp(self): | ||
326 | """Test case setup function""" | ||
327 | super(DevtoolBase, self).setUp() | ||
328 | self.append_config(self.sstate_conf) | ||
329 | |||
238 | 330 | ||
239 | class DevtoolTests(DevtoolBase): | 331 | class DevtoolTests(DevtoolBase): |
240 | 332 | ||
@@ -304,6 +396,38 @@ class DevtoolAddTests(DevtoolBase): | |||
304 | bindir = bindir[1:] | 396 | bindir = bindir[1:] |
305 | self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D') | 397 | self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D') |
306 | 398 | ||
399 | def test_devtool_add_binary(self): | ||
400 | # Create a binary package containing a known test file | ||
401 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
402 | self.track_for_cleanup(tempdir) | ||
403 | pn = 'tst-bin' | ||
404 | pv = '1.0' | ||
405 | test_file_dir = "var/lib/%s/" % pn | ||
406 | test_file_name = "test_file" | ||
407 | test_file_content = "TEST CONTENT" | ||
408 | test_file_package_root = os.path.join(tempdir, pn) | ||
409 | test_file_dir_full = os.path.join(test_file_package_root, test_file_dir) | ||
410 | bb.utils.mkdirhier(test_file_dir_full) | ||
411 | with open(os.path.join(test_file_dir_full, test_file_name), "w") as f: | ||
412 | f.write(test_file_content) | ||
413 | bin_package_path = os.path.join(tempdir, "%s.tar.gz" % pn) | ||
414 | runCmd("tar czf %s -C %s ." % (bin_package_path, test_file_package_root)) | ||
415 | |||
416 | # Test devtool add -b on the binary package | ||
417 | self.track_for_cleanup(self.workspacedir) | ||
418 | self.add_command_to_tearDown('bitbake -c cleansstate %s' % pn) | ||
419 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
420 | result = runCmd('devtool add -b %s %s' % (pn, bin_package_path)) | ||
421 | self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created') | ||
422 | |||
423 | # Build the resulting recipe | ||
424 | result = runCmd('devtool build %s' % pn) | ||
425 | installdir = get_bb_var('D', pn) | ||
426 | self.assertTrue(installdir, 'Could not query installdir variable') | ||
427 | |||
428 | # Check that a known file from the binary package has indeed been installed | ||
429 | self.assertTrue(os.path.isfile(os.path.join(installdir, test_file_dir, test_file_name)), '%s not found in D' % test_file_name) | ||
430 | |||
307 | def test_devtool_add_git_local(self): | 431 | def test_devtool_add_git_local(self): |
308 | # We need dbus built so that DEPENDS recognition works | 432 | # We need dbus built so that DEPENDS recognition works |
309 | bitbake('dbus') | 433 | bitbake('dbus') |
@@ -336,15 +460,31 @@ class DevtoolAddTests(DevtoolBase): | |||
336 | self.assertIn(srcdir, result.output) | 460 | self.assertIn(srcdir, result.output) |
337 | self.assertIn(recipefile, result.output) | 461 | self.assertIn(recipefile, result.output) |
338 | checkvars = {} | 462 | checkvars = {} |
339 | checkvars['LICENSE'] = 'GPLv2' | 463 | checkvars['LICENSE'] = 'GPL-2.0-only' |
340 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' | 464 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' |
341 | checkvars['S'] = '${WORKDIR}/git' | 465 | checkvars['S'] = '${WORKDIR}/git' |
342 | checkvars['PV'] = '0.1+git${SRCPV}' | 466 | checkvars['PV'] = '0.1+git' |
343 | checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https' | 467 | checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master' |
344 | checkvars['SRCREV'] = srcrev | 468 | checkvars['SRCREV'] = srcrev |
345 | checkvars['DEPENDS'] = set(['dbus']) | 469 | checkvars['DEPENDS'] = set(['dbus']) |
346 | self._test_recipe_contents(recipefile, checkvars, []) | 470 | self._test_recipe_contents(recipefile, checkvars, []) |
347 | 471 | ||
472 | def test_devtool_add_git_style1(self): | ||
473 | version = 'v3.1.0' | ||
474 | pn = 'mbedtls' | ||
475 | # this will trigger reformat_git_uri with branch parameter in url | ||
476 | git_url = "'git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https'" | ||
477 | resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https" | ||
478 | self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri) | ||
479 | |||
480 | def test_devtool_add_git_style2(self): | ||
481 | version = 'v3.1.0' | ||
482 | pn = 'mbedtls' | ||
483 | # this will trigger reformat_git_uri with branch parameter in url | ||
484 | git_url = "'git://git@github.com/ARMmbed/mbedtls.git;protocol=https'" | ||
485 | resulting_src_uri = "gitsm://git@github.com/ARMmbed/mbedtls.git;protocol=https;branch=master" | ||
486 | self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri) | ||
487 | |||
348 | def test_devtool_add_library(self): | 488 | def test_devtool_add_library(self): |
349 | # Fetch source | 489 | # Fetch source |
350 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | 490 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') |
@@ -373,7 +513,7 @@ class DevtoolAddTests(DevtoolBase): | |||
373 | recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version) | 513 | recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version) |
374 | result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile) | 514 | result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile) |
375 | with open(recipefile, 'a') as f: | 515 | with open(recipefile, 'a') as f: |
376 | f.write('\nFILES_${PN}-dev += "${datadir}/cmake/Modules"\n') | 516 | f.write('\nFILES:${PN}-dev += "${datadir}/cmake/Modules"\n') |
377 | # We don't have the ability to pick up this dependency automatically yet... | 517 | # We don't have the ability to pick up this dependency automatically yet... |
378 | f.write('\nDEPENDS += "libusb1"\n') | 518 | f.write('\nDEPENDS += "libusb1"\n') |
379 | f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n') | 519 | f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n') |
@@ -405,7 +545,7 @@ class DevtoolAddTests(DevtoolBase): | |||
405 | self.track_for_cleanup(self.workspacedir) | 545 | self.track_for_cleanup(self.workspacedir) |
406 | self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe) | 546 | self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe) |
407 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | 547 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') |
408 | result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url)) | 548 | result = runCmd('devtool add --no-pypi %s %s -f %s' % (testrecipe, srcdir, url)) |
409 | self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output) | 549 | self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output) |
410 | self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') | 550 | self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') |
411 | self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created') | 551 | self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created') |
@@ -424,7 +564,7 @@ class DevtoolAddTests(DevtoolBase): | |||
424 | result = runCmd('devtool reset -n %s' % testrecipe) | 564 | result = runCmd('devtool reset -n %s' % testrecipe) |
425 | shutil.rmtree(srcdir) | 565 | shutil.rmtree(srcdir) |
426 | fakever = '1.9' | 566 | fakever = '1.9' |
427 | result = runCmd('devtool add %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever)) | 567 | result = runCmd('devtool add --no-pypi %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever)) |
428 | self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') | 568 | self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') |
429 | # Test devtool status | 569 | # Test devtool status |
430 | result = runCmd('devtool status') | 570 | result = runCmd('devtool status') |
@@ -442,6 +582,7 @@ class DevtoolAddTests(DevtoolBase): | |||
442 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | 582 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') |
443 | self.track_for_cleanup(tempdir) | 583 | self.track_for_cleanup(tempdir) |
444 | url = 'gitsm://git.yoctoproject.org/mraa' | 584 | url = 'gitsm://git.yoctoproject.org/mraa' |
585 | url_branch = '%s;branch=master' % url | ||
445 | checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d' | 586 | checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d' |
446 | testrecipe = 'mraa' | 587 | testrecipe = 'mraa' |
447 | srcdir = os.path.join(tempdir, testrecipe) | 588 | srcdir = os.path.join(tempdir, testrecipe) |
@@ -461,8 +602,8 @@ class DevtoolAddTests(DevtoolBase): | |||
461 | self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') | 602 | self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') |
462 | checkvars = {} | 603 | checkvars = {} |
463 | checkvars['S'] = '${WORKDIR}/git' | 604 | checkvars['S'] = '${WORKDIR}/git' |
464 | checkvars['PV'] = '1.0+git${SRCPV}' | 605 | checkvars['PV'] = '1.0+git' |
465 | checkvars['SRC_URI'] = url | 606 | checkvars['SRC_URI'] = url_branch |
466 | checkvars['SRCREV'] = '${AUTOREV}' | 607 | checkvars['SRCREV'] = '${AUTOREV}' |
467 | self._test_recipe_contents(recipefile, checkvars, []) | 608 | self._test_recipe_contents(recipefile, checkvars, []) |
468 | # Try with revision and version specified | 609 | # Try with revision and version specified |
@@ -480,8 +621,8 @@ class DevtoolAddTests(DevtoolBase): | |||
480 | self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') | 621 | self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') |
481 | checkvars = {} | 622 | checkvars = {} |
482 | checkvars['S'] = '${WORKDIR}/git' | 623 | checkvars['S'] = '${WORKDIR}/git' |
483 | checkvars['PV'] = '1.5+git${SRCPV}' | 624 | checkvars['PV'] = '1.5+git' |
484 | checkvars['SRC_URI'] = url | 625 | checkvars['SRC_URI'] = url_branch |
485 | checkvars['SRCREV'] = checkrev | 626 | checkvars['SRCREV'] = checkrev |
486 | self._test_recipe_contents(recipefile, checkvars, []) | 627 | self._test_recipe_contents(recipefile, checkvars, []) |
487 | 628 | ||
@@ -504,7 +645,7 @@ class DevtoolAddTests(DevtoolBase): | |||
504 | result = runCmd('devtool status') | 645 | result = runCmd('devtool status') |
505 | self.assertIn(testrecipe, result.output) | 646 | self.assertIn(testrecipe, result.output) |
506 | self.assertIn(srcdir, result.output) | 647 | self.assertIn(srcdir, result.output) |
507 | # Check recipe | 648 | # Check recipe |
508 | recipefile = get_bb_var('FILE', testrecipe) | 649 | recipefile = get_bb_var('FILE', testrecipe) |
509 | self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named') | 650 | self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named') |
510 | checkvars = {} | 651 | checkvars = {} |
@@ -536,6 +677,19 @@ class DevtoolAddTests(DevtoolBase): | |||
536 | # Test devtool build | 677 | # Test devtool build |
537 | result = runCmd('devtool build %s' % pn) | 678 | result = runCmd('devtool build %s' % pn) |
538 | 679 | ||
680 | def test_devtool_add_python_egg_requires(self): | ||
681 | # Fetch source | ||
682 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
683 | self.track_for_cleanup(tempdir) | ||
684 | testver = '0.14.0' | ||
685 | url = 'https://files.pythonhosted.org/packages/e9/9e/25d59f5043cf763833b2581c8027fa92342c4cf8ee523b498ecdf460c16d/uvicorn-%s.tar.gz' % testver | ||
686 | testrecipe = 'python3-uvicorn' | ||
687 | srcdir = os.path.join(tempdir, testrecipe) | ||
688 | # Test devtool add | ||
689 | self.track_for_cleanup(self.workspacedir) | ||
690 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
691 | result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url)) | ||
692 | |||
539 | class DevtoolModifyTests(DevtoolBase): | 693 | class DevtoolModifyTests(DevtoolBase): |
540 | 694 | ||
541 | def test_devtool_modify(self): | 695 | def test_devtool_modify(self): |
@@ -649,7 +803,7 @@ class DevtoolModifyTests(DevtoolBase): | |||
649 | self.track_for_cleanup(self.workspacedir) | 803 | self.track_for_cleanup(self.workspacedir) |
650 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | 804 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') |
651 | 805 | ||
652 | testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk meta-ide-support'.split() | 806 | testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk'.split() |
653 | # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose | 807 | # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose |
654 | result = runCmd('bitbake-layers show-recipes gcc-source*') | 808 | result = runCmd('bitbake-layers show-recipes gcc-source*') |
655 | for line in result.output.splitlines(): | 809 | for line in result.output.splitlines(): |
@@ -697,6 +851,7 @@ class DevtoolModifyTests(DevtoolBase): | |||
697 | 851 | ||
698 | self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) | 852 | self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) |
699 | self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) | 853 | self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) |
854 | |||
700 | def test_devtool_modify_localfiles_only(self): | 855 | def test_devtool_modify_localfiles_only(self): |
701 | # Check preconditions | 856 | # Check preconditions |
702 | testrecipe = 'base-files' | 857 | testrecipe = 'base-files' |
@@ -763,6 +918,122 @@ class DevtoolModifyTests(DevtoolBase): | |||
763 | # Try building | 918 | # Try building |
764 | bitbake(testrecipe) | 919 | bitbake(testrecipe) |
765 | 920 | ||
921 | def test_devtool_modify_git_no_extract(self): | ||
922 | # Check preconditions | ||
923 | testrecipe = 'psplash' | ||
924 | src_uri = get_bb_var('SRC_URI', testrecipe) | ||
925 | self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe) | ||
926 | # Clean up anything in the workdir/sysroot/sstate cache | ||
927 | bitbake('%s -c cleansstate' % testrecipe) | ||
928 | # Try modifying a recipe | ||
929 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
930 | self.track_for_cleanup(tempdir) | ||
931 | self.track_for_cleanup(self.workspacedir) | ||
932 | self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) | ||
933 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
934 | result = runCmd('git clone https://git.yoctoproject.org/psplash %s && devtool modify -n %s %s' % (tempdir, testrecipe, tempdir)) | ||
935 | self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output) | ||
936 | matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'psplash_*.bbappend')) | ||
937 | self.assertTrue(matches, 'bbappend not created') | ||
938 | # Test devtool status | ||
939 | result = runCmd('devtool status') | ||
940 | self.assertIn(testrecipe, result.output) | ||
941 | self.assertIn(tempdir, result.output) | ||
942 | |||
943 | def test_devtool_modify_git_crates_subpath(self): | ||
944 | # This tests two things in devtool context: | ||
945 | # - that we support local git dependencies for cargo based recipe | ||
946 | # - that we support patches in SRC_URI when git url contains subpath parameter | ||
947 | |||
948 | # Check preconditions: | ||
949 | # recipe inherits cargo | ||
950 | # git:// uri with a subpath as the main package | ||
951 | # some crate:// in SRC_URI | ||
952 | # others git:// in SRC_URI | ||
953 | # contains a patch | ||
954 | testrecipe = 'hello-rs' | ||
955 | bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'WORKDIR', 'CARGO_HOME'], testrecipe) | ||
956 | recipefile = bb_vars['FILE'] | ||
957 | workdir = bb_vars['WORKDIR'] | ||
958 | cargo_home = bb_vars['CARGO_HOME'] | ||
959 | src_uri = bb_vars['SRC_URI'].split() | ||
960 | self.assertTrue(src_uri[0].startswith('git://'), | ||
961 | 'This test expects the %s recipe to have a git repo has its main uri' % testrecipe) | ||
962 | self.assertIn(';subpath=', src_uri[0], | ||
963 | 'This test expects the %s recipe to have a git uri with subpath' % testrecipe) | ||
964 | self.assertTrue(any([uri.startswith('crate://') for uri in src_uri]), | ||
965 | 'This test expects the %s recipe to have some crates in its src uris' % testrecipe) | ||
966 | self.assertGreaterEqual(sum(map(lambda x:x.startswith('git://'), src_uri)), 2, | ||
967 | 'This test expects the %s recipe to have several git:// uris' % testrecipe) | ||
968 | self.assertTrue(any([uri.startswith('file://') and '.patch' in uri for uri in src_uri]), | ||
969 | 'This test expects the %s recipe to have a patch in its src uris' % testrecipe) | ||
970 | |||
971 | self._test_recipe_contents(recipefile, {}, ['ptest-cargo']) | ||
972 | |||
973 | # Clean up anything in the workdir/sysroot/sstate cache | ||
974 | bitbake('%s -c cleansstate' % testrecipe) | ||
975 | # Try modifying a recipe | ||
976 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
977 | self.track_for_cleanup(tempdir) | ||
978 | self.track_for_cleanup(self.workspacedir) | ||
979 | self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) | ||
980 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
981 | result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) | ||
982 | self.assertExists(os.path.join(tempdir, 'Cargo.toml'), 'Extracted source could not be found') | ||
983 | self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output) | ||
984 | matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe)) | ||
985 | self.assertTrue(matches, 'bbappend not created') | ||
986 | # Test devtool status | ||
987 | result = runCmd('devtool status') | ||
988 | self.assertIn(testrecipe, result.output) | ||
989 | self.assertIn(tempdir, result.output) | ||
990 | # Check git repo | ||
991 | self._check_src_repo(tempdir) | ||
992 | # Check that the patch is correctly applied. | ||
993 | # The last commit message in the tree must contain the following note: | ||
994 | # Notes (devtool): | ||
995 | # original patch: <patchname> | ||
996 | # .. | ||
997 | patchname = None | ||
998 | for uri in src_uri: | ||
999 | if uri.startswith('file://') and '.patch' in uri: | ||
1000 | patchname = uri.replace("file://", "").partition('.patch')[0] + '.patch' | ||
1001 | self.assertIsNotNone(patchname) | ||
1002 | result = runCmd('git -C %s log -1' % tempdir) | ||
1003 | self.assertIn("Notes (devtool):\n original patch: %s" % patchname, result.output) | ||
1004 | |||
1005 | # Configure the recipe to check that the git dependencies are correctly patched in cargo config | ||
1006 | bitbake('-c configure %s' % testrecipe) | ||
1007 | |||
1008 | cargo_config_path = os.path.join(cargo_home, 'config') | ||
1009 | with open(cargo_config_path, "r") as f: | ||
1010 | cargo_config_contents = [line.strip('\n') for line in f.readlines()] | ||
1011 | |||
1012 | # Get back git dependencies of the recipe (ignoring the main one) | ||
1013 | # and check that they are all correctly patched to be fetched locally | ||
1014 | git_deps = [uri for uri in src_uri if uri.startswith("git://")][1:] | ||
1015 | for git_dep in git_deps: | ||
1016 | raw_url, _, raw_parms = git_dep.partition(";") | ||
1017 | parms = {} | ||
1018 | for parm in raw_parms.split(";"): | ||
1019 | name_parm, _, value_parm = parm.partition('=') | ||
1020 | parms[name_parm]=value_parm | ||
1021 | self.assertIn('protocol', parms, 'git dependencies uri should contain the "protocol" parameter') | ||
1022 | self.assertIn('name', parms, 'git dependencies uri should contain the "name" parameter') | ||
1023 | self.assertIn('destsuffix', parms, 'git dependencies uri should contain the "destsuffix" parameter') | ||
1024 | self.assertIn('type', parms, 'git dependencies uri should contain the "type" parameter') | ||
1025 | self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"') | ||
1026 | raw_url = raw_url.replace("git://", '%s://' % parms['protocol']) | ||
1027 | patch_line = '[patch."%s"]' % raw_url | ||
1028 | path_patched = os.path.join(workdir, parms['destsuffix']) | ||
1029 | path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched) | ||
1030 | # Would have been better to use tomllib to read this file :/ | ||
1031 | self.assertIn(patch_line, cargo_config_contents) | ||
1032 | self.assertIn(path_override_line, cargo_config_contents) | ||
1033 | |||
1034 | # Try to package the recipe | ||
1035 | bitbake('-c package_qa %s' % testrecipe) | ||
1036 | |||
766 | def test_devtool_modify_localfiles(self): | 1037 | def test_devtool_modify_localfiles(self): |
767 | # Check preconditions | 1038 | # Check preconditions |
768 | testrecipe = 'lighttpd' | 1039 | testrecipe = 'lighttpd' |
@@ -828,12 +1099,43 @@ class DevtoolModifyTests(DevtoolBase): | |||
828 | runCmd('git -C %s checkout %s' % (tempdir, branch)) | 1099 | runCmd('git -C %s checkout %s' % (tempdir, branch)) |
829 | with open(source, "rt") as f: | 1100 | with open(source, "rt") as f: |
830 | content = f.read() | 1101 | content = f.read() |
831 | self.assertEquals(content, expected) | 1102 | self.assertEqual(content, expected) |
832 | check('devtool', 'This is a test for something\n') | 1103 | if self.td["MACHINE"] == "qemux86": |
1104 | check('devtool', 'This is a test for qemux86\n') | ||
1105 | elif self.td["MACHINE"] == "qemuarm": | ||
1106 | check('devtool', 'This is a test for qemuarm\n') | ||
1107 | else: | ||
1108 | check('devtool', 'This is a test for something\n') | ||
833 | check('devtool-no-overrides', 'This is a test for something\n') | 1109 | check('devtool-no-overrides', 'This is a test for something\n') |
834 | check('devtool-override-qemuarm', 'This is a test for qemuarm\n') | 1110 | check('devtool-override-qemuarm', 'This is a test for qemuarm\n') |
835 | check('devtool-override-qemux86', 'This is a test for qemux86\n') | 1111 | check('devtool-override-qemux86', 'This is a test for qemux86\n') |
836 | 1112 | ||
1113 | def test_devtool_modify_multiple_sources(self): | ||
1114 | # This test check that recipes fetching several sources can be used with devtool modify/build | ||
1115 | # Check preconditions | ||
1116 | testrecipe = 'bzip2' | ||
1117 | src_uri = get_bb_var('SRC_URI', testrecipe) | ||
1118 | src1 = 'https://' in src_uri | ||
1119 | src2 = 'git://' in src_uri | ||
1120 | self.assertTrue(src1 and src2, 'This test expects the %s recipe to fetch both a git source and a tarball and it seems that it no longer does' % testrecipe) | ||
1121 | # Clean up anything in the workdir/sysroot/sstate cache | ||
1122 | bitbake('%s -c cleansstate' % testrecipe) | ||
1123 | # Try modifying a recipe | ||
1124 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
1125 | self.track_for_cleanup(tempdir) | ||
1126 | self.track_for_cleanup(self.workspacedir) | ||
1127 | self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) | ||
1128 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
1129 | result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) | ||
1130 | self.assertEqual(result.status, 0, "Could not modify recipe %s. Output: %s" % (testrecipe, result.output)) | ||
1131 | # Test devtool status | ||
1132 | result = runCmd('devtool status') | ||
1133 | self.assertIn(testrecipe, result.output) | ||
1134 | self.assertIn(tempdir, result.output) | ||
1135 | # Try building | ||
1136 | result = bitbake(testrecipe) | ||
1137 | self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output)) | ||
1138 | |||
837 | class DevtoolUpdateTests(DevtoolBase): | 1139 | class DevtoolUpdateTests(DevtoolBase): |
838 | 1140 | ||
839 | def test_devtool_update_recipe(self): | 1141 | def test_devtool_update_recipe(self): |
@@ -863,14 +1165,15 @@ class DevtoolUpdateTests(DevtoolBase): | |||
863 | result = runCmd('git commit -m "Add a new file"', cwd=tempdir) | 1165 | result = runCmd('git commit -m "Add a new file"', cwd=tempdir) |
864 | self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) | 1166 | self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) |
865 | result = runCmd('devtool update-recipe %s' % testrecipe) | 1167 | result = runCmd('devtool update-recipe %s' % testrecipe) |
1168 | result = runCmd('git add minicom', cwd=os.path.dirname(recipefile)) | ||
866 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), | 1169 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), |
867 | ('??', '.*/0001-Change-the-README.patch$'), | 1170 | ('A ', '.*/0001-Change-the-README.patch$'), |
868 | ('??', '.*/0002-Add-a-new-file.patch$')] | 1171 | ('A ', '.*/0002-Add-a-new-file.patch$')] |
869 | self._check_repo_status(os.path.dirname(recipefile), expected_status) | 1172 | self._check_repo_status(os.path.dirname(recipefile), expected_status) |
870 | 1173 | ||
871 | def test_devtool_update_recipe_git(self): | 1174 | def test_devtool_update_recipe_git(self): |
872 | # Check preconditions | 1175 | # Check preconditions |
873 | testrecipe = 'mtd-utils' | 1176 | testrecipe = 'mtd-utils-selftest' |
874 | bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) | 1177 | bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) |
875 | recipefile = bb_vars['FILE'] | 1178 | recipefile = bb_vars['FILE'] |
876 | src_uri = bb_vars['SRC_URI'] | 1179 | src_uri = bb_vars['SRC_URI'] |
@@ -904,28 +1207,12 @@ class DevtoolUpdateTests(DevtoolBase): | |||
904 | self._check_repo_status(os.path.dirname(recipefile), expected_status) | 1207 | self._check_repo_status(os.path.dirname(recipefile), expected_status) |
905 | 1208 | ||
906 | result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile)) | 1209 | result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile)) |
907 | addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"'] | 1210 | addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master"'] |
908 | srcurilines = src_uri.split() | 1211 | srcurilines = src_uri.split() |
909 | srcurilines[0] = 'SRC_URI = "' + srcurilines[0] | 1212 | srcurilines[0] = 'SRC_URI = "' + srcurilines[0] |
910 | srcurilines.append('"') | 1213 | srcurilines.append('"') |
911 | removelines = ['SRCREV = ".*"'] + srcurilines | 1214 | removelines = ['SRCREV = ".*"'] + srcurilines |
912 | for line in result.output.splitlines(): | 1215 | self._check_diff(result.output, addlines, removelines) |
913 | if line.startswith('+++') or line.startswith('---'): | ||
914 | continue | ||
915 | elif line.startswith('+'): | ||
916 | matched = False | ||
917 | for item in addlines: | ||
918 | if re.match(item, line[1:].strip()): | ||
919 | matched = True | ||
920 | break | ||
921 | self.assertTrue(matched, 'Unexpected diff add line: %s' % line) | ||
922 | elif line.startswith('-'): | ||
923 | matched = False | ||
924 | for item in removelines: | ||
925 | if re.match(item, line[1:].strip()): | ||
926 | matched = True | ||
927 | break | ||
928 | self.assertTrue(matched, 'Unexpected diff remove line: %s' % line) | ||
929 | # Now try with auto mode | 1216 | # Now try with auto mode |
930 | runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile))) | 1217 | runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile))) |
931 | result = runCmd('devtool update-recipe %s' % testrecipe) | 1218 | result = runCmd('devtool update-recipe %s' % testrecipe) |
@@ -975,7 +1262,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
975 | self.assertExists(patchfile, 'Patch file not created') | 1262 | self.assertExists(patchfile, 'Patch file not created') |
976 | 1263 | ||
977 | # Check bbappend contents | 1264 | # Check bbappend contents |
978 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 1265 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
979 | '\n', | 1266 | '\n', |
980 | 'SRC_URI += "file://0001-Add-our-custom-version.patch"\n', | 1267 | 'SRC_URI += "file://0001-Add-our-custom-version.patch"\n', |
981 | '\n'] | 1268 | '\n'] |
@@ -990,7 +1277,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
990 | result = runCmd('git reset HEAD^', cwd=tempsrcdir) | 1277 | result = runCmd('git reset HEAD^', cwd=tempsrcdir) |
991 | result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) | 1278 | result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) |
992 | self.assertNotExists(patchfile, 'Patch file not deleted') | 1279 | self.assertNotExists(patchfile, 'Patch file not deleted') |
993 | expectedlines2 = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 1280 | expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
994 | '\n'] | 1281 | '\n'] |
995 | with open(bbappendfile, 'r') as f: | 1282 | with open(bbappendfile, 'r') as f: |
996 | self.assertEqual(expectedlines2, f.readlines()) | 1283 | self.assertEqual(expectedlines2, f.readlines()) |
@@ -1007,10 +1294,11 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1007 | 1294 | ||
1008 | def test_devtool_update_recipe_append_git(self): | 1295 | def test_devtool_update_recipe_append_git(self): |
1009 | # Check preconditions | 1296 | # Check preconditions |
1010 | testrecipe = 'mtd-utils' | 1297 | testrecipe = 'mtd-utils-selftest' |
1011 | bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) | 1298 | bb_vars = get_bb_vars(['FILE', 'SRC_URI', 'LAYERSERIES_CORENAMES'], testrecipe) |
1012 | recipefile = bb_vars['FILE'] | 1299 | recipefile = bb_vars['FILE'] |
1013 | src_uri = bb_vars['SRC_URI'] | 1300 | src_uri = bb_vars['SRC_URI'] |
1301 | corenames = bb_vars['LAYERSERIES_CORENAMES'] | ||
1014 | self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe) | 1302 | self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe) |
1015 | for entry in src_uri.split(): | 1303 | for entry in src_uri.split(): |
1016 | if entry.startswith('git://'): | 1304 | if entry.startswith('git://'): |
@@ -1041,7 +1329,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1041 | f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n') | 1329 | f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n') |
1042 | f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n') | 1330 | f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n') |
1043 | f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n') | 1331 | f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n') |
1044 | f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "${LAYERSERIES_COMPAT_core}"\n') | 1332 | f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "%s"\n' % corenames) |
1045 | self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir) | 1333 | self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir) |
1046 | result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir) | 1334 | result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir) |
1047 | # Create the bbappend | 1335 | # Create the bbappend |
@@ -1117,14 +1405,30 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1117 | runCmd('echo "Bar" > new-file', cwd=tempdir) | 1405 | runCmd('echo "Bar" > new-file', cwd=tempdir) |
1118 | runCmd('git add new-file', cwd=tempdir) | 1406 | runCmd('git add new-file', cwd=tempdir) |
1119 | runCmd('git commit -m "Add new file"', cwd=tempdir) | 1407 | runCmd('git commit -m "Add new file"', cwd=tempdir) |
1120 | self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' % | ||
1121 | os.path.dirname(recipefile)) | ||
1122 | runCmd('devtool update-recipe %s' % testrecipe) | 1408 | runCmd('devtool update-recipe %s' % testrecipe) |
1123 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), | 1409 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), |
1124 | (' M', '.*/makedevs/makedevs.c$'), | 1410 | (' M', '.*/makedevs/makedevs.c$'), |
1125 | ('??', '.*/makedevs/new-local$'), | 1411 | ('??', '.*/makedevs/new-local$'), |
1126 | ('??', '.*/makedevs/0001-Add-new-file.patch$')] | 1412 | ('??', '.*/makedevs/0001-Add-new-file.patch$')] |
1127 | self._check_repo_status(os.path.dirname(recipefile), expected_status) | 1413 | self._check_repo_status(os.path.dirname(recipefile), expected_status) |
1414 | # Now try to update recipe in another layer, so first, clean it | ||
1415 | runCmd('cd %s; git clean -fd .; git checkout .' % os.path.dirname(recipefile)) | ||
1416 | # Create a temporary layer and add it to bblayers.conf | ||
1417 | self._create_temp_layer(templayerdir, True, 'templayer') | ||
1418 | # Update recipe in templayer | ||
1419 | result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) | ||
1420 | self.assertNotIn('WARNING:', result.output) | ||
1421 | # Check recipe is still clean | ||
1422 | self._check_repo_status(os.path.dirname(recipefile), []) | ||
1423 | splitpath = os.path.dirname(recipefile).split(os.sep) | ||
1424 | appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1]) | ||
1425 | bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir) | ||
1426 | patchfile = os.path.join(appenddir, testrecipe, '0001-Add-new-file.patch') | ||
1427 | new_local_file = os.path.join(appenddir, testrecipe, 'new_local') | ||
1428 | local_file = os.path.join(appenddir, testrecipe, 'makedevs.c') | ||
1429 | self.assertExists(patchfile, 'Patch file 0001-Add-new-file.patch not created') | ||
1430 | self.assertExists(local_file, 'File makedevs.c not created') | ||
1431 | self.assertExists(patchfile, 'File new_local not created') | ||
1128 | 1432 | ||
1129 | def test_devtool_update_recipe_local_files_2(self): | 1433 | def test_devtool_update_recipe_local_files_2(self): |
1130 | """Check local source files support when oe-local-files is in Git""" | 1434 | """Check local source files support when oe-local-files is in Git""" |
@@ -1259,7 +1563,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1259 | # Modify one file | 1563 | # Modify one file |
1260 | srctree = os.path.join(self.workspacedir, 'sources', testrecipe) | 1564 | srctree = os.path.join(self.workspacedir, 'sources', testrecipe) |
1261 | runCmd('echo "Another line" >> README', cwd=srctree) | 1565 | runCmd('echo "Another line" >> README', cwd=srctree) |
1262 | runCmd('git commit -a --amend --no-edit', cwd=srctree) | 1566 | runCmd('git commit -a --amend --no-edit --no-verify', cwd=srctree) |
1263 | self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) | 1567 | self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) |
1264 | result = runCmd('devtool update-recipe %s' % testrecipe) | 1568 | result = runCmd('devtool update-recipe %s' % testrecipe) |
1265 | expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)] | 1569 | expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)] |
@@ -1295,6 +1599,121 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1295 | expected_status = [] | 1599 | expected_status = [] |
1296 | self._check_repo_status(os.path.dirname(recipefile), expected_status) | 1600 | self._check_repo_status(os.path.dirname(recipefile), expected_status) |
1297 | 1601 | ||
1602 | def test_devtool_finish_modify_git_subdir(self): | ||
1603 | # Check preconditions | ||
1604 | testrecipe = 'dos2unix' | ||
1605 | self.append_config('ERROR_QA:remove:pn-dos2unix = "patch-status"\n') | ||
1606 | bb_vars = get_bb_vars(['SRC_URI', 'S', 'WORKDIR', 'FILE'], testrecipe) | ||
1607 | self.assertIn('git://', bb_vars['SRC_URI'], 'This test expects the %s recipe to be a git recipe' % testrecipe) | ||
1608 | workdir_git = '%s/git/' % bb_vars['WORKDIR'] | ||
1609 | if not bb_vars['S'].startswith(workdir_git): | ||
1610 | self.fail('This test expects the %s recipe to be building from a subdirectory of the git repo' % testrecipe) | ||
1611 | subdir = bb_vars['S'].split(workdir_git, 1)[1] | ||
1612 | # Clean up anything in the workdir/sysroot/sstate cache | ||
1613 | bitbake('%s -c cleansstate' % testrecipe) | ||
1614 | # Try modifying a recipe | ||
1615 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
1616 | self.track_for_cleanup(tempdir) | ||
1617 | self.track_for_cleanup(self.workspacedir) | ||
1618 | self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) | ||
1619 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
1620 | result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) | ||
1621 | testsrcfile = os.path.join(tempdir, subdir, 'dos2unix.c') | ||
1622 | self.assertExists(testsrcfile, 'Extracted source could not be found') | ||
1623 | self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output) | ||
1624 | self.assertNotExists(os.path.join(tempdir, subdir, '.git'), 'Subdirectory has been initialised as a git repo') | ||
1625 | # Check git repo | ||
1626 | self._check_src_repo(tempdir) | ||
1627 | # Modify file | ||
1628 | runCmd("sed -i '1s:^:/* Add a comment */\\n:' %s" % testsrcfile) | ||
1629 | result = runCmd('git commit -a -m "Add a comment"', cwd=tempdir) | ||
1630 | # Now try updating original recipe | ||
1631 | recipefile = bb_vars['FILE'] | ||
1632 | recipedir = os.path.dirname(recipefile) | ||
1633 | self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe)) | ||
1634 | result = runCmd('devtool update-recipe %s' % testrecipe) | ||
1635 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), | ||
1636 | ('??', '.*/%s/%s/$' % (testrecipe, testrecipe))] | ||
1637 | self._check_repo_status(os.path.dirname(recipefile), expected_status) | ||
1638 | result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile)) | ||
1639 | removelines = ['SRC_URI = "git://.*"'] | ||
1640 | addlines = [ | ||
1641 | 'SRC_URI = "git://.* \\\\', | ||
1642 | 'file://0001-Add-a-comment.patch;patchdir=.. \\\\', | ||
1643 | '"' | ||
1644 | ] | ||
1645 | self._check_diff(result.output, addlines, removelines) | ||
1646 | # Put things back so we can run devtool finish on a different layer | ||
1647 | runCmd('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe)) | ||
1648 | # Run devtool finish | ||
1649 | res = re.search('recipes-.*', recipedir) | ||
1650 | self.assertTrue(res, 'Unable to find recipe subdirectory') | ||
1651 | recipesubdir = res[0] | ||
1652 | self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, recipesubdir)) | ||
1653 | result = runCmd('devtool finish %s meta-selftest' % testrecipe) | ||
1654 | # Check bbappend file contents | ||
1655 | appendfn = os.path.join(self.testlayer_path, recipesubdir, '%s_%%.bbappend' % testrecipe) | ||
1656 | with open(appendfn, 'r') as f: | ||
1657 | appendlines = f.readlines() | ||
1658 | expected_appendlines = [ | ||
1659 | 'FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', | ||
1660 | '\n', | ||
1661 | 'SRC_URI += "file://0001-Add-a-comment.patch;patchdir=.."\n', | ||
1662 | '\n' | ||
1663 | ] | ||
1664 | self.assertEqual(appendlines, expected_appendlines) | ||
1665 | self.assertExists(os.path.join(os.path.dirname(appendfn), testrecipe, '0001-Add-a-comment.patch')) | ||
1666 | # Try building | ||
1667 | bitbake('%s -c patch' % testrecipe) | ||
1668 | |||
1669 | def test_devtool_git_submodules(self): | ||
1670 | # This tests if we can add a patch in a git submodule and extract it properly using devtool finish | ||
1671 | # Check preconditions | ||
1672 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') | ||
1673 | self.track_for_cleanup(self.workspacedir) | ||
1674 | recipe = 'vulkan-samples' | ||
1675 | src_uri = get_bb_var('SRC_URI', recipe) | ||
1676 | self.assertIn('gitsm://', src_uri, 'This test expects the %s recipe to be a git recipe with submodules' % recipe) | ||
1677 | oldrecipefile = get_bb_var('FILE', recipe) | ||
1678 | recipedir = os.path.dirname(oldrecipefile) | ||
1679 | result = runCmd('git status --porcelain .', cwd=recipedir) | ||
1680 | if result.output.strip(): | ||
1681 | self.fail('Recipe directory for %s contains uncommitted changes' % recipe) | ||
1682 | self.assertIn('/meta/', recipedir) | ||
1683 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
1684 | self.track_for_cleanup(tempdir) | ||
1685 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
1686 | result = runCmd('devtool modify %s %s' % (recipe, tempdir)) | ||
1687 | self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found') | ||
1688 | # Test devtool status | ||
1689 | result = runCmd('devtool status') | ||
1690 | self.assertIn(recipe, result.output) | ||
1691 | self.assertIn(tempdir, result.output) | ||
1692 | # Modify a source file in a submodule, (grab the first one) | ||
1693 | result = runCmd('git submodule --quiet foreach \'echo $sm_path\'', cwd=tempdir) | ||
1694 | submodule = result.output.splitlines()[0] | ||
1695 | submodule_path = os.path.join(tempdir, submodule) | ||
1696 | runCmd('echo "#This is a first comment" >> testfile', cwd=submodule_path) | ||
1697 | result = runCmd('git status --porcelain . ', cwd=submodule_path) | ||
1698 | self.assertIn("testfile", result.output) | ||
1699 | runCmd('git add testfile; git commit -m "Adding a new file"', cwd=submodule_path) | ||
1700 | |||
1701 | # Try finish to the original layer | ||
1702 | self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir)) | ||
1703 | runCmd('devtool finish -f %s meta' % recipe) | ||
1704 | result = runCmd('devtool status') | ||
1705 | self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t') | ||
1706 | self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish') | ||
1707 | expected_status = [(' M', '.*/%s$' % os.path.basename(oldrecipefile)), | ||
1708 | ('??', '.*/.*-Adding-a-new-file.patch$')] | ||
1709 | self._check_repo_status(recipedir, expected_status) | ||
1710 | # Make sure the patch is added to the recipe with the correct "patchdir" option | ||
1711 | result = runCmd('git diff .', cwd=recipedir) | ||
1712 | addlines = [ | ||
1713 | 'file://0001-Adding-a-new-file.patch;patchdir=%s \\\\' % submodule | ||
1714 | ] | ||
1715 | self._check_diff(result.output, addlines, []) | ||
1716 | |||
1298 | class DevtoolExtractTests(DevtoolBase): | 1717 | class DevtoolExtractTests(DevtoolBase): |
1299 | 1718 | ||
1300 | def test_devtool_extract(self): | 1719 | def test_devtool_extract(self): |
@@ -1343,29 +1762,9 @@ class DevtoolExtractTests(DevtoolBase): | |||
1343 | matches2 = glob.glob(stampprefix2 + '*') | 1762 | matches2 = glob.glob(stampprefix2 + '*') |
1344 | self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2) | 1763 | self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2) |
1345 | 1764 | ||
1765 | @OETestTag("runqemu") | ||
1346 | def test_devtool_deploy_target(self): | 1766 | def test_devtool_deploy_target(self): |
1347 | # NOTE: Whilst this test would seemingly be better placed as a runtime test, | 1767 | self._check_runqemu_prerequisites() |
1348 | # unfortunately the runtime tests run under bitbake and you can't run | ||
1349 | # devtool within bitbake (since devtool needs to run bitbake itself). | ||
1350 | # Additionally we are testing build-time functionality as well, so | ||
1351 | # really this has to be done as an oe-selftest test. | ||
1352 | # | ||
1353 | # Check preconditions | ||
1354 | machine = get_bb_var('MACHINE') | ||
1355 | if not machine.startswith('qemu'): | ||
1356 | self.skipTest('This test only works with qemu machines') | ||
1357 | if not os.path.exists('/etc/runqemu-nosudo'): | ||
1358 | self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test') | ||
1359 | result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True) | ||
1360 | if result.status != 0: | ||
1361 | result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True) | ||
1362 | if result.status != 0: | ||
1363 | self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output) | ||
1364 | for line in result.output.splitlines(): | ||
1365 | if line.startswith('tap'): | ||
1366 | break | ||
1367 | else: | ||
1368 | self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test') | ||
1369 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') | 1768 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') |
1370 | # Definitions | 1769 | # Definitions |
1371 | testrecipe = 'mdadm' | 1770 | testrecipe = 'mdadm' |
@@ -1463,6 +1862,14 @@ class DevtoolExtractTests(DevtoolBase): | |||
1463 | 1862 | ||
1464 | class DevtoolUpgradeTests(DevtoolBase): | 1863 | class DevtoolUpgradeTests(DevtoolBase): |
1465 | 1864 | ||
1865 | def setUp(self): | ||
1866 | super().setUp() | ||
1867 | try: | ||
1868 | runCmd("git config --global user.name") | ||
1869 | runCmd("git config --global user.email") | ||
1870 | except: | ||
1871 | self.skip("Git user.name and user.email must be set") | ||
1872 | |||
1466 | def test_devtool_upgrade(self): | 1873 | def test_devtool_upgrade(self): |
1467 | # Check preconditions | 1874 | # Check preconditions |
1468 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') | 1875 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') |
@@ -1543,6 +1950,54 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1543 | self.assertNotIn(recipe, result.output) | 1950 | self.assertNotIn(recipe, result.output) |
1544 | self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting') | 1951 | self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting') |
1545 | 1952 | ||
1953 | def test_devtool_upgrade_drop_md5sum(self): | ||
1954 | # Check preconditions | ||
1955 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') | ||
1956 | self.track_for_cleanup(self.workspacedir) | ||
1957 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
1958 | # For the moment, we are using a real recipe. | ||
1959 | recipe = 'devtool-upgrade-test3' | ||
1960 | version = '1.6.0' | ||
1961 | oldrecipefile = get_bb_var('FILE', recipe) | ||
1962 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
1963 | self.track_for_cleanup(tempdir) | ||
1964 | # Check upgrade. Code does not check if new PV is older or newer that current PV, so, it may be that | ||
1965 | # we are downgrading instead of upgrading. | ||
1966 | result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version)) | ||
1967 | # Check new recipe file is present | ||
1968 | newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version)) | ||
1969 | self.assertExists(newrecipefile, 'Recipe file should exist after upgrade') | ||
1970 | # Check recipe got changed as expected | ||
1971 | with open(oldrecipefile + '.upgraded', 'r') as f: | ||
1972 | desiredlines = f.readlines() | ||
1973 | with open(newrecipefile, 'r') as f: | ||
1974 | newlines = f.readlines() | ||
1975 | self.assertEqual(desiredlines, newlines) | ||
1976 | |||
1977 | def test_devtool_upgrade_all_checksums(self): | ||
1978 | # Check preconditions | ||
1979 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') | ||
1980 | self.track_for_cleanup(self.workspacedir) | ||
1981 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
1982 | # For the moment, we are using a real recipe. | ||
1983 | recipe = 'devtool-upgrade-test4' | ||
1984 | version = '1.6.0' | ||
1985 | oldrecipefile = get_bb_var('FILE', recipe) | ||
1986 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
1987 | self.track_for_cleanup(tempdir) | ||
1988 | # Check upgrade. Code does not check if new PV is older or newer that current PV, so, it may be that | ||
1989 | # we are downgrading instead of upgrading. | ||
1990 | result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version)) | ||
1991 | # Check new recipe file is present | ||
1992 | newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version)) | ||
1993 | self.assertExists(newrecipefile, 'Recipe file should exist after upgrade') | ||
1994 | # Check recipe got changed as expected | ||
1995 | with open(oldrecipefile + '.upgraded', 'r') as f: | ||
1996 | desiredlines = f.readlines() | ||
1997 | with open(newrecipefile, 'r') as f: | ||
1998 | newlines = f.readlines() | ||
1999 | self.assertEqual(desiredlines, newlines) | ||
2000 | |||
1546 | def test_devtool_layer_plugins(self): | 2001 | def test_devtool_layer_plugins(self): |
1547 | """Test that devtool can use plugins from other layers. | 2002 | """Test that devtool can use plugins from other layers. |
1548 | 2003 | ||
@@ -1561,7 +2016,15 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1561 | for p in paths: | 2016 | for p in paths: |
1562 | dstdir = os.path.join(dstdir, p) | 2017 | dstdir = os.path.join(dstdir, p) |
1563 | if not os.path.exists(dstdir): | 2018 | if not os.path.exists(dstdir): |
1564 | os.makedirs(dstdir) | 2019 | try: |
2020 | os.makedirs(dstdir) | ||
2021 | except PermissionError: | ||
2022 | return False | ||
2023 | except OSError as e: | ||
2024 | if e.errno == errno.EROFS: | ||
2025 | return False | ||
2026 | else: | ||
2027 | raise e | ||
1565 | if p == "lib": | 2028 | if p == "lib": |
1566 | # Can race with other tests | 2029 | # Can race with other tests |
1567 | self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir) | 2030 | self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir) |
@@ -1569,8 +2032,12 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1569 | self.track_for_cleanup(dstdir) | 2032 | self.track_for_cleanup(dstdir) |
1570 | dstfile = os.path.join(dstdir, os.path.basename(srcfile)) | 2033 | dstfile = os.path.join(dstdir, os.path.basename(srcfile)) |
1571 | if srcfile != dstfile: | 2034 | if srcfile != dstfile: |
1572 | shutil.copy(srcfile, dstfile) | 2035 | try: |
2036 | shutil.copy(srcfile, dstfile) | ||
2037 | except PermissionError: | ||
2038 | return False | ||
1573 | self.track_for_cleanup(dstfile) | 2039 | self.track_for_cleanup(dstfile) |
2040 | return True | ||
1574 | 2041 | ||
1575 | def test_devtool_load_plugin(self): | 2042 | def test_devtool_load_plugin(self): |
1576 | """Test that devtool loads only the first found plugin in BBPATH.""" | 2043 | """Test that devtool loads only the first found plugin in BBPATH.""" |
@@ -1588,15 +2055,17 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1588 | plugincontent = fh.readlines() | 2055 | plugincontent = fh.readlines() |
1589 | try: | 2056 | try: |
1590 | self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') | 2057 | self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') |
1591 | for path in searchpath: | 2058 | searchpath = [ |
1592 | self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool') | 2059 | path for path in searchpath |
2060 | if self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool') | ||
2061 | ] | ||
1593 | result = runCmd("devtool --quiet count") | 2062 | result = runCmd("devtool --quiet count") |
1594 | self.assertEqual(result.output, '1') | 2063 | self.assertEqual(result.output, '1') |
1595 | result = runCmd("devtool --quiet multiloaded") | 2064 | result = runCmd("devtool --quiet multiloaded") |
1596 | self.assertEqual(result.output, "no") | 2065 | self.assertEqual(result.output, "no") |
1597 | for path in searchpath: | 2066 | for path in searchpath: |
1598 | result = runCmd("devtool --quiet bbdir") | 2067 | result = runCmd("devtool --quiet bbdir") |
1599 | self.assertEqual(result.output, path) | 2068 | self.assertEqual(os.path.realpath(result.output), os.path.realpath(path)) |
1600 | os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py')) | 2069 | os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py')) |
1601 | finally: | 2070 | finally: |
1602 | with open(srcfile, 'w') as fh: | 2071 | with open(srcfile, 'w') as fh: |
@@ -1777,6 +2246,52 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1777 | if files: | 2246 | if files: |
1778 | self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files)) | 2247 | self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files)) |
1779 | 2248 | ||
2249 | def test_devtool_finish_update_patch(self): | ||
2250 | # This test uses a modified version of the sysdig recipe from meta-oe. | ||
2251 | # - The patches have been renamed. | ||
2252 | # - The dependencies are commented out since the recipe is not being | ||
2253 | # built. | ||
2254 | # | ||
2255 | # The sysdig recipe is interesting in that it fetches two different Git | ||
2256 | # repositories, and there are patches for both. This leads to that | ||
2257 | # devtool will create ignore commits as it uses Git submodules to keep | ||
2258 | # track of the second repository. | ||
2259 | # | ||
2260 | # This test will verify that the ignored commits actually are ignored | ||
2261 | # when a commit in between is modified. It will also verify that the | ||
2262 | # updated patch keeps its original name. | ||
2263 | |||
2264 | # Check preconditions | ||
2265 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') | ||
2266 | # Try modifying a recipe | ||
2267 | self.track_for_cleanup(self.workspacedir) | ||
2268 | recipe = 'sysdig-selftest' | ||
2269 | recipefile = get_bb_var('FILE', recipe) | ||
2270 | recipedir = os.path.dirname(recipefile) | ||
2271 | result = runCmd('git status --porcelain .', cwd=recipedir) | ||
2272 | if result.output.strip(): | ||
2273 | self.fail('Recipe directory for %s contains uncommitted changes' % recipe) | ||
2274 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
2275 | self.track_for_cleanup(tempdir) | ||
2276 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
2277 | result = runCmd('devtool modify %s %s' % (recipe, tempdir)) | ||
2278 | self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (recipedir, recipe, recipe, os.path.basename(recipefile))) | ||
2279 | self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found') | ||
2280 | # Make a change to one of the existing commits | ||
2281 | result = runCmd('echo "# A comment " >> CMakeLists.txt', cwd=tempdir) | ||
2282 | result = runCmd('git status --porcelain', cwd=tempdir) | ||
2283 | self.assertIn('M CMakeLists.txt', result.output) | ||
2284 | result = runCmd('git commit --fixup HEAD^ CMakeLists.txt', cwd=tempdir) | ||
2285 | result = runCmd('git show -s --format=%s', cwd=tempdir) | ||
2286 | self.assertIn('fixup! cmake: Pass PROBE_NAME via CFLAGS', result.output) | ||
2287 | result = runCmd('GIT_SEQUENCE_EDITOR=true git rebase -i --autosquash devtool-base', cwd=tempdir) | ||
2288 | result = runCmd('devtool finish %s meta-selftest' % recipe) | ||
2289 | result = runCmd('devtool status') | ||
2290 | self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t') | ||
2291 | self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish') | ||
2292 | expected_status = [(' M', '.*/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch$')] | ||
2293 | self._check_repo_status(recipedir, expected_status) | ||
2294 | |||
1780 | def test_devtool_rename(self): | 2295 | def test_devtool_rename(self): |
1781 | # Check preconditions | 2296 | # Check preconditions |
1782 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') | 2297 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') |
@@ -1813,7 +2328,6 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1813 | self._test_recipe_contents(newrecipefile, checkvars, []) | 2328 | self._test_recipe_contents(newrecipefile, checkvars, []) |
1814 | # Try again - change just name this time | 2329 | # Try again - change just name this time |
1815 | result = runCmd('devtool reset -n %s' % newrecipename) | 2330 | result = runCmd('devtool reset -n %s' % newrecipename) |
1816 | shutil.rmtree(newsrctree) | ||
1817 | add_recipe() | 2331 | add_recipe() |
1818 | newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever)) | 2332 | newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever)) |
1819 | result = runCmd('devtool rename %s %s' % (recipename, newrecipename)) | 2333 | result = runCmd('devtool rename %s %s' % (recipename, newrecipename)) |
@@ -1826,7 +2340,6 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1826 | self._test_recipe_contents(newrecipefile, checkvars, []) | 2340 | self._test_recipe_contents(newrecipefile, checkvars, []) |
1827 | # Try again - change just version this time | 2341 | # Try again - change just version this time |
1828 | result = runCmd('devtool reset -n %s' % newrecipename) | 2342 | result = runCmd('devtool reset -n %s' % newrecipename) |
1829 | shutil.rmtree(newsrctree) | ||
1830 | add_recipe() | 2343 | add_recipe() |
1831 | newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever)) | 2344 | newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever)) |
1832 | result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever)) | 2345 | result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever)) |
@@ -1858,8 +2371,9 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1858 | Expected: devtool modify is able to checkout the source of the kernel | 2371 | Expected: devtool modify is able to checkout the source of the kernel |
1859 | and modification to the source and configurations are reflected | 2372 | and modification to the source and configurations are reflected |
1860 | when building the kernel. | 2373 | when building the kernel. |
1861 | """ | 2374 | """ |
1862 | kernel_provider = get_bb_var('PREFERRED_PROVIDER_virtual/kernel') | 2375 | kernel_provider = self.td['PREFERRED_PROVIDER_virtual/kernel'] |
2376 | |||
1863 | # Clean up the environment | 2377 | # Clean up the environment |
1864 | bitbake('%s -c clean' % kernel_provider) | 2378 | bitbake('%s -c clean' % kernel_provider) |
1865 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | 2379 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') |
@@ -1886,33 +2400,545 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1886 | self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found') | 2400 | self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found') |
1887 | #Step 4.2 | 2401 | #Step 4.2 |
1888 | configfile = os.path.join(tempdir,'.config') | 2402 | configfile = os.path.join(tempdir,'.config') |
1889 | diff = runCmd('diff %s %s' % (tmpconfig, configfile)) | 2403 | runCmd('diff %s %s' % (tmpconfig, configfile)) |
1890 | self.assertEqual(0,diff.status,'Kernel .config file is not the same using bitbake and devtool') | 2404 | |
1891 | #Step 4.3 | 2405 | #Step 4.3 |
1892 | #NOTE: virtual/kernel is mapped to kernel_provider | 2406 | #NOTE: virtual/kernel is mapped to kernel_provider |
1893 | result = runCmd('devtool build %s' % kernel_provider) | 2407 | runCmd('devtool build %s' % kernel_provider) |
1894 | self.assertEqual(0,result.status,'Cannot build kernel using `devtool build`') | ||
1895 | kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux') | 2408 | kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux') |
1896 | self.assertExists(kernelfile, 'Kernel was not build correctly') | 2409 | self.assertExists(kernelfile, 'Kernel was not build correctly') |
1897 | 2410 | ||
1898 | #Modify the kernel source | 2411 | #Modify the kernel source |
1899 | modfile = os.path.join(tempdir,'arch/x86/boot/header.S') | 2412 | modfile = os.path.join(tempdir, 'init/version.c') |
1900 | modstring = "Use a boot loader. Devtool testing." | 2413 | # Moved to uts.h in 6.1 onwards |
1901 | modapplied = runCmd("sed -i 's/Use a boot loader./%s/' %s" % (modstring, modfile)) | 2414 | modfile2 = os.path.join(tempdir, 'include/linux/uts.h') |
1902 | self.assertEqual(0,modapplied.status,'Modification to %s on kernel source failed' % modfile) | 2415 | runCmd("sed -i 's/Linux/LiNuX/g' %s %s" % (modfile, modfile2)) |
2416 | |||
1903 | #Modify the configuration | 2417 | #Modify the configuration |
1904 | codeconfigfile = os.path.join(tempdir,'.config.new') | 2418 | codeconfigfile = os.path.join(tempdir, '.config.new') |
1905 | modconfopt = "CONFIG_SG_POOL=n" | 2419 | modconfopt = "CONFIG_SG_POOL=n" |
1906 | modconf = runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile)) | 2420 | runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile)) |
1907 | self.assertEqual(0,modconf.status,'Modification to %s failed' % codeconfigfile) | 2421 | |
1908 | #Build again kernel with devtool | 2422 | #Build again kernel with devtool |
1909 | rebuild = runCmd('devtool build %s' % kernel_provider) | 2423 | runCmd('devtool build %s' % kernel_provider) |
1910 | self.assertEqual(0,rebuild.status,'Fail to build kernel after modification of source and config') | 2424 | |
1911 | #Step 4.4 | 2425 | #Step 4.4 |
1912 | bzimagename = 'bzImage-' + get_bb_var('KERNEL_VERSION_NAME', kernel_provider) | 2426 | runCmd("grep '%s' %s" % ('LiNuX', kernelfile)) |
1913 | bzimagefile = os.path.join(get_bb_var('D', kernel_provider),'boot', bzimagename) | 2427 | |
1914 | checkmodcode = runCmd("grep '%s' %s" % (modstring, bzimagefile)) | ||
1915 | self.assertEqual(0,checkmodcode.status,'Modification on kernel source failed') | ||
1916 | #Step 4.5 | 2428 | #Step 4.5 |
1917 | checkmodconfg = runCmd("grep %s %s" % (modconfopt, codeconfigfile)) | 2429 | runCmd("grep %s %s" % (modconfopt, codeconfigfile)) |
1918 | self.assertEqual(0,checkmodconfg.status,'Modification to configuration file failed') | 2430 | |
2431 | |||
2432 | class DevtoolIdeSdkTests(DevtoolBase): | ||
2433 | def _write_bb_config(self, recipe_names): | ||
2434 | """Helper to write the bitbake local.conf file""" | ||
2435 | conf_lines = [ | ||
2436 | 'IMAGE_CLASSES += "image-combined-dbg"', | ||
2437 | 'IMAGE_GEN_DEBUGFS = "1"', | ||
2438 | 'IMAGE_INSTALL:append = " gdbserver %s"' % ' '.join( | ||
2439 | [r + '-ptest' for r in recipe_names]) | ||
2440 | ] | ||
2441 | self.write_config("\n".join(conf_lines)) | ||
2442 | |||
2443 | def _check_workspace(self): | ||
2444 | """Check if a workspace directory is available and setup the cleanup""" | ||
2445 | self.assertTrue(not os.path.exists(self.workspacedir), | ||
2446 | 'This test cannot be run with a workspace directory under the build directory') | ||
2447 | self.track_for_cleanup(self.workspacedir) | ||
2448 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
2449 | |||
2450 | def _workspace_scripts_dir(self, recipe_name): | ||
2451 | return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts')) | ||
2452 | |||
2453 | def _sources_scripts_dir(self, src_dir): | ||
2454 | return os.path.realpath(os.path.join(src_dir, 'oe-scripts')) | ||
2455 | |||
2456 | def _workspace_gdbinit_dir(self, recipe_name): | ||
2457 | return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts', 'gdbinit')) | ||
2458 | |||
2459 | def _sources_gdbinit_dir(self, src_dir): | ||
2460 | return os.path.realpath(os.path.join(src_dir, 'oe-gdbinit')) | ||
2461 | |||
2462 | def _devtool_ide_sdk_recipe(self, recipe_name, build_file, testimage): | ||
2463 | """Setup a recipe for working with devtool ide-sdk | ||
2464 | |||
2465 | Basically devtool modify -x followed by some tests | ||
2466 | """ | ||
2467 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
2468 | self.track_for_cleanup(tempdir) | ||
2469 | self.add_command_to_tearDown('bitbake -c clean %s' % recipe_name) | ||
2470 | |||
2471 | result = runCmd('devtool modify %s -x %s' % (recipe_name, tempdir)) | ||
2472 | self.assertExists(os.path.join(tempdir, build_file), | ||
2473 | 'Extracted source could not be found') | ||
2474 | self.assertExists(os.path.join(self.workspacedir, 'conf', | ||
2475 | 'layer.conf'), 'Workspace directory not created') | ||
2476 | matches = glob.glob(os.path.join(self.workspacedir, | ||
2477 | 'appends', recipe_name + '.bbappend')) | ||
2478 | self.assertTrue(matches, 'bbappend not created %s' % result.output) | ||
2479 | |||
2480 | # Test devtool status | ||
2481 | result = runCmd('devtool status') | ||
2482 | self.assertIn(recipe_name, result.output) | ||
2483 | self.assertIn(tempdir, result.output) | ||
2484 | self._check_src_repo(tempdir) | ||
2485 | |||
2486 | # Usually devtool ide-sdk would initiate the build of the SDK. | ||
2487 | # But there is a circular dependency with starting Qemu and passing the IP of runqemu to devtool ide-sdk. | ||
2488 | if testimage: | ||
2489 | bitbake("%s qemu-native qemu-helper-native" % testimage) | ||
2490 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
2491 | self.add_command_to_tearDown('bitbake -c clean %s' % testimage) | ||
2492 | self.add_command_to_tearDown( | ||
2493 | 'rm -f %s/%s*' % (deploy_dir_image, testimage)) | ||
2494 | |||
2495 | return tempdir | ||
2496 | |||
2497 | def _get_recipe_ids(self, recipe_name): | ||
2498 | """IDs needed to write recipe specific config entries into IDE config files""" | ||
2499 | package_arch = get_bb_var('PACKAGE_ARCH', recipe_name) | ||
2500 | recipe_id = recipe_name + "-" + package_arch | ||
2501 | recipe_id_pretty = recipe_name + ": " + package_arch | ||
2502 | return (recipe_id, recipe_id_pretty) | ||
2503 | |||
2504 | def _verify_install_script_code(self, tempdir, recipe_name): | ||
2505 | """Verify the scripts referred by the tasks.json file are fine. | ||
2506 | |||
2507 | This function does not depend on Qemu. Therefore it verifies the scripts | ||
2508 | exists and the delete step works as expected. But it does not try to | ||
2509 | deploy to Qemu. | ||
2510 | """ | ||
2511 | recipe_id, recipe_id_pretty = self._get_recipe_ids(recipe_name) | ||
2512 | with open(os.path.join(tempdir, '.vscode', 'tasks.json')) as tasks_j: | ||
2513 | tasks_d = json.load(tasks_j) | ||
2514 | tasks = tasks_d["tasks"] | ||
2515 | task_install = next( | ||
2516 | (task for task in tasks if task["label"] == "install && deploy-target %s" % recipe_id_pretty), None) | ||
2517 | self.assertIsNot(task_install, None) | ||
2518 | # execute only the bb_run_do_install script since the deploy would require e.g. Qemu running. | ||
2519 | i_and_d_script = "install_and_deploy_" + recipe_id | ||
2520 | i_and_d_script_path = os.path.join( | ||
2521 | self._workspace_scripts_dir(recipe_name), i_and_d_script) | ||
2522 | self.assertExists(i_and_d_script_path) | ||
2523 | del_script = "delete_package_dirs_" + recipe_id | ||
2524 | del_script_path = os.path.join( | ||
2525 | self._workspace_scripts_dir(recipe_name), del_script) | ||
2526 | self.assertExists(del_script_path) | ||
2527 | runCmd(del_script_path, cwd=tempdir) | ||
2528 | |||
2529 | def _devtool_ide_sdk_qemu(self, tempdir, qemu, recipe_name, example_exe): | ||
2530 | """Verify deployment and execution in Qemu system work for one recipe. | ||
2531 | |||
2532 | This function checks the entire SDK workflow: changing the code, recompiling | ||
2533 | it and deploying it back to Qemu, and checking that the changes have been | ||
2534 | incorporated into the provided binaries. It also runs the tests of the recipe. | ||
2535 | """ | ||
2536 | recipe_id, _ = self._get_recipe_ids(recipe_name) | ||
2537 | i_and_d_script = "install_and_deploy_" + recipe_id | ||
2538 | install_deploy_cmd = os.path.join( | ||
2539 | self._workspace_scripts_dir(recipe_name), i_and_d_script) | ||
2540 | self.assertExists(install_deploy_cmd, | ||
2541 | '%s script not found' % install_deploy_cmd) | ||
2542 | runCmd(install_deploy_cmd) | ||
2543 | |||
2544 | MAGIC_STRING_ORIG = "Magic: 123456789" | ||
2545 | MAGIC_STRING_NEW = "Magic: 987654321" | ||
2546 | ptest_cmd = "ptest-runner " + recipe_name | ||
2547 | |||
2548 | # validate that SSH is working | ||
2549 | status, _ = qemu.run("uname") | ||
2550 | self.assertEqual( | ||
2551 | status, 0, msg="Failed to connect to the SSH server on Qemu") | ||
2552 | |||
2553 | # Verify the unmodified example prints the magic string | ||
2554 | status, output = qemu.run(example_exe) | ||
2555 | self.assertEqual(status, 0, msg="%s failed: %s" % | ||
2556 | (example_exe, output)) | ||
2557 | self.assertIn(MAGIC_STRING_ORIG, output) | ||
2558 | |||
2559 | # Verify the unmodified ptests work | ||
2560 | status, output = qemu.run(ptest_cmd) | ||
2561 | self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output)) | ||
2562 | self.assertIn("PASS: cpp-example-lib", output) | ||
2563 | |||
2564 | # Verify remote debugging works | ||
2565 | self._gdb_cross_debugging( | ||
2566 | qemu, recipe_name, example_exe, MAGIC_STRING_ORIG) | ||
2567 | |||
2568 | # Replace the Magic String in the code, compile and deploy to Qemu | ||
2569 | cpp_example_lib_hpp = os.path.join(tempdir, 'cpp-example-lib.hpp') | ||
2570 | with open(cpp_example_lib_hpp, 'r') as file: | ||
2571 | cpp_code = file.read() | ||
2572 | cpp_code = cpp_code.replace(MAGIC_STRING_ORIG, MAGIC_STRING_NEW) | ||
2573 | with open(cpp_example_lib_hpp, 'w') as file: | ||
2574 | file.write(cpp_code) | ||
2575 | runCmd(install_deploy_cmd, cwd=tempdir) | ||
2576 | |||
2577 | # Verify the modified example prints the modified magic string | ||
2578 | status, output = qemu.run(example_exe) | ||
2579 | self.assertEqual(status, 0, msg="%s failed: %s" % | ||
2580 | (example_exe, output)) | ||
2581 | self.assertNotIn(MAGIC_STRING_ORIG, output) | ||
2582 | self.assertIn(MAGIC_STRING_NEW, output) | ||
2583 | |||
2584 | # Verify the modified example ptests work | ||
2585 | status, output = qemu.run(ptest_cmd) | ||
2586 | self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output)) | ||
2587 | self.assertIn("PASS: cpp-example-lib", output) | ||
2588 | |||
2589 | # Verify remote debugging works wit the modified magic string | ||
2590 | self._gdb_cross_debugging( | ||
2591 | qemu, recipe_name, example_exe, MAGIC_STRING_NEW) | ||
2592 | |||
2593 | def _gdb_cross(self): | ||
2594 | """Verify gdb-cross is provided by devtool ide-sdk""" | ||
2595 | target_arch = self.td["TARGET_ARCH"] | ||
2596 | target_sys = self.td["TARGET_SYS"] | ||
2597 | gdb_recipe = "gdb-cross-" + target_arch | ||
2598 | gdb_binary = target_sys + "-gdb" | ||
2599 | |||
2600 | native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe) | ||
2601 | r = runCmd("%s --version" % gdb_binary, | ||
2602 | native_sysroot=native_sysroot, target_sys=target_sys) | ||
2603 | self.assertEqual(r.status, 0) | ||
2604 | self.assertIn("GNU gdb", r.output) | ||
2605 | |||
2606 | def _gdb_cross_debugging(self, qemu, recipe_name, example_exe, magic_string): | ||
2607 | """Verify gdb-cross is working | ||
2608 | |||
2609 | Test remote debugging: | ||
2610 | break main | ||
2611 | run | ||
2612 | continue | ||
2613 | break CppExample::print_json() | ||
2614 | continue | ||
2615 | print CppExample::test_string.compare("cpp-example-lib Magic: 123456789") | ||
2616 | $1 = 0 | ||
2617 | print CppExample::test_string.compare("cpp-example-lib Magic: 123456789aaa") | ||
2618 | $2 = -3 | ||
2619 | list cpp-example-lib.hpp:13,13 | ||
2620 | 13 inline static const std::string test_string = "cpp-example-lib Magic: 123456789"; | ||
2621 | continue | ||
2622 | """ | ||
2623 | sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' | ||
2624 | gdbserver_script = os.path.join(self._workspace_scripts_dir( | ||
2625 | recipe_name), 'gdbserver_1234_usr-bin-' + example_exe + '_m') | ||
2626 | gdb_script = os.path.join(self._workspace_scripts_dir( | ||
2627 | recipe_name), 'gdb_1234_usr-bin-' + example_exe) | ||
2628 | |||
2629 | # Start a gdbserver | ||
2630 | r = runCmd(gdbserver_script) | ||
2631 | self.assertEqual(r.status, 0) | ||
2632 | |||
2633 | # Check there is a gdbserver running | ||
2634 | r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps')) | ||
2635 | self.assertEqual(r.status, 0) | ||
2636 | self.assertIn("gdbserver ", r.output) | ||
2637 | |||
2638 | # Check the pid file is correct | ||
2639 | test_cmd = "cat /proc/$(cat /tmp/gdbserver_1234_usr-bin-" + \ | ||
2640 | example_exe + "/pid)/cmdline" | ||
2641 | r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, test_cmd)) | ||
2642 | self.assertEqual(r.status, 0) | ||
2643 | self.assertIn("gdbserver", r.output) | ||
2644 | |||
2645 | # Test remote debugging works | ||
2646 | gdb_batch_cmd = " --batch -ex 'break main' -ex 'run'" | ||
2647 | gdb_batch_cmd += " -ex 'break CppExample::print_json()' -ex 'continue'" | ||
2648 | gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %s\")'" % magic_string | ||
2649 | gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %saaa\")'" % magic_string | ||
2650 | gdb_batch_cmd += " -ex 'list cpp-example-lib.hpp:13,13'" | ||
2651 | gdb_batch_cmd += " -ex 'continue'" | ||
2652 | r = runCmd(gdb_script + gdb_batch_cmd) | ||
2653 | self.logger.debug("%s %s returned: %s", gdb_script, | ||
2654 | gdb_batch_cmd, r.output) | ||
2655 | self.assertEqual(r.status, 0) | ||
2656 | self.assertIn("Breakpoint 1, main", r.output) | ||
2657 | self.assertIn("$1 = 0", r.output) # test.string.compare equal | ||
2658 | self.assertIn("$2 = -3", r.output) # test.string.compare longer | ||
2659 | self.assertIn( | ||
2660 | 'inline static const std::string test_string = "cpp-example-lib %s";' % magic_string, r.output) | ||
2661 | self.assertIn("exited normally", r.output) | ||
2662 | |||
2663 | # Stop the gdbserver | ||
2664 | r = runCmd(gdbserver_script + ' stop') | ||
2665 | self.assertEqual(r.status, 0) | ||
2666 | |||
2667 | # Check there is no gdbserver running | ||
2668 | r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps')) | ||
2669 | self.assertEqual(r.status, 0) | ||
2670 | self.assertNotIn("gdbserver ", r.output) | ||
2671 | |||
2672 | def _verify_cmake_preset(self, tempdir): | ||
2673 | """Verify the generated cmake preset works as expected | ||
2674 | |||
2675 | Check if compiling works | ||
2676 | Check if unit tests can be executed in qemu (not qemu-system) | ||
2677 | """ | ||
2678 | with open(os.path.join(tempdir, 'CMakeUserPresets.json')) as cmake_preset_j: | ||
2679 | cmake_preset_d = json.load(cmake_preset_j) | ||
2680 | config_presets = cmake_preset_d["configurePresets"] | ||
2681 | self.assertEqual(len(config_presets), 1) | ||
2682 | cmake_exe = config_presets[0]["cmakeExecutable"] | ||
2683 | preset_name = config_presets[0]["name"] | ||
2684 | |||
2685 | # Verify the wrapper for cmake native is available | ||
2686 | self.assertExists(cmake_exe) | ||
2687 | |||
2688 | # Verify the cmake preset generated by devtool ide-sdk is available | ||
2689 | result = runCmd('%s --list-presets' % cmake_exe, cwd=tempdir) | ||
2690 | self.assertIn(preset_name, result.output) | ||
2691 | |||
2692 | # Verify cmake re-uses the o files compiled by bitbake | ||
2693 | result = runCmd('%s --build --preset %s' % | ||
2694 | (cmake_exe, preset_name), cwd=tempdir) | ||
2695 | self.assertIn("ninja: no work to do.", result.output) | ||
2696 | |||
2697 | # Verify the unit tests work (in Qemu user mode) | ||
2698 | result = runCmd('%s --build --preset %s --target test' % | ||
2699 | (cmake_exe, preset_name), cwd=tempdir) | ||
2700 | self.assertIn("100% tests passed", result.output) | ||
2701 | |||
2702 | # Verify re-building and testing works again | ||
2703 | result = runCmd('%s --build --preset %s --target clean' % | ||
2704 | (cmake_exe, preset_name), cwd=tempdir) | ||
2705 | self.assertIn("Cleaning", result.output) | ||
2706 | result = runCmd('%s --build --preset %s' % | ||
2707 | (cmake_exe, preset_name), cwd=tempdir) | ||
2708 | self.assertIn("Building", result.output) | ||
2709 | self.assertIn("Linking", result.output) | ||
2710 | result = runCmd('%s --build --preset %s --target test' % | ||
2711 | (cmake_exe, preset_name), cwd=tempdir) | ||
2712 | self.assertIn("Running tests...", result.output) | ||
2713 | self.assertIn("100% tests passed", result.output) | ||
2714 | |||
2715 | @OETestTag("runqemu") | ||
2716 | def test_devtool_ide_sdk_none_qemu(self): | ||
2717 | """Start qemu-system and run tests for multiple recipes. ide=none is used.""" | ||
2718 | recipe_names = ["cmake-example", "meson-example"] | ||
2719 | testimage = "oe-selftest-image" | ||
2720 | |||
2721 | self._check_workspace() | ||
2722 | self._write_bb_config(recipe_names) | ||
2723 | self._check_runqemu_prerequisites() | ||
2724 | |||
2725 | # Verify deployment to Qemu (system mode) works | ||
2726 | bitbake(testimage) | ||
2727 | with runqemu(testimage, runqemuparams="nographic") as qemu: | ||
2728 | # cmake-example recipe | ||
2729 | recipe_name = "cmake-example" | ||
2730 | example_exe = "cmake-example" | ||
2731 | build_file = "CMakeLists.txt" | ||
2732 | tempdir = self._devtool_ide_sdk_recipe( | ||
2733 | recipe_name, build_file, testimage) | ||
2734 | bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % ( | ||
2735 | recipe_name, testimage, qemu.ip) | ||
2736 | runCmd(bitbake_sdk_cmd) | ||
2737 | self._gdb_cross() | ||
2738 | self._verify_cmake_preset(tempdir) | ||
2739 | self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe) | ||
2740 | # Verify the oe-scripts sym-link is valid | ||
2741 | self.assertEqual(self._workspace_scripts_dir( | ||
2742 | recipe_name), self._sources_scripts_dir(tempdir)) | ||
2743 | |||
2744 | # meson-example recipe | ||
2745 | recipe_name = "meson-example" | ||
2746 | example_exe = "mesonex" | ||
2747 | build_file = "meson.build" | ||
2748 | tempdir = self._devtool_ide_sdk_recipe( | ||
2749 | recipe_name, build_file, testimage) | ||
2750 | bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % ( | ||
2751 | recipe_name, testimage, qemu.ip) | ||
2752 | runCmd(bitbake_sdk_cmd) | ||
2753 | self._gdb_cross() | ||
2754 | self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe) | ||
2755 | # Verify the oe-scripts sym-link is valid | ||
2756 | self.assertEqual(self._workspace_scripts_dir( | ||
2757 | recipe_name), self._sources_scripts_dir(tempdir)) | ||
2758 | |||
2759 | def test_devtool_ide_sdk_code_cmake(self): | ||
2760 | """Verify a cmake recipe works with ide=code mode""" | ||
2761 | recipe_name = "cmake-example" | ||
2762 | build_file = "CMakeLists.txt" | ||
2763 | testimage = "oe-selftest-image" | ||
2764 | |||
2765 | self._check_workspace() | ||
2766 | self._write_bb_config([recipe_name]) | ||
2767 | tempdir = self._devtool_ide_sdk_recipe( | ||
2768 | recipe_name, build_file, testimage) | ||
2769 | bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % ( | ||
2770 | recipe_name, testimage) | ||
2771 | runCmd(bitbake_sdk_cmd) | ||
2772 | self._verify_cmake_preset(tempdir) | ||
2773 | self._verify_install_script_code(tempdir, recipe_name) | ||
2774 | self._gdb_cross() | ||
2775 | |||
2776 | def test_devtool_ide_sdk_code_meson(self): | ||
2777 | """Verify a meson recipe works with ide=code mode""" | ||
2778 | recipe_name = "meson-example" | ||
2779 | build_file = "meson.build" | ||
2780 | testimage = "oe-selftest-image" | ||
2781 | |||
2782 | self._check_workspace() | ||
2783 | self._write_bb_config([recipe_name]) | ||
2784 | tempdir = self._devtool_ide_sdk_recipe( | ||
2785 | recipe_name, build_file, testimage) | ||
2786 | bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % ( | ||
2787 | recipe_name, testimage) | ||
2788 | runCmd(bitbake_sdk_cmd) | ||
2789 | |||
2790 | with open(os.path.join(tempdir, '.vscode', 'settings.json')) as settings_j: | ||
2791 | settings_d = json.load(settings_j) | ||
2792 | meson_exe = settings_d["mesonbuild.mesonPath"] | ||
2793 | meson_build_folder = settings_d["mesonbuild.buildFolder"] | ||
2794 | |||
2795 | # Verify the wrapper for meson native is available | ||
2796 | self.assertExists(meson_exe) | ||
2797 | |||
2798 | # Verify meson re-uses the o files compiled by bitbake | ||
2799 | result = runCmd('%s compile -C %s' % | ||
2800 | (meson_exe, meson_build_folder), cwd=tempdir) | ||
2801 | self.assertIn("ninja: no work to do.", result.output) | ||
2802 | |||
2803 | # Verify the unit tests work (in Qemu) | ||
2804 | runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir) | ||
2805 | |||
2806 | # Verify re-building and testing works again | ||
2807 | result = runCmd('%s compile -C %s --clean' % | ||
2808 | (meson_exe, meson_build_folder), cwd=tempdir) | ||
2809 | self.assertIn("Cleaning...", result.output) | ||
2810 | result = runCmd('%s compile -C %s' % | ||
2811 | (meson_exe, meson_build_folder), cwd=tempdir) | ||
2812 | self.assertIn("Linking target", result.output) | ||
2813 | runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir) | ||
2814 | |||
2815 | self._verify_install_script_code(tempdir, recipe_name) | ||
2816 | self._gdb_cross() | ||
2817 | |||
2818 | def test_devtool_ide_sdk_shared_sysroots(self): | ||
2819 | """Verify the shared sysroot SDK""" | ||
2820 | |||
2821 | # Handle the workspace (which is not needed by this test case) | ||
2822 | self._check_workspace() | ||
2823 | |||
2824 | result_init = runCmd( | ||
2825 | 'devtool ide-sdk -m shared oe-selftest-image cmake-example meson-example --ide=code') | ||
2826 | bb_vars = get_bb_vars( | ||
2827 | ['REAL_MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'], "meta-ide-support") | ||
2828 | environment_script = 'environment-setup-%s' % bb_vars['REAL_MULTIMACH_TARGET_SYS'] | ||
2829 | deploydir = bb_vars['DEPLOY_DIR_IMAGE'] | ||
2830 | environment_script_path = os.path.join(deploydir, environment_script) | ||
2831 | cpp_example_src = os.path.join( | ||
2832 | bb_vars['COREBASE'], 'meta-selftest', 'recipes-test', 'cpp', 'files') | ||
2833 | |||
2834 | # Verify the cross environment script is available | ||
2835 | self.assertExists(environment_script_path) | ||
2836 | |||
2837 | def runCmdEnv(cmd, cwd): | ||
2838 | cmd = '/bin/sh -c ". %s > /dev/null && %s"' % ( | ||
2839 | environment_script_path, cmd) | ||
2840 | return runCmd(cmd, cwd) | ||
2841 | |||
2842 | # Verify building the C++ example works with CMake | ||
2843 | tempdir_cmake = tempfile.mkdtemp(prefix='devtoolqa') | ||
2844 | self.track_for_cleanup(tempdir_cmake) | ||
2845 | |||
2846 | result_cmake = runCmdEnv("which cmake", cwd=tempdir_cmake) | ||
2847 | cmake_native = os.path.normpath(result_cmake.output.strip()) | ||
2848 | self.assertExists(cmake_native) | ||
2849 | |||
2850 | runCmdEnv('cmake %s' % cpp_example_src, cwd=tempdir_cmake) | ||
2851 | runCmdEnv('cmake --build %s' % tempdir_cmake, cwd=tempdir_cmake) | ||
2852 | |||
2853 | # Verify the printed note really referres to a cmake executable | ||
2854 | cmake_native_code = "" | ||
2855 | for line in result_init.output.splitlines(): | ||
2856 | m = re.search(r'"cmake.cmakePath": "(.*)"', line) | ||
2857 | if m: | ||
2858 | cmake_native_code = m.group(1) | ||
2859 | break | ||
2860 | self.assertExists(cmake_native_code) | ||
2861 | self.assertEqual(cmake_native, cmake_native_code) | ||
2862 | |||
2863 | # Verify building the C++ example works with Meson | ||
2864 | tempdir_meson = tempfile.mkdtemp(prefix='devtoolqa') | ||
2865 | self.track_for_cleanup(tempdir_meson) | ||
2866 | |||
2867 | result_cmake = runCmdEnv("which meson", cwd=tempdir_meson) | ||
2868 | meson_native = os.path.normpath(result_cmake.output.strip()) | ||
2869 | self.assertExists(meson_native) | ||
2870 | |||
2871 | runCmdEnv('meson setup %s' % tempdir_meson, cwd=cpp_example_src) | ||
2872 | runCmdEnv('meson compile', cwd=tempdir_meson) | ||
2873 | |||
2874 | def test_devtool_ide_sdk_plugins(self): | ||
2875 | """Test that devtool ide-sdk can use plugins from other layers.""" | ||
2876 | |||
2877 | # We need a workspace layer and a modified recipe (but no image) | ||
2878 | modified_recipe_name = "meson-example" | ||
2879 | modified_build_file = "meson.build" | ||
2880 | testimage = "oe-selftest-image" | ||
2881 | shared_recipe_name = "cmake-example" | ||
2882 | |||
2883 | self._check_workspace() | ||
2884 | self._write_bb_config([modified_recipe_name]) | ||
2885 | tempdir = self._devtool_ide_sdk_recipe( | ||
2886 | modified_recipe_name, modified_build_file, None) | ||
2887 | |||
2888 | IDE_RE = re.compile(r'.*--ide \{(.*)\}.*') | ||
2889 | |||
2890 | def get_ides_from_help(help_str): | ||
2891 | m = IDE_RE.search(help_str) | ||
2892 | return m.group(1).split(',') | ||
2893 | |||
2894 | # verify the default plugins are available but the foo plugin is not | ||
2895 | result = runCmd('devtool ide-sdk -h') | ||
2896 | found_ides = get_ides_from_help(result.output) | ||
2897 | self.assertIn('code', found_ides) | ||
2898 | self.assertIn('none', found_ides) | ||
2899 | self.assertNotIn('foo', found_ides) | ||
2900 | |||
2901 | shared_config_file = os.path.join(tempdir, 'shared-config.txt') | ||
2902 | shared_config_str = 'Dummy shared IDE config' | ||
2903 | modified_config_file = os.path.join(tempdir, 'modified-config.txt') | ||
2904 | modified_config_str = 'Dummy modified IDE config' | ||
2905 | |||
2906 | # Generate a foo plugin in the workspace layer | ||
2907 | plugin_dir = os.path.join( | ||
2908 | self.workspacedir, 'lib', 'devtool', 'ide_plugins') | ||
2909 | os.makedirs(plugin_dir) | ||
2910 | plugin_code = 'from devtool.ide_plugins import IdeBase\n\n' | ||
2911 | plugin_code += 'class IdeFoo(IdeBase):\n' | ||
2912 | plugin_code += ' def setup_shared_sysroots(self, shared_env):\n' | ||
2913 | plugin_code += ' with open("%s", "w") as config_file:\n' % shared_config_file | ||
2914 | plugin_code += ' config_file.write("%s")\n\n' % shared_config_str | ||
2915 | plugin_code += ' def setup_modified_recipe(self, args, image_recipe, modified_recipe):\n' | ||
2916 | plugin_code += ' with open("%s", "w") as config_file:\n' % modified_config_file | ||
2917 | plugin_code += ' config_file.write("%s")\n\n' % modified_config_str | ||
2918 | plugin_code += 'def register_ide_plugin(ide_plugins):\n' | ||
2919 | plugin_code += ' ide_plugins["foo"] = IdeFoo\n' | ||
2920 | |||
2921 | plugin_py = os.path.join(plugin_dir, 'ide_foo.py') | ||
2922 | with open(plugin_py, 'w') as plugin_file: | ||
2923 | plugin_file.write(plugin_code) | ||
2924 | |||
2925 | # Verify the foo plugin is available as well | ||
2926 | result = runCmd('devtool ide-sdk -h') | ||
2927 | found_ides = get_ides_from_help(result.output) | ||
2928 | self.assertIn('code', found_ides) | ||
2929 | self.assertIn('none', found_ides) | ||
2930 | self.assertIn('foo', found_ides) | ||
2931 | |||
2932 | # Verify the foo plugin generates a shared config | ||
2933 | result = runCmd( | ||
2934 | 'devtool ide-sdk -m shared --skip-bitbake --ide foo %s' % shared_recipe_name) | ||
2935 | with open(shared_config_file) as shared_config: | ||
2936 | shared_config_new = shared_config.read() | ||
2937 | self.assertEqual(shared_config_str, shared_config_new) | ||
2938 | |||
2939 | # Verify the foo plugin generates a modified config | ||
2940 | result = runCmd('devtool ide-sdk --skip-bitbake --ide foo %s %s' % | ||
2941 | (modified_recipe_name, testimage)) | ||
2942 | with open(modified_config_file) as modified_config: | ||
2943 | modified_config_new = modified_config.read() | ||
2944 | self.assertEqual(modified_config_str, modified_config_new) | ||
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py index e1cfc3b621..ad952c004b 100644 --- a/meta/lib/oeqa/selftest/cases/distrodata.py +++ b/meta/lib/oeqa/selftest/cases/distrodata.py | |||
@@ -1,11 +1,10 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | from oeqa.selftest.case import OESelftestTestCase | 7 | from oeqa.selftest.case import OESelftestTestCase |
6 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars | ||
7 | from oeqa.utils.decorators import testcase | ||
8 | from oeqa.utils.ftools import write_file | ||
9 | 8 | ||
10 | import oe.recipeutils | 9 | import oe.recipeutils |
11 | 10 | ||
@@ -18,7 +17,7 @@ class Distrodata(OESelftestTestCase): | |||
18 | Product: oe-core | 17 | Product: oe-core |
19 | Author: Alexander Kanavin <alex.kanavin@gmail.com> | 18 | Author: Alexander Kanavin <alex.kanavin@gmail.com> |
20 | """ | 19 | """ |
21 | feature = 'LICENSE_FLAGS_WHITELIST += " commercial"\n' | 20 | feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n' |
22 | self.write_config(feature) | 21 | self.write_config(feature) |
23 | 22 | ||
24 | pkgs = oe.recipeutils.get_recipe_upgrade_status() | 23 | pkgs = oe.recipeutils.get_recipe_upgrade_status() |
@@ -49,21 +48,21 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re | |||
49 | Author: Alexander Kanavin <alex.kanavin@gmail.com> | 48 | Author: Alexander Kanavin <alex.kanavin@gmail.com> |
50 | """ | 49 | """ |
51 | def is_exception(pkg): | 50 | def is_exception(pkg): |
52 | exceptions = ["packagegroup-", "initramfs-", "systemd-machine-units", "target-sdk-provides-dummy"] | 51 | exceptions = ["packagegroup-",] |
53 | for i in exceptions: | 52 | for i in exceptions: |
54 | if i in pkg: | 53 | if i in pkg: |
55 | return True | 54 | return True |
56 | return False | 55 | return False |
57 | 56 | ||
58 | def is_maintainer_exception(entry): | 57 | def is_maintainer_exception(entry): |
59 | exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", | 58 | exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data", |
60 | "cve-update-db-native"] | 59 | "cve-update-nvd2-native",] |
61 | for i in exceptions: | 60 | for i in exceptions: |
62 | if i in entry: | 61 | if i in entry: |
63 | return True | 62 | return True |
64 | return False | 63 | return False |
65 | 64 | ||
66 | feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_WHITELIST += " commercial"\nPARSE_ALL_RECIPES = "1"\n' | 65 | feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_ACCEPTED += " commercial"\nPARSE_ALL_RECIPES = "1"\nPACKAGE_CLASSES = "package_ipk package_deb package_rpm"\n' |
67 | self.write_config(feature) | 66 | self.write_config(feature) |
68 | 67 | ||
69 | with bb.tinfoil.Tinfoil() as tinfoil: | 68 | with bb.tinfoil.Tinfoil() as tinfoil: |
@@ -74,7 +73,7 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re | |||
74 | 73 | ||
75 | missing_recipes = [] | 74 | missing_recipes = [] |
76 | recipes = [] | 75 | recipes = [] |
77 | prefix = "RECIPE_MAINTAINER_pn-" | 76 | prefix = "RECIPE_MAINTAINER:pn-" |
78 | 77 | ||
79 | # We could have used all_recipes() here, but this method will find | 78 | # We could have used all_recipes() here, but this method will find |
80 | # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files | 79 | # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files |
diff --git a/meta/lib/oeqa/selftest/cases/efibootpartition.py b/meta/lib/oeqa/selftest/cases/efibootpartition.py index a61cf9bcb3..fa74103dec 100644 --- a/meta/lib/oeqa/selftest/cases/efibootpartition.py +++ b/meta/lib/oeqa/selftest/cases/efibootpartition.py | |||
@@ -5,42 +5,29 @@ | |||
5 | # SPDX-License-Identifier: MIT | 5 | # SPDX-License-Identifier: MIT |
6 | # | 6 | # |
7 | 7 | ||
8 | import re | ||
9 | |||
10 | from oeqa.selftest.case import OESelftestTestCase | 8 | from oeqa.selftest.case import OESelftestTestCase |
11 | from oeqa.utils.commands import bitbake, runqemu, get_bb_var | 9 | from oeqa.utils.commands import bitbake, runqemu |
10 | from oeqa.core.decorator.data import skipIfNotMachine | ||
11 | import oe.types | ||
12 | 12 | ||
13 | class GenericEFITest(OESelftestTestCase): | 13 | class GenericEFITest(OESelftestTestCase): |
14 | """EFI booting test class""" | 14 | """EFI booting test class""" |
15 | @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently") | ||
16 | def test_boot_efi(self): | ||
17 | cmd = "runqemu nographic serial wic ovmf" | ||
18 | if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]): | ||
19 | cmd += " kvm" | ||
20 | image = "core-image-minimal" | ||
15 | 21 | ||
16 | cmd_common = "runqemu nographic serial wic ovmf" | 22 | self.write_config(""" |
17 | efi_provider = "systemd-boot" | 23 | EFI_PROVIDER = "systemd-boot" |
18 | image = "core-image-minimal" | 24 | IMAGE_FSTYPES:pn-%s:append = " wic" |
19 | machine = "qemux86-64" | 25 | MACHINE_FEATURES:append = " efi" |
20 | recipes_built = False | ||
21 | |||
22 | @classmethod | ||
23 | def setUpLocal(self): | ||
24 | super(GenericEFITest, self).setUpLocal(self) | ||
25 | |||
26 | self.write_config(self, | ||
27 | """ | ||
28 | EFI_PROVIDER = "%s" | ||
29 | IMAGE_FSTYPES_pn-%s_append = " wic" | ||
30 | MACHINE = "%s" | ||
31 | MACHINE_FEATURES_append = " efi" | ||
32 | WKS_FILE = "efi-bootdisk.wks.in" | 26 | WKS_FILE = "efi-bootdisk.wks.in" |
33 | IMAGE_INSTALL_append = " grub-efi systemd-boot kernel-image-bzimage" | 27 | IMAGE_INSTALL:append = " grub-efi systemd-boot kernel-image-bzimage" |
34 | """ | 28 | """ |
35 | % (self.efi_provider, self.image, self.machine)) | 29 | % (image)) |
36 | if not self.recipes_built: | ||
37 | bitbake("ovmf") | ||
38 | bitbake(self.image) | ||
39 | self.recipes_built = True | ||
40 | 30 | ||
41 | @classmethod | 31 | bitbake(image + " ovmf") |
42 | def test_boot_efi(self): | 32 | with runqemu(image, ssh=False, launch_cmd=cmd) as qemu: |
43 | """Test generic boot partition with qemu""" | ||
44 | cmd = "%s %s" % (self.cmd_common, self.machine) | ||
45 | with runqemu(self.image, ssh=False, launch_cmd=cmd) as qemu: | ||
46 | self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) | 33 | self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) |
diff --git a/meta/lib/oeqa/selftest/cases/eSDK.py b/meta/lib/oeqa/selftest/cases/esdk.py index 862849af35..9f5de2cde7 100644 --- a/meta/lib/oeqa/selftest/cases/eSDK.py +++ b/meta/lib/oeqa/selftest/cases/esdk.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -8,7 +10,7 @@ import os | |||
8 | import glob | 10 | import glob |
9 | import time | 11 | import time |
10 | from oeqa.selftest.case import OESelftestTestCase | 12 | from oeqa.selftest.case import OESelftestTestCase |
11 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars | 13 | from oeqa.utils.commands import runCmd, bitbake, get_bb_vars |
12 | 14 | ||
13 | class oeSDKExtSelfTest(OESelftestTestCase): | 15 | class oeSDKExtSelfTest(OESelftestTestCase): |
14 | """ | 16 | """ |
@@ -63,7 +65,7 @@ class oeSDKExtSelfTest(OESelftestTestCase): | |||
63 | cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA) | 65 | cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA) |
64 | 66 | ||
65 | sstate_config=""" | 67 | sstate_config=""" |
66 | SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" | 68 | ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS" |
67 | SSTATE_MIRRORS = "file://.* file://%s/PATH" | 69 | SSTATE_MIRRORS = "file://.* file://%s/PATH" |
68 | CORE_IMAGE_EXTRA_INSTALL = "perl" | 70 | CORE_IMAGE_EXTRA_INSTALL = "perl" |
69 | """ % sstate_dir | 71 | """ % sstate_dir |
@@ -91,7 +93,7 @@ CORE_IMAGE_EXTRA_INSTALL = "perl" | |||
91 | 93 | ||
92 | # Configure eSDK to use sstate mirror from poky | 94 | # Configure eSDK to use sstate mirror from poky |
93 | sstate_config=""" | 95 | sstate_config=""" |
94 | SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" | 96 | ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS" |
95 | SSTATE_MIRRORS = "file://.* file://%s/PATH" | 97 | SSTATE_MIRRORS = "file://.* file://%s/PATH" |
96 | """ % bb_vars["SSTATE_DIR"] | 98 | """ % bb_vars["SSTATE_DIR"] |
97 | with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f: | 99 | with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f: |
@@ -100,7 +102,7 @@ SSTATE_MIRRORS = "file://.* file://%s/PATH" | |||
100 | @classmethod | 102 | @classmethod |
101 | def tearDownClass(cls): | 103 | def tearDownClass(cls): |
102 | for i in range(0, 10): | 104 | for i in range(0, 10): |
103 | if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')): | 105 | if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')) or os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'cache/hashserv.db-wal')): |
104 | time.sleep(1) | 106 | time.sleep(1) |
105 | else: | 107 | else: |
106 | break | 108 | break |
diff --git a/meta/lib/oeqa/selftest/cases/externalsrc.py b/meta/lib/oeqa/selftest/cases/externalsrc.py new file mode 100644 index 0000000000..1d800dc82c --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/externalsrc.py | |||
@@ -0,0 +1,44 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | import os | ||
8 | import shutil | ||
9 | import tempfile | ||
10 | |||
11 | from oeqa.selftest.case import OESelftestTestCase | ||
12 | from oeqa.utils.commands import get_bb_var, runCmd | ||
13 | |||
14 | class ExternalSrc(OESelftestTestCase): | ||
15 | # test that srctree_hash_files does not crash | ||
16 | # we should be actually checking do_compile[file-checksums] but oeqa currently does not support it | ||
17 | # so we check only that a recipe with externalsrc can be parsed | ||
18 | def test_externalsrc_srctree_hash_files(self): | ||
19 | test_recipe = "git-submodule-test" | ||
20 | git_url = "git://git.yoctoproject.org/git-submodule-test" | ||
21 | externalsrc_dir = tempfile.TemporaryDirectory(prefix="externalsrc").name | ||
22 | |||
23 | self.write_config( | ||
24 | """ | ||
25 | INHERIT += "externalsrc" | ||
26 | EXTERNALSRC:pn-%s = "%s" | ||
27 | """ % (test_recipe, externalsrc_dir) | ||
28 | ) | ||
29 | |||
30 | # test with git without submodules | ||
31 | runCmd('git clone %s %s' % (git_url, externalsrc_dir)) | ||
32 | os.unlink(externalsrc_dir + "/.gitmodules") | ||
33 | open(".gitmodules", 'w').close() # local file .gitmodules in cwd should not affect externalsrc parsing | ||
34 | self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC") | ||
35 | os.unlink(".gitmodules") | ||
36 | |||
37 | # test with git with submodules | ||
38 | runCmd('git checkout .gitmodules', cwd=externalsrc_dir) | ||
39 | runCmd('git submodule update --init --recursive', cwd=externalsrc_dir) | ||
40 | self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC") | ||
41 | |||
42 | # test without git | ||
43 | shutil.rmtree(os.path.join(externalsrc_dir, ".git")) | ||
44 | self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC") | ||
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py index 76cbadf2ff..44099176fc 100644 --- a/meta/lib/oeqa/selftest/cases/fetch.py +++ b/meta/lib/oeqa/selftest/cases/fetch.py | |||
@@ -1,7 +1,12 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
7 | import tempfile | ||
8 | import textwrap | ||
9 | import bb.tinfoil | ||
5 | import oe.path | 10 | import oe.path |
6 | from oeqa.selftest.case import OESelftestTestCase | 11 | from oeqa.selftest.case import OESelftestTestCase |
7 | from oeqa.utils.commands import bitbake | 12 | from oeqa.utils.commands import bitbake |
@@ -21,8 +26,8 @@ class Fetch(OESelftestTestCase): | |||
21 | # No mirrors, should use git to fetch successfully | 26 | # No mirrors, should use git to fetch successfully |
22 | features = """ | 27 | features = """ |
23 | DL_DIR = "%s" | 28 | DL_DIR = "%s" |
24 | MIRRORS_forcevariable = "" | 29 | MIRRORS:forcevariable = "" |
25 | PREMIRRORS_forcevariable = "" | 30 | PREMIRRORS:forcevariable = "" |
26 | """ % dldir | 31 | """ % dldir |
27 | self.write_config(features) | 32 | self.write_config(features) |
28 | oe.path.remove(dldir, recurse=True) | 33 | oe.path.remove(dldir, recurse=True) |
@@ -31,9 +36,10 @@ PREMIRRORS_forcevariable = "" | |||
31 | # No mirrors and broken git, should fail | 36 | # No mirrors and broken git, should fail |
32 | features = """ | 37 | features = """ |
33 | DL_DIR = "%s" | 38 | DL_DIR = "%s" |
39 | SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git" | ||
34 | GIT_PROXY_COMMAND = "false" | 40 | GIT_PROXY_COMMAND = "false" |
35 | MIRRORS_forcevariable = "" | 41 | MIRRORS:forcevariable = "" |
36 | PREMIRRORS_forcevariable = "" | 42 | PREMIRRORS:forcevariable = "" |
37 | """ % dldir | 43 | """ % dldir |
38 | self.write_config(features) | 44 | self.write_config(features) |
39 | oe.path.remove(dldir, recurse=True) | 45 | oe.path.remove(dldir, recurse=True) |
@@ -43,9 +49,62 @@ PREMIRRORS_forcevariable = "" | |||
43 | # Broken git but a specific mirror | 49 | # Broken git but a specific mirror |
44 | features = """ | 50 | features = """ |
45 | DL_DIR = "%s" | 51 | DL_DIR = "%s" |
52 | SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git" | ||
46 | GIT_PROXY_COMMAND = "false" | 53 | GIT_PROXY_COMMAND = "false" |
47 | MIRRORS_forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/" | 54 | MIRRORS:forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/" |
48 | """ % dldir | 55 | """ % dldir |
49 | self.write_config(features) | 56 | self.write_config(features) |
50 | oe.path.remove(dldir, recurse=True) | 57 | oe.path.remove(dldir, recurse=True) |
51 | bitbake("dbus-wait -c fetch -f") | 58 | bitbake("dbus-wait -c fetch -f") |
59 | |||
60 | |||
61 | class Dependencies(OESelftestTestCase): | ||
62 | def write_recipe(self, content, tempdir): | ||
63 | f = os.path.join(tempdir, "test.bb") | ||
64 | with open(f, "w") as fd: | ||
65 | fd.write(content) | ||
66 | return f | ||
67 | |||
68 | def test_dependencies(self): | ||
69 | """ | ||
70 | Verify that the correct dependencies are generated for specific SRC_URI entries. | ||
71 | """ | ||
72 | |||
73 | with bb.tinfoil.Tinfoil() as tinfoil, tempfile.TemporaryDirectory(prefix="selftest-fetch") as tempdir: | ||
74 | tinfoil.prepare(config_only=False, quiet=2) | ||
75 | |||
76 | r = """ | ||
77 | LICENSE="CLOSED" | ||
78 | SRC_URI="http://example.com/tarball.zip" | ||
79 | """ | ||
80 | f = self.write_recipe(textwrap.dedent(r), tempdir) | ||
81 | d = tinfoil.parse_recipe_file(f) | ||
82 | self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends")) | ||
83 | self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends")) | ||
84 | |||
85 | # Verify that the downloadfilename overrides the URI | ||
86 | r = """ | ||
87 | LICENSE="CLOSED" | ||
88 | SRC_URI="https://example.com/tarball;downloadfilename=something.zip" | ||
89 | """ | ||
90 | f = self.write_recipe(textwrap.dedent(r), tempdir) | ||
91 | d = tinfoil.parse_recipe_file(f) | ||
92 | self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends")) | ||
93 | self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends") or "") | ||
94 | |||
95 | r = """ | ||
96 | LICENSE="CLOSED" | ||
97 | SRC_URI="ftp://example.com/tarball.lz" | ||
98 | """ | ||
99 | f = self.write_recipe(textwrap.dedent(r), tempdir) | ||
100 | d = tinfoil.parse_recipe_file(f) | ||
101 | self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends")) | ||
102 | self.assertIn("lzip-native", d.getVarFlag("do_unpack", "depends")) | ||
103 | |||
104 | r = """ | ||
105 | LICENSE="CLOSED" | ||
106 | SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff" | ||
107 | """ | ||
108 | f = self.write_recipe(textwrap.dedent(r), tempdir) | ||
109 | d = tinfoil.parse_recipe_file(f) | ||
110 | self.assertIn("git-native", d.getVarFlag("do_fetch", "depends")) | ||
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py index 02692de822..347c065377 100644 --- a/meta/lib/oeqa/selftest/cases/fitimage.py +++ b/meta/lib/oeqa/selftest/cases/fitimage.py | |||
@@ -1,11 +1,12 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | from oeqa.selftest.case import OESelftestTestCase | 7 | from oeqa.selftest.case import OESelftestTestCase |
6 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu | 8 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars |
7 | import os | 9 | import os |
8 | import json | ||
9 | import re | 10 | import re |
10 | 11 | ||
11 | class FitImageTests(OESelftestTestCase): | 12 | class FitImageTests(OESelftestTestCase): |
@@ -32,6 +33,8 @@ KERNEL_CLASSES = " kernel-fitimage " | |||
32 | # RAM disk variables including load address and entrypoint for kernel and RAM disk | 33 | # RAM disk variables including load address and entrypoint for kernel and RAM disk |
33 | IMAGE_FSTYPES += "cpio.gz" | 34 | IMAGE_FSTYPES += "cpio.gz" |
34 | INITRAMFS_IMAGE = "core-image-minimal" | 35 | INITRAMFS_IMAGE = "core-image-minimal" |
36 | # core-image-minimal is used as initramfs here, drop the rootfs suffix | ||
37 | IMAGE_NAME_SUFFIX:pn-core-image-minimal = "" | ||
35 | UBOOT_RD_LOADADDRESS = "0x88000000" | 38 | UBOOT_RD_LOADADDRESS = "0x88000000" |
36 | UBOOT_RD_ENTRYPOINT = "0x88000000" | 39 | UBOOT_RD_ENTRYPOINT = "0x88000000" |
37 | UBOOT_LOADADDRESS = "0x80080000" | 40 | UBOOT_LOADADDRESS = "0x80080000" |
@@ -41,15 +44,14 @@ FIT_DESC = "A model description" | |||
41 | self.write_config(config) | 44 | self.write_config(config) |
42 | 45 | ||
43 | # fitImage is created as part of linux recipe | 46 | # fitImage is created as part of linux recipe |
44 | bitbake("virtual/kernel") | 47 | image = "virtual/kernel" |
48 | bitbake(image) | ||
49 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'INITRAMFS_IMAGE_NAME', 'KERNEL_FIT_LINK_NAME'], image) | ||
45 | 50 | ||
46 | image_type = "core-image-minimal" | 51 | fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], |
47 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | 52 | "fitImage-its-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME'])) |
48 | machine = get_bb_var('MACHINE') | 53 | fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], |
49 | fitimage_its_path = os.path.join(deploy_dir_image, | 54 | "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME'])) |
50 | "fitImage-its-%s-%s-%s" % (image_type, machine, machine)) | ||
51 | fitimage_path = os.path.join(deploy_dir_image, | ||
52 | "fitImage-%s-%s-%s" % (image_type, machine, machine)) | ||
53 | 55 | ||
54 | self.assertTrue(os.path.exists(fitimage_its_path), | 56 | self.assertTrue(os.path.exists(fitimage_its_path), |
55 | "%s image tree source doesn't exist" % (fitimage_its_path)) | 57 | "%s image tree source doesn't exist" % (fitimage_its_path)) |
@@ -114,22 +116,22 @@ KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper " | |||
114 | UBOOT_SIGN_ENABLE = "1" | 116 | UBOOT_SIGN_ENABLE = "1" |
115 | FIT_GENERATE_KEYS = "1" | 117 | FIT_GENERATE_KEYS = "1" |
116 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | 118 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" |
117 | UBOOT_SIGN_KEYNAME = "oe-selftest" | 119 | UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" |
120 | UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" | ||
118 | FIT_SIGN_INDIVIDUAL = "1" | 121 | FIT_SIGN_INDIVIDUAL = "1" |
119 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" | 122 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" |
120 | """ | 123 | """ |
121 | self.write_config(config) | 124 | self.write_config(config) |
122 | 125 | ||
123 | # fitImage is created as part of linux recipe | 126 | # fitImage is created as part of linux recipe |
124 | bitbake("virtual/kernel") | 127 | image = "virtual/kernel" |
128 | bitbake(image) | ||
129 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'KERNEL_FIT_LINK_NAME'], image) | ||
125 | 130 | ||
126 | image_type = "core-image-minimal" | 131 | fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], |
127 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | 132 | "fitImage-its-%s" % (bb_vars['KERNEL_FIT_LINK_NAME'])) |
128 | machine = get_bb_var('MACHINE') | 133 | fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], |
129 | fitimage_its_path = os.path.join(deploy_dir_image, | 134 | "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME'])) |
130 | "fitImage-its-%s" % (machine,)) | ||
131 | fitimage_path = os.path.join(deploy_dir_image, | ||
132 | "fitImage-%s.bin" % (machine,)) | ||
133 | 135 | ||
134 | self.assertTrue(os.path.exists(fitimage_its_path), | 136 | self.assertTrue(os.path.exists(fitimage_its_path), |
135 | "%s image tree source doesn't exist" % (fitimage_its_path)) | 137 | "%s image tree source doesn't exist" % (fitimage_its_path)) |
@@ -173,11 +175,11 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" | |||
173 | 175 | ||
174 | reqsigvalues_image = { | 176 | reqsigvalues_image = { |
175 | 'algo': '"sha256,rsa2048"', | 177 | 'algo': '"sha256,rsa2048"', |
176 | 'key-name-hint': '"oe-selftest"', | 178 | 'key-name-hint': '"img-oe-selftest"', |
177 | } | 179 | } |
178 | reqsigvalues_config = { | 180 | reqsigvalues_config = { |
179 | 'algo': '"sha256,rsa2048"', | 181 | 'algo': '"sha256,rsa2048"', |
180 | 'key-name-hint': '"oe-selftest"', | 182 | 'key-name-hint': '"cfg-oe-selftest"', |
181 | 'sign-images': '"kernel", "fdt"', | 183 | 'sign-images': '"kernel", "fdt"', |
182 | } | 184 | } |
183 | 185 | ||
@@ -202,7 +204,7 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" | |||
202 | signed_sections = {} | 204 | signed_sections = {} |
203 | for line in result.output.splitlines(): | 205 | for line in result.output.splitlines(): |
204 | if line.startswith((' Configuration', ' Image')): | 206 | if line.startswith((' Configuration', ' Image')): |
205 | in_signed = re.search('\((.*)\)', line).groups()[0] | 207 | in_signed = re.search(r'\((.*)\)', line).groups()[0] |
206 | elif re.match('^ *', line) in (' ', ''): | 208 | elif re.match('^ *', line) in (' ', ''): |
207 | in_signed = None | 209 | in_signed = None |
208 | elif in_signed: | 210 | elif in_signed: |
@@ -215,7 +217,10 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" | |||
215 | self.assertIn('conf-am335x-boneblack.dtb', signed_sections) | 217 | self.assertIn('conf-am335x-boneblack.dtb', signed_sections) |
216 | for signed_section, values in signed_sections.items(): | 218 | for signed_section, values in signed_sections.items(): |
217 | value = values.get('Sign algo', None) | 219 | value = values.get('Sign algo', None) |
218 | self.assertEqual(value, 'sha256,rsa2048:oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) | 220 | if signed_section.startswith("conf"): |
221 | self.assertEqual(value, 'sha256,rsa2048:cfg-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) | ||
222 | else: | ||
223 | self.assertEqual(value, 'sha256,rsa2048:img-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) | ||
219 | value = values.get('Sign value', None) | 224 | value = values.get('Sign value', None) |
220 | self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) | 225 | self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) |
221 | 226 | ||
@@ -231,6 +236,480 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" | |||
231 | result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True) | 236 | result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True) |
232 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work') | 237 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work') |
233 | 238 | ||
239 | def test_uboot_fit_image(self): | ||
240 | """ | ||
241 | Summary: Check if Uboot FIT image and Image Tree Source | ||
242 | (its) are built and the Image Tree Source has the | ||
243 | correct fields. | ||
244 | Expected: 1. u-boot-fitImage and u-boot-its can be built | ||
245 | 2. The type, load address, entrypoint address and | ||
246 | default values of U-boot image are correct in the | ||
247 | Image Tree Source. Not all the fields are tested, | ||
248 | only the key fields that wont vary between | ||
249 | different architectures. | ||
250 | Product: oe-core | ||
251 | Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> | ||
252 | based on work by Usama Arif <usama.arif@arm.com> | ||
253 | """ | ||
254 | config = """ | ||
255 | # We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set | ||
256 | MACHINE = "qemuarm" | ||
257 | UBOOT_MACHINE = "am57xx_evm_defconfig" | ||
258 | SPL_BINARY = "MLO" | ||
259 | |||
260 | # Enable creation of the U-Boot fitImage | ||
261 | UBOOT_FITIMAGE_ENABLE = "1" | ||
262 | |||
263 | # (U-boot) fitImage properties | ||
264 | UBOOT_LOADADDRESS = "0x80080000" | ||
265 | UBOOT_ENTRYPOINT = "0x80080000" | ||
266 | UBOOT_FIT_DESC = "A model description" | ||
267 | |||
268 | # Enable creation of Kernel fitImage | ||
269 | KERNEL_IMAGETYPES += " fitImage " | ||
270 | KERNEL_CLASSES = " kernel-fitimage" | ||
271 | UBOOT_SIGN_ENABLE = "1" | ||
272 | FIT_GENERATE_KEYS = "1" | ||
273 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | ||
274 | UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" | ||
275 | UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" | ||
276 | FIT_SIGN_INDIVIDUAL = "1" | ||
277 | """ | ||
278 | self.write_config(config) | ||
279 | |||
280 | # The U-Boot fitImage is created as part of the U-Boot recipe | ||
281 | bitbake("virtual/bootloader") | ||
282 | |||
283 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
284 | machine = get_bb_var('MACHINE') | ||
285 | fitimage_its_path = os.path.join(deploy_dir_image, | ||
286 | "u-boot-its-%s" % (machine,)) | ||
287 | fitimage_path = os.path.join(deploy_dir_image, | ||
288 | "u-boot-fitImage-%s" % (machine,)) | ||
289 | |||
290 | self.assertTrue(os.path.exists(fitimage_its_path), | ||
291 | "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
292 | self.assertTrue(os.path.exists(fitimage_path), | ||
293 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
294 | |||
295 | # Check that the type, load address, entrypoint address and default | ||
296 | # values for kernel and ramdisk in Image Tree Source are as expected. | ||
297 | # The order of fields in the below array is important. Not all the | ||
298 | # fields are tested, only the key fields that wont vary between | ||
299 | # different architectures. | ||
300 | its_field_check = [ | ||
301 | 'description = "A model description";', | ||
302 | 'type = "standalone";', | ||
303 | 'load = <0x80080000>;', | ||
304 | 'entry = <0x80080000>;', | ||
305 | 'default = "conf";', | ||
306 | 'loadables = "uboot";', | ||
307 | 'fdt = "fdt";' | ||
308 | ] | ||
309 | |||
310 | with open(fitimage_its_path) as its_file: | ||
311 | field_index = 0 | ||
312 | for line in its_file: | ||
313 | if field_index == len(its_field_check): | ||
314 | break | ||
315 | if its_field_check[field_index] in line: | ||
316 | field_index +=1 | ||
317 | |||
318 | if field_index != len(its_field_check): # if its equal, the test passed | ||
319 | self.assertTrue(field_index == len(its_field_check), | ||
320 | "Fields in Image Tree Source File %s did not match, error in finding %s" | ||
321 | % (fitimage_its_path, its_field_check[field_index])) | ||
322 | |||
323 | def test_uboot_sign_fit_image(self): | ||
324 | """ | ||
325 | Summary: Check if Uboot FIT image and Image Tree Source | ||
326 | (its) are built and the Image Tree Source has the | ||
327 | correct fields, in the scenario where the Kernel | ||
328 | is also creating/signing it's fitImage. | ||
329 | Expected: 1. u-boot-fitImage and u-boot-its can be built | ||
330 | 2. The type, load address, entrypoint address and | ||
331 | default values of U-boot image are correct in the | ||
332 | Image Tree Source. Not all the fields are tested, | ||
333 | only the key fields that wont vary between | ||
334 | different architectures. | ||
335 | Product: oe-core | ||
336 | Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> | ||
337 | based on work by Usama Arif <usama.arif@arm.com> | ||
338 | """ | ||
339 | config = """ | ||
340 | # We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set | ||
341 | MACHINE = "qemuarm" | ||
342 | UBOOT_MACHINE = "am57xx_evm_defconfig" | ||
343 | SPL_BINARY = "MLO" | ||
344 | |||
345 | # Enable creation of the U-Boot fitImage | ||
346 | UBOOT_FITIMAGE_ENABLE = "1" | ||
347 | |||
348 | # (U-boot) fitImage properties | ||
349 | UBOOT_LOADADDRESS = "0x80080000" | ||
350 | UBOOT_ENTRYPOINT = "0x80080000" | ||
351 | UBOOT_FIT_DESC = "A model description" | ||
352 | KERNEL_IMAGETYPES += " fitImage " | ||
353 | KERNEL_CLASSES = " kernel-fitimage " | ||
354 | INHERIT += "test-mkimage-wrapper" | ||
355 | UBOOT_SIGN_ENABLE = "1" | ||
356 | FIT_GENERATE_KEYS = "1" | ||
357 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | ||
358 | UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" | ||
359 | UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" | ||
360 | FIT_SIGN_INDIVIDUAL = "1" | ||
361 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'" | ||
362 | """ | ||
363 | self.write_config(config) | ||
364 | |||
365 | # The U-Boot fitImage is created as part of the U-Boot recipe | ||
366 | bitbake("virtual/bootloader") | ||
367 | |||
368 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
369 | machine = get_bb_var('MACHINE') | ||
370 | fitimage_its_path = os.path.join(deploy_dir_image, | ||
371 | "u-boot-its-%s" % (machine,)) | ||
372 | fitimage_path = os.path.join(deploy_dir_image, | ||
373 | "u-boot-fitImage-%s" % (machine,)) | ||
374 | |||
375 | self.assertTrue(os.path.exists(fitimage_its_path), | ||
376 | "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
377 | self.assertTrue(os.path.exists(fitimage_path), | ||
378 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
379 | |||
380 | # Check that the type, load address, entrypoint address and default | ||
381 | # values for kernel and ramdisk in Image Tree Source are as expected. | ||
382 | # The order of fields in the below array is important. Not all the | ||
383 | # fields are tested, only the key fields that wont vary between | ||
384 | # different architectures. | ||
385 | its_field_check = [ | ||
386 | 'description = "A model description";', | ||
387 | 'type = "standalone";', | ||
388 | 'load = <0x80080000>;', | ||
389 | 'entry = <0x80080000>;', | ||
390 | 'default = "conf";', | ||
391 | 'loadables = "uboot";', | ||
392 | 'fdt = "fdt";' | ||
393 | ] | ||
394 | |||
395 | with open(fitimage_its_path) as its_file: | ||
396 | field_index = 0 | ||
397 | for line in its_file: | ||
398 | if field_index == len(its_field_check): | ||
399 | break | ||
400 | if its_field_check[field_index] in line: | ||
401 | field_index +=1 | ||
402 | |||
403 | if field_index != len(its_field_check): # if its equal, the test passed | ||
404 | self.assertTrue(field_index == len(its_field_check), | ||
405 | "Fields in Image Tree Source File %s did not match, error in finding %s" | ||
406 | % (fitimage_its_path, its_field_check[field_index])) | ||
407 | |||
408 | |||
409 | def test_sign_standalone_uboot_fit_image(self): | ||
410 | """ | ||
411 | Summary: Check if U-Boot FIT image and Image Tree Source (its) are | ||
412 | created and signed correctly for the scenario where only | ||
413 | the U-Boot proper fitImage is being created and signed. | ||
414 | Expected: 1) U-Boot its and FIT image are built successfully | ||
415 | 2) Scanning the its file indicates signing is enabled | ||
416 | as requested by SPL_SIGN_ENABLE (using keys generated | ||
417 | via UBOOT_FIT_GENERATE_KEYS) | ||
418 | 3) Dumping the FIT image indicates signature values | ||
419 | are present | ||
420 | 4) Examination of the do_uboot_assemble_fitimage | ||
421 | runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN | ||
422 | and SPL_MKIMAGE_SIGN_ARGS are working as expected. | ||
423 | Product: oe-core | ||
424 | Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon | ||
425 | work by Paul Eggleton <paul.eggleton@microsoft.com> and | ||
426 | Usama Arif <usama.arif@arm.com> | ||
427 | """ | ||
428 | config = """ | ||
429 | # There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at | ||
430 | # least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set | ||
431 | MACHINE = "qemuarm" | ||
432 | UBOOT_MACHINE = "am57xx_evm_defconfig" | ||
433 | SPL_BINARY = "MLO" | ||
434 | # The kernel-fitimage class is a dependency even if we're only | ||
435 | # creating/signing the U-Boot fitImage | ||
436 | KERNEL_CLASSES = " kernel-fitimage" | ||
437 | INHERIT += "test-mkimage-wrapper" | ||
438 | # Enable creation and signing of the U-Boot fitImage | ||
439 | UBOOT_FITIMAGE_ENABLE = "1" | ||
440 | SPL_SIGN_ENABLE = "1" | ||
441 | SPL_SIGN_KEYNAME = "spl-oe-selftest" | ||
442 | SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | ||
443 | UBOOT_DTB_BINARY = "u-boot.dtb" | ||
444 | UBOOT_ENTRYPOINT = "0x80000000" | ||
445 | UBOOT_LOADADDRESS = "0x80000000" | ||
446 | UBOOT_DTB_LOADADDRESS = "0x82000000" | ||
447 | UBOOT_ARCH = "arm" | ||
448 | SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" | ||
449 | SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'" | ||
450 | UBOOT_EXTLINUX = "0" | ||
451 | UBOOT_FIT_GENERATE_KEYS = "1" | ||
452 | UBOOT_FIT_HASH_ALG = "sha256" | ||
453 | """ | ||
454 | self.write_config(config) | ||
455 | |||
456 | # The U-Boot fitImage is created as part of the U-Boot recipe | ||
457 | bitbake("virtual/bootloader") | ||
458 | |||
459 | image_type = "core-image-minimal" | ||
460 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
461 | machine = get_bb_var('MACHINE') | ||
462 | fitimage_its_path = os.path.join(deploy_dir_image, | ||
463 | "u-boot-its-%s" % (machine,)) | ||
464 | fitimage_path = os.path.join(deploy_dir_image, | ||
465 | "u-boot-fitImage-%s" % (machine,)) | ||
466 | |||
467 | self.assertTrue(os.path.exists(fitimage_its_path), | ||
468 | "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
469 | self.assertTrue(os.path.exists(fitimage_path), | ||
470 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
471 | |||
472 | req_itspaths = [ | ||
473 | ['/', 'images', 'uboot'], | ||
474 | ['/', 'images', 'uboot', 'signature'], | ||
475 | ['/', 'images', 'fdt'], | ||
476 | ['/', 'images', 'fdt', 'signature'], | ||
477 | ] | ||
478 | |||
479 | itspath = [] | ||
480 | itspaths = [] | ||
481 | linect = 0 | ||
482 | sigs = {} | ||
483 | with open(fitimage_its_path) as its_file: | ||
484 | linect += 1 | ||
485 | for line in its_file: | ||
486 | line = line.strip() | ||
487 | if line.endswith('};'): | ||
488 | itspath.pop() | ||
489 | elif line.endswith('{'): | ||
490 | itspath.append(line[:-1].strip()) | ||
491 | itspaths.append(itspath[:]) | ||
492 | elif itspath and itspath[-1] == 'signature': | ||
493 | itsdotpath = '.'.join(itspath) | ||
494 | if not itsdotpath in sigs: | ||
495 | sigs[itsdotpath] = {} | ||
496 | if not '=' in line or not line.endswith(';'): | ||
497 | self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line)) | ||
498 | key, value = line.split('=', 1) | ||
499 | sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';') | ||
500 | |||
501 | for reqpath in req_itspaths: | ||
502 | if not reqpath in itspaths: | ||
503 | self.fail('Missing section in its file: %s' % reqpath) | ||
504 | |||
505 | reqsigvalues_image = { | ||
506 | 'algo': '"sha256,rsa2048"', | ||
507 | 'key-name-hint': '"spl-oe-selftest"', | ||
508 | } | ||
509 | |||
510 | for itspath, values in sigs.items(): | ||
511 | reqsigvalues = reqsigvalues_image | ||
512 | for reqkey, reqvalue in reqsigvalues.items(): | ||
513 | value = values.get(reqkey, None) | ||
514 | if value is None: | ||
515 | self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath)) | ||
516 | self.assertEqual(value, reqvalue) | ||
517 | |||
518 | # Dump the image to see if it really got signed | ||
519 | bitbake("u-boot-tools-native -c addto_recipe_sysroot") | ||
520 | result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') | ||
521 | recipe_sysroot_native = result.output.split('=')[1].strip('"') | ||
522 | dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage') | ||
523 | result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) | ||
524 | in_signed = None | ||
525 | signed_sections = {} | ||
526 | for line in result.output.splitlines(): | ||
527 | if line.startswith((' Image')): | ||
528 | in_signed = re.search(r'\((.*)\)', line).groups()[0] | ||
529 | elif re.match(' \w', line): | ||
530 | in_signed = None | ||
531 | elif in_signed: | ||
532 | if not in_signed in signed_sections: | ||
533 | signed_sections[in_signed] = {} | ||
534 | key, value = line.split(':', 1) | ||
535 | signed_sections[in_signed][key.strip()] = value.strip() | ||
536 | self.assertIn('uboot', signed_sections) | ||
537 | self.assertIn('fdt', signed_sections) | ||
538 | for signed_section, values in signed_sections.items(): | ||
539 | value = values.get('Sign algo', None) | ||
540 | self.assertEqual(value, 'sha256,rsa2048:spl-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) | ||
541 | value = values.get('Sign value', None) | ||
542 | self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) | ||
543 | |||
544 | # Check for SPL_MKIMAGE_SIGN_ARGS | ||
545 | result = runCmd('bitbake -e virtual/bootloader | grep ^T=') | ||
546 | tempdir = result.output.split('=', 1)[1].strip().strip('') | ||
547 | result = runCmd('grep "a smart U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
548 | self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used') | ||
549 | |||
550 | # Check for evidence of test-mkimage-wrapper class | ||
551 | result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
552 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work') | ||
553 | result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
554 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work') | ||
555 | |||
556 | def test_sign_cascaded_uboot_fit_image(self): | ||
557 | """ | ||
558 | Summary: Check if U-Boot FIT image and Image Tree Source (its) are | ||
559 | created and signed correctly for the scenario where both | ||
560 | U-Boot proper and Kernel fitImages are being created and | ||
561 | signed. | ||
562 | Expected: 1) U-Boot its and FIT image are built successfully | ||
563 | 2) Scanning the its file indicates signing is enabled | ||
564 | as requested by SPL_SIGN_ENABLE (using keys generated | ||
565 | via UBOOT_FIT_GENERATE_KEYS) | ||
566 | 3) Dumping the FIT image indicates signature values | ||
567 | are present | ||
568 | 4) Examination of the do_uboot_assemble_fitimage | ||
569 | runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN | ||
570 | and SPL_MKIMAGE_SIGN_ARGS are working as expected. | ||
571 | Product: oe-core | ||
572 | Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon | ||
573 | work by Paul Eggleton <paul.eggleton@microsoft.com> and | ||
574 | Usama Arif <usama.arif@arm.com> | ||
575 | """ | ||
576 | config = """ | ||
577 | # There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at | ||
578 | # least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set | ||
579 | MACHINE = "qemuarm" | ||
580 | UBOOT_MACHINE = "am57xx_evm_defconfig" | ||
581 | SPL_BINARY = "MLO" | ||
582 | # Enable creation and signing of the U-Boot fitImage | ||
583 | UBOOT_FITIMAGE_ENABLE = "1" | ||
584 | SPL_SIGN_ENABLE = "1" | ||
585 | SPL_SIGN_KEYNAME = "spl-cascaded-oe-selftest" | ||
586 | SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | ||
587 | UBOOT_DTB_BINARY = "u-boot.dtb" | ||
588 | UBOOT_ENTRYPOINT = "0x80000000" | ||
589 | UBOOT_LOADADDRESS = "0x80000000" | ||
590 | UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" | ||
591 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded Kernel comment'" | ||
592 | UBOOT_DTB_LOADADDRESS = "0x82000000" | ||
593 | UBOOT_ARCH = "arm" | ||
594 | SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" | ||
595 | SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'" | ||
596 | UBOOT_EXTLINUX = "0" | ||
597 | UBOOT_FIT_GENERATE_KEYS = "1" | ||
598 | UBOOT_FIT_HASH_ALG = "sha256" | ||
599 | KERNEL_IMAGETYPES += " fitImage " | ||
600 | KERNEL_CLASSES = " kernel-fitimage " | ||
601 | INHERIT += "test-mkimage-wrapper" | ||
602 | UBOOT_SIGN_ENABLE = "1" | ||
603 | FIT_GENERATE_KEYS = "1" | ||
604 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | ||
605 | UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" | ||
606 | UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" | ||
607 | FIT_SIGN_INDIVIDUAL = "1" | ||
608 | """ | ||
609 | self.write_config(config) | ||
610 | |||
611 | # The U-Boot fitImage is created as part of the U-Boot recipe | ||
612 | bitbake("virtual/bootloader") | ||
613 | |||
614 | image_type = "core-image-minimal" | ||
615 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
616 | machine = get_bb_var('MACHINE') | ||
617 | fitimage_its_path = os.path.join(deploy_dir_image, | ||
618 | "u-boot-its-%s" % (machine,)) | ||
619 | fitimage_path = os.path.join(deploy_dir_image, | ||
620 | "u-boot-fitImage-%s" % (machine,)) | ||
621 | |||
622 | self.assertTrue(os.path.exists(fitimage_its_path), | ||
623 | "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
624 | self.assertTrue(os.path.exists(fitimage_path), | ||
625 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
626 | |||
627 | req_itspaths = [ | ||
628 | ['/', 'images', 'uboot'], | ||
629 | ['/', 'images', 'uboot', 'signature'], | ||
630 | ['/', 'images', 'fdt'], | ||
631 | ['/', 'images', 'fdt', 'signature'], | ||
632 | ] | ||
633 | |||
634 | itspath = [] | ||
635 | itspaths = [] | ||
636 | linect = 0 | ||
637 | sigs = {} | ||
638 | with open(fitimage_its_path) as its_file: | ||
639 | linect += 1 | ||
640 | for line in its_file: | ||
641 | line = line.strip() | ||
642 | if line.endswith('};'): | ||
643 | itspath.pop() | ||
644 | elif line.endswith('{'): | ||
645 | itspath.append(line[:-1].strip()) | ||
646 | itspaths.append(itspath[:]) | ||
647 | elif itspath and itspath[-1] == 'signature': | ||
648 | itsdotpath = '.'.join(itspath) | ||
649 | if not itsdotpath in sigs: | ||
650 | sigs[itsdotpath] = {} | ||
651 | if not '=' in line or not line.endswith(';'): | ||
652 | self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line)) | ||
653 | key, value = line.split('=', 1) | ||
654 | sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';') | ||
655 | |||
656 | for reqpath in req_itspaths: | ||
657 | if not reqpath in itspaths: | ||
658 | self.fail('Missing section in its file: %s' % reqpath) | ||
659 | |||
660 | reqsigvalues_image = { | ||
661 | 'algo': '"sha256,rsa2048"', | ||
662 | 'key-name-hint': '"spl-cascaded-oe-selftest"', | ||
663 | } | ||
664 | |||
665 | for itspath, values in sigs.items(): | ||
666 | reqsigvalues = reqsigvalues_image | ||
667 | for reqkey, reqvalue in reqsigvalues.items(): | ||
668 | value = values.get(reqkey, None) | ||
669 | if value is None: | ||
670 | self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath)) | ||
671 | self.assertEqual(value, reqvalue) | ||
672 | |||
673 | # Dump the image to see if it really got signed | ||
674 | bitbake("u-boot-tools-native -c addto_recipe_sysroot") | ||
675 | result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') | ||
676 | recipe_sysroot_native = result.output.split('=')[1].strip('"') | ||
677 | dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage') | ||
678 | result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) | ||
679 | in_signed = None | ||
680 | signed_sections = {} | ||
681 | for line in result.output.splitlines(): | ||
682 | if line.startswith((' Image')): | ||
683 | in_signed = re.search(r'\((.*)\)', line).groups()[0] | ||
684 | elif re.match(' \w', line): | ||
685 | in_signed = None | ||
686 | elif in_signed: | ||
687 | if not in_signed in signed_sections: | ||
688 | signed_sections[in_signed] = {} | ||
689 | key, value = line.split(':', 1) | ||
690 | signed_sections[in_signed][key.strip()] = value.strip() | ||
691 | self.assertIn('uboot', signed_sections) | ||
692 | self.assertIn('fdt', signed_sections) | ||
693 | for signed_section, values in signed_sections.items(): | ||
694 | value = values.get('Sign algo', None) | ||
695 | self.assertEqual(value, 'sha256,rsa2048:spl-cascaded-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) | ||
696 | value = values.get('Sign value', None) | ||
697 | self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) | ||
698 | |||
699 | # Check for SPL_MKIMAGE_SIGN_ARGS | ||
700 | result = runCmd('bitbake -e virtual/bootloader | grep ^T=') | ||
701 | tempdir = result.output.split('=', 1)[1].strip().strip('') | ||
702 | result = runCmd('grep "a smart cascaded U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
703 | self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used') | ||
704 | |||
705 | # Check for evidence of test-mkimage-wrapper class | ||
706 | result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
707 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work') | ||
708 | result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
709 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work') | ||
710 | |||
711 | |||
712 | |||
234 | def test_initramfs_bundle(self): | 713 | def test_initramfs_bundle(self): |
235 | """ | 714 | """ |
236 | Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its) | 715 | Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its) |
@@ -264,9 +743,11 @@ UBOOT_LOADADDRESS = "0x80000000" | |||
264 | UBOOT_DTB_LOADADDRESS = "0x82000000" | 743 | UBOOT_DTB_LOADADDRESS = "0x82000000" |
265 | UBOOT_ARCH = "arm" | 744 | UBOOT_ARCH = "arm" |
266 | UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" | 745 | UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" |
746 | UBOOT_MKIMAGE_KERNEL_TYPE = "kernel" | ||
267 | UBOOT_EXTLINUX = "0" | 747 | UBOOT_EXTLINUX = "0" |
268 | FIT_GENERATE_KEYS = "1" | 748 | FIT_GENERATE_KEYS = "1" |
269 | KERNEL_IMAGETYPE_REPLACEMENT = "zImage" | 749 | KERNEL_IMAGETYPE_REPLACEMENT = "zImage" |
750 | FIT_KERNEL_COMP_ALG = "none" | ||
270 | FIT_HASH_ALG = "sha256" | 751 | FIT_HASH_ALG = "sha256" |
271 | """ | 752 | """ |
272 | self.write_config(config) | 753 | self.write_config(config) |
@@ -288,9 +769,9 @@ FIT_HASH_ALG = "sha256" | |||
288 | 769 | ||
289 | kernel_load = str(get_bb_var('UBOOT_LOADADDRESS')) | 770 | kernel_load = str(get_bb_var('UBOOT_LOADADDRESS')) |
290 | kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT')) | 771 | kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT')) |
291 | initramfs_bundle_format = str(get_bb_var('KERNEL_IMAGETYPE_REPLACEMENT')) | 772 | kernel_type = str(get_bb_var('UBOOT_MKIMAGE_KERNEL_TYPE')) |
773 | kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG')) | ||
292 | uboot_arch = str(get_bb_var('UBOOT_ARCH')) | 774 | uboot_arch = str(get_bb_var('UBOOT_ARCH')) |
293 | initramfs_bundle = "arch/" + uboot_arch + "/boot/" + initramfs_bundle_format + ".initramfs" | ||
294 | fit_hash_alg = str(get_bb_var('FIT_HASH_ALG')) | 775 | fit_hash_alg = str(get_bb_var('FIT_HASH_ALG')) |
295 | 776 | ||
296 | its_file = open(fitimage_its_path) | 777 | its_file = open(fitimage_its_path) |
@@ -300,11 +781,11 @@ FIT_HASH_ALG = "sha256" | |||
300 | exp_node_lines = [ | 781 | exp_node_lines = [ |
301 | 'kernel-1 {', | 782 | 'kernel-1 {', |
302 | 'description = "Linux kernel";', | 783 | 'description = "Linux kernel";', |
303 | 'data = /incbin/("' + initramfs_bundle + '");', | 784 | 'data = /incbin/("linux.bin");', |
304 | 'type = "kernel";', | 785 | 'type = "' + kernel_type + '";', |
305 | 'arch = "' + uboot_arch + '";', | 786 | 'arch = "' + uboot_arch + '";', |
306 | 'os = "linux";', | 787 | 'os = "linux";', |
307 | 'compression = "none";', | 788 | 'compression = "' + kernel_compression + '";', |
308 | 'load = <' + kernel_load + '>;', | 789 | 'load = <' + kernel_load + '>;', |
309 | 'entry = <' + kernel_entry + '>;', | 790 | 'entry = <' + kernel_entry + '>;', |
310 | 'hash-1 {', | 791 | 'hash-1 {', |
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py index 3efe15228f..89360178fe 100644 --- a/meta/lib/oeqa/selftest/cases/gcc.py +++ b/meta/lib/oeqa/selftest/cases/gcc.py | |||
@@ -1,9 +1,14 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
1 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | ||
2 | import os | 6 | import os |
7 | import time | ||
3 | from oeqa.core.decorator import OETestTag | 8 | from oeqa.core.decorator import OETestTag |
4 | from oeqa.core.case import OEPTestResultTestCase | 9 | from oeqa.core.case import OEPTestResultTestCase |
5 | from oeqa.selftest.case import OESelftestTestCase | 10 | from oeqa.selftest.case import OESelftestTestCase |
6 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command | 11 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu |
7 | 12 | ||
8 | def parse_values(content): | 13 | def parse_values(content): |
9 | for i in content: | 14 | for i in content: |
@@ -39,8 +44,13 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
39 | self.write_config("\n".join(features)) | 44 | self.write_config("\n".join(features)) |
40 | 45 | ||
41 | recipe = "gcc-runtime" | 46 | recipe = "gcc-runtime" |
47 | |||
48 | start_time = time.time() | ||
49 | |||
42 | bitbake("{} -c check".format(recipe)) | 50 | bitbake("{} -c check".format(recipe)) |
43 | 51 | ||
52 | end_time = time.time() | ||
53 | |||
44 | bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe) | 54 | bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe) |
45 | builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"] | 55 | builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"] |
46 | 56 | ||
@@ -54,7 +64,7 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
54 | 64 | ||
55 | ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite | 65 | ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite |
56 | ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite | 66 | ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite |
57 | self.ptest_section(ptestsuite, logfile = logpath) | 67 | self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath) |
58 | with open(sumspath, "r") as f: | 68 | with open(sumspath, "r") as f: |
59 | for test, result in parse_values(f): | 69 | for test, result in parse_values(f): |
60 | self.ptest_result(ptestsuite, test, result) | 70 | self.ptest_result(ptestsuite, test, result) |
@@ -114,37 +124,44 @@ class GccLibItmSelfTest(GccSelfTestBase): | |||
114 | self.run_check("libitm") | 124 | self.run_check("libitm") |
115 | 125 | ||
116 | @OETestTag("toolchain-system") | 126 | @OETestTag("toolchain-system") |
127 | @OETestTag("runqemu") | ||
117 | class GccCrossSelfTestSystemEmulated(GccSelfTestBase): | 128 | class GccCrossSelfTestSystemEmulated(GccSelfTestBase): |
118 | def test_cross_gcc(self): | 129 | def test_cross_gcc(self): |
119 | self.run_check_emulated("gcc") | 130 | self.run_check_emulated("gcc") |
120 | 131 | ||
121 | @OETestTag("toolchain-system") | 132 | @OETestTag("toolchain-system") |
133 | @OETestTag("runqemu") | ||
122 | class GxxCrossSelfTestSystemEmulated(GccSelfTestBase): | 134 | class GxxCrossSelfTestSystemEmulated(GccSelfTestBase): |
123 | def test_cross_gxx(self): | 135 | def test_cross_gxx(self): |
124 | self.run_check_emulated("g++") | 136 | self.run_check_emulated("g++") |
125 | 137 | ||
126 | @OETestTag("toolchain-system") | 138 | @OETestTag("toolchain-system") |
139 | @OETestTag("runqemu") | ||
127 | class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase): | 140 | class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase): |
128 | def test_libatomic(self): | 141 | def test_libatomic(self): |
129 | self.run_check_emulated("libatomic") | 142 | self.run_check_emulated("libatomic") |
130 | 143 | ||
131 | @OETestTag("toolchain-system") | 144 | @OETestTag("toolchain-system") |
145 | @OETestTag("runqemu") | ||
132 | class GccLibGompSelfTestSystemEmulated(GccSelfTestBase): | 146 | class GccLibGompSelfTestSystemEmulated(GccSelfTestBase): |
133 | def test_libgomp(self): | 147 | def test_libgomp(self): |
134 | self.run_check_emulated("libgomp") | 148 | self.run_check_emulated("libgomp") |
135 | 149 | ||
136 | @OETestTag("toolchain-system") | 150 | @OETestTag("toolchain-system") |
151 | @OETestTag("runqemu") | ||
137 | class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase): | 152 | class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase): |
138 | def test_libstdcxx(self): | 153 | def test_libstdcxx(self): |
139 | self.run_check_emulated("libstdc++-v3") | 154 | self.run_check_emulated("libstdc++-v3") |
140 | 155 | ||
141 | @OETestTag("toolchain-system") | 156 | @OETestTag("toolchain-system") |
157 | @OETestTag("runqemu") | ||
142 | class GccLibSspSelfTestSystemEmulated(GccSelfTestBase): | 158 | class GccLibSspSelfTestSystemEmulated(GccSelfTestBase): |
143 | def test_libssp(self): | 159 | def test_libssp(self): |
144 | self.check_skip("libssp") | 160 | self.check_skip("libssp") |
145 | self.run_check_emulated("libssp") | 161 | self.run_check_emulated("libssp") |
146 | 162 | ||
147 | @OETestTag("toolchain-system") | 163 | @OETestTag("toolchain-system") |
164 | @OETestTag("runqemu") | ||
148 | class GccLibItmSelfTestSystemEmulated(GccSelfTestBase): | 165 | class GccLibItmSelfTestSystemEmulated(GccSelfTestBase): |
149 | def test_libitm(self): | 166 | def test_libitm(self): |
150 | self.check_skip("libitm") | 167 | self.check_skip("libitm") |
diff --git a/meta/lib/oeqa/selftest/cases/gdbserver.py b/meta/lib/oeqa/selftest/cases/gdbserver.py new file mode 100644 index 0000000000..9da97ae780 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/gdbserver.py | |||
@@ -0,0 +1,67 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | import os | ||
7 | import time | ||
8 | import tempfile | ||
9 | import shutil | ||
10 | import concurrent.futures | ||
11 | |||
12 | from oeqa.selftest.case import OESelftestTestCase | ||
13 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars , runqemu, runCmd | ||
14 | |||
15 | class GdbServerTest(OESelftestTestCase): | ||
16 | def test_gdb_server(self): | ||
17 | target_arch = self.td["TARGET_ARCH"] | ||
18 | target_sys = self.td["TARGET_SYS"] | ||
19 | |||
20 | features = """ | ||
21 | IMAGE_GEN_DEBUGFS = "1" | ||
22 | IMAGE_FSTYPES_DEBUGFS = "tar.bz2" | ||
23 | CORE_IMAGE_EXTRA_INSTALL = "gdbserver" | ||
24 | """ | ||
25 | self.write_config(features) | ||
26 | |||
27 | gdb_recipe = "gdb-cross-" + target_arch | ||
28 | gdb_binary = target_sys + "-gdb" | ||
29 | |||
30 | bitbake("core-image-minimal %s:do_addto_recipe_sysroot" % gdb_recipe) | ||
31 | |||
32 | native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe) | ||
33 | r = runCmd("%s --version" % gdb_binary, native_sysroot=native_sysroot, target_sys=target_sys) | ||
34 | self.assertEqual(r.status, 0) | ||
35 | self.assertIn("GNU gdb", r.output) | ||
36 | image = 'core-image-minimal' | ||
37 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) | ||
38 | |||
39 | with tempfile.TemporaryDirectory(prefix="debugfs-") as debugfs: | ||
40 | filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.tar.bz2" % bb_vars['IMAGE_LINK_NAME']) | ||
41 | shutil.unpack_archive(filename, debugfs) | ||
42 | filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.tar.bz2" % bb_vars['IMAGE_LINK_NAME']) | ||
43 | shutil.unpack_archive(filename, debugfs) | ||
44 | |||
45 | with runqemu("core-image-minimal", runqemuparams="nographic") as qemu: | ||
46 | status, output = qemu.run_serial("kmod --help") | ||
47 | self.assertIn("modprobe", output) | ||
48 | |||
49 | with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: | ||
50 | def run_gdb(): | ||
51 | for _ in range(5): | ||
52 | time.sleep(2) | ||
53 | cmd = "%s --batch -ex 'set sysroot %s' -ex \"target extended-remote %s:9999\" -ex \"info line kmod_help\"" % (gdb_binary, debugfs, qemu.ip) | ||
54 | self.logger.warning("starting gdb %s" % cmd) | ||
55 | r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys) | ||
56 | self.assertEqual(0, r.status) | ||
57 | line_re = r"Line \d+ of \"/usr/src/debug/kmod/.*/tools/kmod.c\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>" | ||
58 | self.assertRegex(r.output, line_re) | ||
59 | break | ||
60 | else: | ||
61 | self.fail("Timed out connecting to gdb") | ||
62 | future = executor.submit(run_gdb) | ||
63 | |||
64 | status, output = qemu.run_serial("gdbserver --once :9999 kmod --help") | ||
65 | self.assertEqual(status, 1) | ||
66 | # The future either returns None, or raises an exception | ||
67 | future.result() | ||
diff --git a/meta/lib/oeqa/selftest/cases/gitarchivetests.py b/meta/lib/oeqa/selftest/cases/gitarchivetests.py new file mode 100644 index 0000000000..71382089c1 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/gitarchivetests.py | |||
@@ -0,0 +1,136 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | import os | ||
8 | import sys | ||
9 | basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../') | ||
10 | lib_path = basepath + '/scripts/lib' | ||
11 | sys.path = sys.path + [lib_path] | ||
12 | import oeqa.utils.gitarchive as ga | ||
13 | from oeqa.utils.git import GitError | ||
14 | import tempfile | ||
15 | import shutil | ||
16 | import scriptutils | ||
17 | import logging | ||
18 | from oeqa.selftest.case import OESelftestTestCase | ||
19 | |||
20 | logger = scriptutils.logger_create('resulttool') | ||
21 | |||
22 | def create_fake_repository(commit, tag_list=[], add_remote=True): | ||
23 | """ Create a testing git directory | ||
24 | |||
25 | Initialize a simple git repository with one initial commit, and as many | ||
26 | tags on this commit as listed in tag_list | ||
27 | Returns both git directory path and gitarchive git object | ||
28 | If commit is true, fake data will be committed, otherwise it will stay in staging area | ||
29 | If commit is true and tag_list is non-empty, all tags in tag_list will be | ||
30 | created on the initial commit | ||
31 | Fake remote will also be added to make git ls-remote work | ||
32 | """ | ||
33 | fake_data_file = "fake_data.txt" | ||
34 | tempdir = tempfile.mkdtemp(prefix='fake_results.') | ||
35 | repo = ga.init_git_repo(tempdir, False, False, logger) | ||
36 | if add_remote: | ||
37 | repo.run_cmd(["remote", "add", "origin", "."]) | ||
38 | with open(os.path.join(tempdir, fake_data_file), "w") as fake_data: | ||
39 | fake_data.write("Fake data") | ||
40 | if commit: | ||
41 | repo.run_cmd(["add", fake_data_file]) | ||
42 | repo.run_cmd(["commit", "-m", "\"Add fake data\""]) | ||
43 | for tag in tag_list: | ||
44 | repo.run_cmd(["tag", tag]) | ||
45 | |||
46 | return tempdir, repo | ||
47 | |||
48 | def delete_fake_repository(path): | ||
49 | shutil.rmtree(path) | ||
50 | |||
51 | def tag_exists(git_obj, target_tag): | ||
52 | for tag in git_obj.run_cmd(["tag"]).splitlines(): | ||
53 | if target_tag == tag: | ||
54 | return True | ||
55 | return False | ||
56 | |||
57 | class GitArchiveTests(OESelftestTestCase): | ||
58 | TEST_BRANCH="main" | ||
59 | TEST_COMMIT="0f7d5df" | ||
60 | TEST_COMMIT_COUNT="42" | ||
61 | |||
62 | @classmethod | ||
63 | def setUpClass(cls): | ||
64 | super().setUpClass() | ||
65 | cls.log = logging.getLogger('gitarchivetests') | ||
66 | cls.log.setLevel(logging.DEBUG) | ||
67 | |||
68 | def test_create_first_test_tag(self): | ||
69 | path, git_obj = create_fake_repository(False) | ||
70 | keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT} | ||
71 | target_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0" | ||
72 | |||
73 | ga.gitarchive(path, path, True, False, | ||
74 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", | ||
75 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", | ||
76 | 'Test run #{tag_number} of {branch}:{commit}', '', | ||
77 | [], [], False, keywords, logger) | ||
78 | self.assertTrue(tag_exists(git_obj, target_tag), msg=f"Tag {target_tag} has not been created") | ||
79 | delete_fake_repository(path) | ||
80 | |||
81 | def test_create_second_test_tag(self): | ||
82 | first_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0" | ||
83 | second_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/1" | ||
84 | keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT} | ||
85 | |||
86 | path, git_obj = create_fake_repository(True, [first_tag]) | ||
87 | ga.gitarchive(path, path, True, False, | ||
88 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", | ||
89 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", | ||
90 | 'Test run #{tag_number} of {branch}:{commit}', '', | ||
91 | [], [], False, keywords, logger) | ||
92 | self.assertTrue(tag_exists(git_obj, second_tag), msg=f"Second tag {second_tag} has not been created") | ||
93 | delete_fake_repository(path) | ||
94 | |||
95 | def test_get_revs_on_branch(self): | ||
96 | fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"] | ||
97 | tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}" | ||
98 | |||
99 | path, git_obj = create_fake_repository(True, fake_tags_list) | ||
100 | revs = ga.get_test_revs(logger, git_obj, tag_name, branch="main") | ||
101 | self.assertEqual(len(revs), 1) | ||
102 | self.assertEqual(revs[0].commit, "0f7d5df") | ||
103 | self.assertEqual(len(revs[0].tags), 2) | ||
104 | self.assertEqual(revs[0].tags, ['main/10-g0f7d5df/0', 'main/10-g0f7d5df/1']) | ||
105 | delete_fake_repository(path) | ||
106 | |||
107 | def test_get_tags_without_valid_remote(self): | ||
108 | url = 'git://git.yoctoproject.org/poky' | ||
109 | path, git_obj = create_fake_repository(False, None, False) | ||
110 | |||
111 | tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url) | ||
112 | """Test for some well established tags (released tags)""" | ||
113 | self.assertIn("yocto-4.0", tags) | ||
114 | self.assertIn("yocto-4.1", tags) | ||
115 | self.assertIn("yocto-4.2", tags) | ||
116 | delete_fake_repository(path) | ||
117 | |||
118 | def test_get_tags_with_only_local_tag(self): | ||
119 | fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"] | ||
120 | path, git_obj = create_fake_repository(True, fake_tags_list, False) | ||
121 | |||
122 | """No remote is configured and no url is passed: get_tags must fall | ||
123 | back to local tags | ||
124 | """ | ||
125 | tags = ga.get_tags(git_obj, self.log) | ||
126 | self.assertCountEqual(tags, fake_tags_list) | ||
127 | delete_fake_repository(path) | ||
128 | |||
129 | def test_get_tags_without_valid_remote_and_wrong_url(self): | ||
130 | url = 'git://git.foo.org/bar' | ||
131 | path, git_obj = create_fake_repository(False, None, False) | ||
132 | |||
133 | """Test for some well established tags (released tags)""" | ||
134 | with self.assertRaises(GitError): | ||
135 | tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url) | ||
136 | delete_fake_repository(path) | ||
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py index c687f6ef93..bd56b2f6e7 100644 --- a/meta/lib/oeqa/selftest/cases/glibc.py +++ b/meta/lib/oeqa/selftest/cases/glibc.py | |||
@@ -1,10 +1,15 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
1 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | ||
2 | import os | 6 | import os |
7 | import time | ||
3 | import contextlib | 8 | import contextlib |
4 | from oeqa.core.decorator import OETestTag | 9 | from oeqa.core.decorator import OETestTag |
5 | from oeqa.core.case import OEPTestResultTestCase | 10 | from oeqa.core.case import OEPTestResultTestCase |
6 | from oeqa.selftest.case import OESelftestTestCase | 11 | from oeqa.selftest.case import OESelftestTestCase |
7 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command | 12 | from oeqa.utils.commands import bitbake, get_bb_var, runqemu |
8 | from oeqa.utils.nfs import unfs_server | 13 | from oeqa.utils.nfs import unfs_server |
9 | 14 | ||
10 | def parse_values(content): | 15 | def parse_values(content): |
@@ -24,16 +29,20 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
24 | features.append('TOOLCHAIN_TEST_HOST_USER = "root"') | 29 | features.append('TOOLCHAIN_TEST_HOST_USER = "root"') |
25 | features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') | 30 | features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') |
26 | # force single threaded test execution | 31 | # force single threaded test execution |
27 | features.append('EGLIBCPARALLELISM_task-check_pn-glibc-testsuite = "PARALLELMFLAGS="-j1""') | 32 | features.append('EGLIBCPARALLELISM:task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""') |
28 | self.write_config("\n".join(features)) | 33 | self.write_config("\n".join(features)) |
29 | 34 | ||
35 | start_time = time.time() | ||
36 | |||
30 | bitbake("glibc-testsuite -c check") | 37 | bitbake("glibc-testsuite -c check") |
31 | 38 | ||
39 | end_time = time.time() | ||
40 | |||
32 | builddir = get_bb_var("B", "glibc-testsuite") | 41 | builddir = get_bb_var("B", "glibc-testsuite") |
33 | 42 | ||
34 | ptestsuite = "glibc-user" if ssh is None else "glibc" | 43 | ptestsuite = "glibc-user" if ssh is None else "glibc" |
35 | self.ptest_section(ptestsuite) | 44 | self.ptest_section(ptestsuite, duration = int(end_time - start_time)) |
36 | with open(os.path.join(builddir, "tests.sum"), "r") as f: | 45 | with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f: |
37 | for test, result in parse_values(f): | 46 | for test, result in parse_values(f): |
38 | self.ptest_result(ptestsuite, test, result) | 47 | self.ptest_result(ptestsuite, test, result) |
39 | 48 | ||
@@ -41,7 +50,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
41 | with contextlib.ExitStack() as s: | 50 | with contextlib.ExitStack() as s: |
42 | # use the base work dir, as the nfs mount, since the recipe directory may not exist | 51 | # use the base work dir, as the nfs mount, since the recipe directory may not exist |
43 | tmpdir = get_bb_var("BASE_WORKDIR") | 52 | tmpdir = get_bb_var("BASE_WORKDIR") |
44 | nfsport, mountport = s.enter_context(unfs_server(tmpdir)) | 53 | nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False)) |
45 | 54 | ||
46 | # build core-image-minimal with required packages | 55 | # build core-image-minimal with required packages |
47 | default_installed_packages = [ | 56 | default_installed_packages = [ |
@@ -61,7 +70,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
61 | bitbake("core-image-minimal") | 70 | bitbake("core-image-minimal") |
62 | 71 | ||
63 | # start runqemu | 72 | # start runqemu |
64 | qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic")) | 73 | qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024")) |
65 | 74 | ||
66 | # validate that SSH is working | 75 | # validate that SSH is working |
67 | status, _ = qemu.run("uname") | 76 | status, _ = qemu.run("uname") |
@@ -70,7 +79,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
70 | # setup nfs mount | 79 | # setup nfs mount |
71 | if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0: | 80 | if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0: |
72 | raise Exception("Failed to setup NFS mount directory on target") | 81 | raise Exception("Failed to setup NFS mount directory on target") |
73 | mountcmd = "mount -o noac,nfsvers=3,port={0},udp,mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir) | 82 | mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir) |
74 | status, output = qemu.run(mountcmd) | 83 | status, output = qemu.run(mountcmd) |
75 | if status != 0: | 84 | if status != 0: |
76 | raise Exception("Failed to setup NFS mount on target ({})".format(repr(output))) | 85 | raise Exception("Failed to setup NFS mount on target ({})".format(repr(output))) |
@@ -83,6 +92,7 @@ class GlibcSelfTest(GlibcSelfTestBase): | |||
83 | self.run_check() | 92 | self.run_check() |
84 | 93 | ||
85 | @OETestTag("toolchain-system") | 94 | @OETestTag("toolchain-system") |
95 | @OETestTag("runqemu") | ||
86 | class GlibcSelfTestSystemEmulated(GlibcSelfTestBase): | 96 | class GlibcSelfTestSystemEmulated(GlibcSelfTestBase): |
87 | def test_glibc(self): | 97 | def test_glibc(self): |
88 | self.run_check_emulated() | 98 | self.run_check_emulated() |
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py index 4fc3605f42..ee2cf4b09a 100644 --- a/meta/lib/oeqa/selftest/cases/gotoolchain.py +++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -50,6 +52,9 @@ class oeGoToolchainSelfTest(OESelftestTestCase): | |||
50 | cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name) | 52 | cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name) |
51 | cmd = cmd + ". %s; " % self.env_SDK | 53 | cmd = cmd + ". %s; " % self.env_SDK |
52 | cmd = cmd + "export GOPATH=%s; " % self.go_path | 54 | cmd = cmd + "export GOPATH=%s; " % self.go_path |
55 | cmd = cmd + "export GOFLAGS=-modcacherw; " | ||
56 | cmd = cmd + "export CGO_ENABLED=1; " | ||
57 | cmd = cmd + "export GOPROXY=https://proxy.golang.org,direct; " | ||
53 | cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd | 58 | cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd |
54 | return runCmd(cmd).status | 59 | return runCmd(cmd).status |
55 | 60 | ||
diff --git a/meta/lib/oeqa/selftest/cases/image_typedep.py b/meta/lib/oeqa/selftest/cases/image_typedep.py index 52e1080f13..17c98baf14 100644 --- a/meta/lib/oeqa/selftest/cases/image_typedep.py +++ b/meta/lib/oeqa/selftest/cases/image_typedep.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -9,7 +11,7 @@ from oeqa.utils.commands import bitbake | |||
9 | 11 | ||
10 | class ImageTypeDepTests(OESelftestTestCase): | 12 | class ImageTypeDepTests(OESelftestTestCase): |
11 | 13 | ||
12 | # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that | 14 | # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that |
13 | # the conversion type bar gets added as a dep as well | 15 | # the conversion type bar gets added as a dep as well |
14 | def test_conversion_typedep_added(self): | 16 | def test_conversion_typedep_added(self): |
15 | 17 | ||
@@ -22,7 +24,7 @@ LICENSE = "MIT" | |||
22 | IMAGE_FSTYPES = "testfstype" | 24 | IMAGE_FSTYPES = "testfstype" |
23 | 25 | ||
24 | IMAGE_TYPES_MASKED += "testfstype" | 26 | IMAGE_TYPES_MASKED += "testfstype" |
25 | IMAGE_TYPEDEP_testfstype = "tar.bz2" | 27 | IMAGE_TYPEDEP:testfstype = "tar.bz2" |
26 | 28 | ||
27 | inherit image | 29 | inherit image |
28 | 30 | ||
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py index 6723a8198f..dc88c222bd 100644 --- a/meta/lib/oeqa/selftest/cases/imagefeatures.py +++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py | |||
@@ -1,9 +1,12 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | from oeqa.selftest.case import OESelftestTestCase | 7 | from oeqa.selftest.case import OESelftestTestCase |
6 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu | 8 | from oeqa.core.decorator import OETestTag |
9 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu | ||
7 | from oeqa.utils.sshcontrol import SSHControl | 10 | from oeqa.utils.sshcontrol import SSHControl |
8 | import glob | 11 | import glob |
9 | import os | 12 | import os |
@@ -14,6 +17,7 @@ class ImageFeatures(OESelftestTestCase): | |||
14 | test_user = 'tester' | 17 | test_user = 'tester' |
15 | root_user = 'root' | 18 | root_user = 'root' |
16 | 19 | ||
20 | @OETestTag("runqemu") | ||
17 | def test_non_root_user_can_connect_via_ssh_without_password(self): | 21 | def test_non_root_user_can_connect_via_ssh_without_password(self): |
18 | """ | 22 | """ |
19 | Summary: Check if non root user can connect via ssh without password | 23 | Summary: Check if non root user can connect via ssh without password |
@@ -39,6 +43,7 @@ class ImageFeatures(OESelftestTestCase): | |||
39 | status, output = ssh.run("true") | 43 | status, output = ssh.run("true") |
40 | self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output)) | 44 | self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output)) |
41 | 45 | ||
46 | @OETestTag("runqemu") | ||
42 | def test_all_users_can_connect_via_ssh_without_password(self): | 47 | def test_all_users_can_connect_via_ssh_without_password(self): |
43 | """ | 48 | """ |
44 | Summary: Check if all users can connect via ssh without password | 49 | Summary: Check if all users can connect via ssh without password |
@@ -68,18 +73,6 @@ class ImageFeatures(OESelftestTestCase): | |||
68 | self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output) | 73 | self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output) |
69 | 74 | ||
70 | 75 | ||
71 | def test_clutter_image_can_be_built(self): | ||
72 | """ | ||
73 | Summary: Check if clutter image can be built | ||
74 | Expected: 1. core-image-clutter can be built | ||
75 | Product: oe-core | ||
76 | Author: Ionut Chisanovici <ionutx.chisanovici@intel.com> | ||
77 | AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com> | ||
78 | """ | ||
79 | |||
80 | # Build a core-image-clutter | ||
81 | bitbake('core-image-clutter') | ||
82 | |||
83 | def test_wayland_support_in_image(self): | 76 | def test_wayland_support_in_image(self): |
84 | """ | 77 | """ |
85 | Summary: Check Wayland support in image | 78 | Summary: Check Wayland support in image |
@@ -109,12 +102,11 @@ class ImageFeatures(OESelftestTestCase): | |||
109 | features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"' | 102 | features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"' |
110 | self.write_config(features) | 103 | self.write_config(features) |
111 | 104 | ||
112 | image_name = 'core-image-minimal' | 105 | image = 'core-image-minimal' |
113 | bitbake(image_name) | 106 | bitbake(image) |
107 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) | ||
114 | 108 | ||
115 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | 109 | image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4" % bb_vars['IMAGE_LINK_NAME']) |
116 | link_name = get_bb_var('IMAGE_LINK_NAME', image_name) | ||
117 | image_path = os.path.join(deploy_dir_image, "%s.ext4" % link_name) | ||
118 | bmap_path = "%s.bmap" % image_path | 110 | bmap_path = "%s.bmap" % image_path |
119 | gzip_path = "%s.gz" % bmap_path | 111 | gzip_path = "%s.gz" % bmap_path |
120 | 112 | ||
@@ -127,8 +119,8 @@ class ImageFeatures(OESelftestTestCase): | |||
127 | image_stat = os.stat(image_path) | 119 | image_stat = os.stat(image_path) |
128 | self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512) | 120 | self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512) |
129 | 121 | ||
130 | # check if the resulting gzip is valid | 122 | # check if the resulting gzip is valid, --force is needed in case gzip_path is a symlink |
131 | self.assertTrue(runCmd('gzip -t %s' % gzip_path)) | 123 | self.assertTrue(runCmd('gzip --test --force %s' % gzip_path)) |
132 | 124 | ||
133 | def test_hypervisor_fmts(self): | 125 | def test_hypervisor_fmts(self): |
134 | """ | 126 | """ |
@@ -143,17 +135,16 @@ class ImageFeatures(OESelftestTestCase): | |||
143 | img_types = [ 'vmdk', 'vdi', 'qcow2' ] | 135 | img_types = [ 'vmdk', 'vdi', 'qcow2' ] |
144 | features = "" | 136 | features = "" |
145 | for itype in img_types: | 137 | for itype in img_types: |
146 | features += 'IMAGE_FSTYPES += "wic.%s"\n' % itype | 138 | features += 'IMAGE_FSTYPES += "ext4.%s"\n' % itype |
147 | self.write_config(features) | 139 | self.write_config(features) |
148 | 140 | ||
149 | image_name = 'core-image-minimal' | 141 | image = 'core-image-minimal' |
150 | bitbake(image_name) | 142 | bitbake(image) |
143 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) | ||
151 | 144 | ||
152 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
153 | link_name = get_bb_var('IMAGE_LINK_NAME', image_name) | ||
154 | for itype in img_types: | 145 | for itype in img_types: |
155 | image_path = os.path.join(deploy_dir_image, "%s.wic.%s" % | 146 | image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4.%s" % |
156 | (link_name, itype)) | 147 | (bb_vars['IMAGE_LINK_NAME'], itype)) |
157 | 148 | ||
158 | # check if result image file is in deploy directory | 149 | # check if result image file is in deploy directory |
159 | self.assertTrue(os.path.exists(image_path)) | 150 | self.assertTrue(os.path.exists(image_path)) |
@@ -173,24 +164,22 @@ class ImageFeatures(OESelftestTestCase): | |||
173 | """ | 164 | """ |
174 | Summary: Check for chaining many CONVERSION_CMDs together | 165 | Summary: Check for chaining many CONVERSION_CMDs together |
175 | Expected: 1. core-image-minimal can be built with | 166 | Expected: 1. core-image-minimal can be built with |
176 | ext4.bmap.gz.bz2.lzo.xz.u-boot and also create a | 167 | ext4.bmap.gz.bz2.zst.xz.u-boot and also create a |
177 | sha256sum | 168 | sha256sum |
178 | 2. The above image has a valid sha256sum | 169 | 2. The above image has a valid sha256sum |
179 | Product: oe-core | 170 | Product: oe-core |
180 | Author: Tom Rini <trini@konsulko.com> | 171 | Author: Tom Rini <trini@konsulko.com> |
181 | """ | 172 | """ |
182 | 173 | ||
183 | conv = "ext4.bmap.gz.bz2.lzo.xz.u-boot" | 174 | conv = "ext4.bmap.gz.bz2.zst.xz.u-boot" |
184 | features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv) | 175 | features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv) |
185 | self.write_config(features) | 176 | self.write_config(features) |
186 | 177 | ||
187 | image_name = 'core-image-minimal' | 178 | image = 'core-image-minimal' |
188 | bitbake(image_name) | 179 | bitbake(image) |
189 | 180 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) | |
190 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | 181 | image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" % |
191 | link_name = get_bb_var('IMAGE_LINK_NAME', image_name) | 182 | (bb_vars['IMAGE_LINK_NAME'], conv)) |
192 | image_path = os.path.join(deploy_dir_image, "%s.%s" % | ||
193 | (link_name, conv)) | ||
194 | 183 | ||
195 | # check if resulting image is in the deploy directory | 184 | # check if resulting image is in the deploy directory |
196 | self.assertTrue(os.path.exists(image_path)) | 185 | self.assertTrue(os.path.exists(image_path)) |
@@ -198,7 +187,7 @@ class ImageFeatures(OESelftestTestCase): | |||
198 | 187 | ||
199 | # check if the resulting sha256sum agrees | 188 | # check if the resulting sha256sum agrees |
200 | self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' % | 189 | self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' % |
201 | (deploy_dir_image, link_name, conv))) | 190 | (bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'], conv))) |
202 | 191 | ||
203 | def test_image_fstypes(self): | 192 | def test_image_fstypes(self): |
204 | """ | 193 | """ |
@@ -207,26 +196,43 @@ class ImageFeatures(OESelftestTestCase): | |||
207 | Product: oe-core | 196 | Product: oe-core |
208 | Author: Ed Bartosh <ed.bartosh@linux.intel.com> | 197 | Author: Ed Bartosh <ed.bartosh@linux.intel.com> |
209 | """ | 198 | """ |
210 | image_name = 'core-image-minimal' | 199 | image = 'core-image-minimal' |
211 | 200 | ||
212 | all_image_types = set(get_bb_var("IMAGE_TYPES", image_name).split()) | 201 | all_image_types = set(get_bb_var("IMAGE_TYPES", image).split()) |
213 | blacklist = set(('container', 'elf', 'f2fs', 'multiubi', 'tar.zst', 'wic.zst')) | 202 | skip_image_types = set(('container', 'elf', 'f2fs', 'tar.zst', 'wic.zst', 'squashfs-lzo', 'vfat')) |
214 | img_types = all_image_types - blacklist | 203 | img_types = all_image_types - skip_image_types |
215 | 204 | ||
216 | config = 'IMAGE_FSTYPES += "%s"\n'\ | 205 | config = """ |
217 | 'MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"\n'\ | 206 | IMAGE_FSTYPES += "%s" |
218 | 'UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"' % ' '.join(img_types) | 207 | WKS_FILE = "wictestdisk.wks" |
208 | MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047" | ||
209 | UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512" | ||
210 | MULTIUBI_BUILD += "mtd_2_128" | ||
211 | MKUBIFS_ARGS_mtd_2_128 ?= "-m 2048 -e 129024 -c 2047" | ||
212 | UBINIZE_ARGS_mtd_2_128 ?= "-m 2048 -p 128KiB -s 512" | ||
213 | MULTIUBI_BUILD += "mtd_4_256" | ||
214 | MKUBIFS_ARGS_mtd_4_256 ?= "-m 4096 -e 253952 -c 4096" | ||
215 | UBINIZE_ARGS_mtd_4_256 ?= "-m 4096 -p 256KiB" | ||
216 | """ % ' '.join(img_types) | ||
219 | self.write_config(config) | 217 | self.write_config(config) |
220 | 218 | ||
221 | bitbake(image_name) | 219 | bitbake(image) |
220 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'MULTIUBI_BUILD'], image) | ||
222 | 221 | ||
223 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
224 | link_name = get_bb_var('IMAGE_LINK_NAME', image_name) | ||
225 | for itype in img_types: | 222 | for itype in img_types: |
226 | image_path = os.path.join(deploy_dir_image, "%s.%s" % (link_name, itype)) | 223 | if itype == 'multiubi': |
227 | # check if result image is in deploy directory | 224 | # For multiubi build we need to manage MULTIUBI_BUILD entry to append |
228 | self.assertTrue(os.path.exists(image_path), | 225 | # specific name to IMAGE_LINK_NAME |
229 | "%s image %s doesn't exist" % (itype, image_path)) | 226 | for vname in bb_vars['MULTIUBI_BUILD'].split(): |
227 | image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s_%s.ubifs" % (bb_vars['IMAGE_LINK_NAME'], vname)) | ||
228 | # check if result image is in deploy directory | ||
229 | self.assertTrue(os.path.exists(image_path), | ||
230 | "%s image %s doesn't exist" % (itype, image_path)) | ||
231 | else: | ||
232 | image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" % (bb_vars['IMAGE_LINK_NAME'], itype)) | ||
233 | # check if result image is in deploy directory | ||
234 | self.assertTrue(os.path.exists(image_path), | ||
235 | "%s image %s doesn't exist" % (itype, image_path)) | ||
230 | 236 | ||
231 | def test_useradd_static(self): | 237 | def test_useradd_static(self): |
232 | config = """ | 238 | config = """ |
@@ -240,11 +246,11 @@ USERADD_GID_TABLES += "files/static-group" | |||
240 | 246 | ||
241 | def test_no_busybox_base_utils(self): | 247 | def test_no_busybox_base_utils(self): |
242 | config = """ | 248 | config = """ |
243 | # Enable x11 | 249 | # Enable wayland |
244 | DISTRO_FEATURES_append += "x11" | 250 | DISTRO_FEATURES:append = " pam opengl wayland" |
245 | 251 | ||
246 | # Switch to systemd | 252 | # Switch to systemd |
247 | DISTRO_FEATURES += "systemd" | 253 | DISTRO_FEATURES:append = " systemd usrmerge" |
248 | VIRTUAL-RUNTIME_init_manager = "systemd" | 254 | VIRTUAL-RUNTIME_init_manager = "systemd" |
249 | VIRTUAL-RUNTIME_initscripts = "" | 255 | VIRTUAL-RUNTIME_initscripts = "" |
250 | VIRTUAL-RUNTIME_syslog = "" | 256 | VIRTUAL-RUNTIME_syslog = "" |
@@ -257,12 +263,12 @@ VIRTUAL-RUNTIME_base-utils = "packagegroup-core-base-utils" | |||
257 | VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock" | 263 | VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock" |
258 | VIRTUAL-RUNTIME_base-utils-syslog = "" | 264 | VIRTUAL-RUNTIME_base-utils-syslog = "" |
259 | 265 | ||
260 | # Blacklist busybox | 266 | # Skip busybox |
261 | PNBLACKLIST[busybox] = "Don't build this" | 267 | SKIP_RECIPE[busybox] = "Don't build this" |
262 | """ | 268 | """ |
263 | self.write_config(config) | 269 | self.write_config(config) |
264 | 270 | ||
265 | bitbake("--graphviz core-image-sato") | 271 | bitbake("--graphviz core-image-weston") |
266 | 272 | ||
267 | def test_image_gen_debugfs(self): | 273 | def test_image_gen_debugfs(self): |
268 | """ | 274 | """ |
@@ -275,20 +281,20 @@ PNBLACKLIST[busybox] = "Don't build this" | |||
275 | Yeoh Ee Peng <ee.peng.yeoh@intel.com> | 281 | Yeoh Ee Peng <ee.peng.yeoh@intel.com> |
276 | """ | 282 | """ |
277 | 283 | ||
278 | image_name = 'core-image-minimal' | 284 | image = 'core-image-minimal' |
285 | image_fstypes_debugfs = 'tar.bz2' | ||
279 | features = 'IMAGE_GEN_DEBUGFS = "1"\n' | 286 | features = 'IMAGE_GEN_DEBUGFS = "1"\n' |
280 | features += 'IMAGE_FSTYPES_DEBUGFS = "tar.bz2"\n' | 287 | features += 'IMAGE_FSTYPES_DEBUGFS = "%s"\n' % image_fstypes_debugfs |
281 | features += 'MACHINE = "genericx86-64"\n' | ||
282 | self.write_config(features) | 288 | self.write_config(features) |
283 | 289 | ||
284 | bitbake(image_name) | 290 | bitbake(image) |
285 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | 291 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) |
286 | dbg_tar_file = os.path.join(deploy_dir_image, "*-dbg.rootfs.tar.bz2") | 292 | |
287 | debug_files = glob.glob(dbg_tar_file) | 293 | dbg_tar_file = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.%s" % (bb_vars['IMAGE_LINK_NAME'], image_fstypes_debugfs)) |
288 | self.assertNotEqual(len(debug_files), 0, 'debug filesystem not generated at %s' % dbg_tar_file) | 294 | self.assertTrue(os.path.exists(dbg_tar_file), 'debug filesystem not generated at %s' % dbg_tar_file) |
289 | result = runCmd('cd %s; tar xvf %s' % (deploy_dir_image, dbg_tar_file)) | 295 | result = runCmd('cd %s; tar xvf %s' % (bb_vars['DEPLOY_DIR_IMAGE'], dbg_tar_file)) |
290 | self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output)) | 296 | self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output)) |
291 | result = runCmd('find %s -name %s' % (deploy_dir_image, "udevadm")) | 297 | result = runCmd('find %s -name %s' % (bb_vars['DEPLOY_DIR_IMAGE'], "udevadm")) |
292 | self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output) | 298 | self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output) |
293 | dbg_symbols_targets = result.output.splitlines() | 299 | dbg_symbols_targets = result.output.splitlines() |
294 | self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets) | 300 | self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets) |
@@ -298,9 +304,33 @@ PNBLACKLIST[busybox] = "Don't build this" | |||
298 | 304 | ||
299 | def test_empty_image(self): | 305 | def test_empty_image(self): |
300 | """Test creation of image with no packages""" | 306 | """Test creation of image with no packages""" |
301 | bitbake('test-empty-image') | 307 | image = 'test-empty-image' |
302 | res_dir = get_bb_var('DEPLOY_DIR_IMAGE') | 308 | bitbake(image) |
303 | images = os.path.join(res_dir, "test-empty-image-*.manifest") | 309 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) |
304 | result = glob.glob(images) | 310 | manifest = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.manifest" % bb_vars['IMAGE_LINK_NAME']) |
305 | with open(result[1],"r") as f: | 311 | self.assertTrue(os.path.exists(manifest)) |
312 | |||
313 | with open(manifest, "r") as f: | ||
306 | self.assertEqual(len(f.read().strip()),0) | 314 | self.assertEqual(len(f.read().strip()),0) |
315 | |||
316 | def test_mandb(self): | ||
317 | """ | ||
318 | Test that an image containing manpages has working man and apropos commands. | ||
319 | """ | ||
320 | config = """ | ||
321 | DISTRO_FEATURES:append = " api-documentation" | ||
322 | CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc" | ||
323 | """ | ||
324 | self.write_config(config) | ||
325 | bitbake("core-image-minimal") | ||
326 | |||
327 | with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu: | ||
328 | # This manpage is provided by man-pages | ||
329 | status, output = qemu.run_serial("apropos 8859") | ||
330 | self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output)) | ||
331 | self.assertIn("iso_8859_15", output) | ||
332 | |||
333 | # This manpage is provided by kmod | ||
334 | status, output = qemu.run_serial("man --pager=cat modprobe") | ||
335 | self.assertEqual(status, 1, 'Failed to run man: %s' % (output)) | ||
336 | self.assertIn("force-modversion", output) | ||
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py index 152da6332a..f4af67a239 100644 --- a/meta/lib/oeqa/selftest/cases/incompatible_lic.py +++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py | |||
@@ -1,10 +1,16 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
1 | from oeqa.selftest.case import OESelftestTestCase | 6 | from oeqa.selftest.case import OESelftestTestCase |
2 | from oeqa.utils.commands import bitbake | 7 | from oeqa.utils.commands import bitbake |
3 | 8 | ||
4 | class IncompatibleLicenseTests(OESelftestTestCase): | 9 | class IncompatibleLicenseTestObsolete(OESelftestTestCase): |
5 | 10 | ||
6 | def lic_test(self, pn, pn_lic, lic): | 11 | def lic_test(self, pn, pn_lic, lic, error_msg=None): |
7 | error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic) | 12 | if not error_msg: |
13 | error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic) | ||
8 | 14 | ||
9 | self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic)) | 15 | self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic)) |
10 | 16 | ||
@@ -12,72 +18,81 @@ class IncompatibleLicenseTests(OESelftestTestCase): | |||
12 | if error_msg not in result.output: | 18 | if error_msg not in result.output: |
13 | raise AssertionError(result.output) | 19 | raise AssertionError(result.output) |
14 | 20 | ||
15 | # Verify that a package with an SPDX license (from AVAILABLE_LICENSES) | 21 | # Verify that a package with an SPDX license cannot be built when |
16 | # cannot be built when INCOMPATIBLE_LICENSE contains this SPDX license | 22 | # INCOMPATIBLE_LICENSE contains an alias (in SPDXLICENSEMAP) of this SPDX |
17 | def test_incompatible_spdx_license(self): | 23 | # license |
18 | self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only') | ||
19 | |||
20 | # Verify that a package with an SPDX license (from AVAILABLE_LICENSES) | ||
21 | # cannot be built when INCOMPATIBLE_LICENSE contains an alias (in | ||
22 | # SPDXLICENSEMAP) of this SPDX license | ||
23 | def test_incompatible_alias_spdx_license(self): | 24 | def test_incompatible_alias_spdx_license(self): |
24 | self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3') | 25 | self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE") |
25 | |||
26 | # Verify that a package with an SPDX license (from AVAILABLE_LICENSES) | ||
27 | # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded license | ||
28 | # matching this SPDX license | ||
29 | def test_incompatible_spdx_license_wildcard(self): | ||
30 | self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPL-3.0-only') | ||
31 | 26 | ||
32 | # Verify that a package with an SPDX license (from AVAILABLE_LICENSES) | 27 | # Verify that a package with an SPDX license cannot be built when |
33 | # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded alias | 28 | # INCOMPATIBLE_LICENSE contains a wildcarded alias license matching this |
34 | # license matching this SPDX license | 29 | # SPDX license |
35 | def test_incompatible_alias_spdx_license_wildcard(self): | 30 | def test_incompatible_alias_spdx_license_wildcard(self): |
36 | self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3') | 31 | self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry") |
37 | |||
38 | # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX | ||
39 | # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX | ||
40 | # license | ||
41 | def test_incompatible_spdx_license_alias(self): | ||
42 | self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only') | ||
43 | 32 | ||
44 | # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX | 33 | # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX |
45 | # license cannot be built when INCOMPATIBLE_LICENSE contains this alias | 34 | # license cannot be built when INCOMPATIBLE_LICENSE contains this alias |
46 | def test_incompatible_alias_spdx_license_alias(self): | 35 | def test_incompatible_alias_spdx_license_alias(self): |
47 | self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3') | 36 | self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE") |
48 | 37 | ||
49 | # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX | 38 | # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX |
50 | # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded | 39 | # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded |
51 | # license matching this SPDX license | 40 | # license matching this SPDX license |
52 | def test_incompatible_spdx_license_alias_wildcard(self): | 41 | def test_incompatible_spdx_license_alias_wildcard(self): |
53 | self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0') | 42 | self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0', "*GPL-3.0 is an invalid license wildcard entry") |
54 | 43 | ||
55 | # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX | 44 | # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX |
56 | # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded | 45 | # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded |
57 | # alias license matching the SPDX license | 46 | # alias license matching the SPDX license |
58 | def test_incompatible_alias_spdx_license_alias_wildcard(self): | 47 | def test_incompatible_alias_spdx_license_alias_wildcard(self): |
59 | self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3') | 48 | self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry") |
60 | 49 | ||
61 | # Verify that a package with multiple SPDX licenses (from | ||
62 | # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains | ||
63 | # some of them | ||
64 | def test_incompatible_spdx_licenses(self): | ||
65 | self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only') | ||
66 | 50 | ||
67 | # Verify that a package with multiple SPDX licenses (from | 51 | # Verify that a package with multiple SPDX licenses cannot be built when |
68 | # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a | 52 | # INCOMPATIBLE_LICENSE contains a wildcard to some of them |
69 | # wildcard to some of them | ||
70 | def test_incompatible_spdx_licenses_wildcard(self): | 53 | def test_incompatible_spdx_licenses_wildcard(self): |
71 | self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only') | 54 | self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only', "*GPL-3.0-only is an invalid license wildcard entry") |
72 | 55 | ||
73 | # Verify that a package with multiple SPDX licenses (from | 56 | |
74 | # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a | 57 | # Verify that a package with multiple SPDX licenses cannot be built when |
75 | # wildcard matching all licenses | 58 | # INCOMPATIBLE_LICENSE contains a wildcard matching all licenses |
76 | def test_incompatible_all_licenses_wildcard(self): | 59 | def test_incompatible_all_licenses_wildcard(self): |
77 | self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*') | 60 | self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*', "* is an invalid license wildcard entry") |
61 | |||
62 | class IncompatibleLicenseTests(OESelftestTestCase): | ||
63 | |||
64 | def lic_test(self, pn, pn_lic, lic): | ||
65 | error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic) | ||
66 | |||
67 | self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic)) | ||
68 | |||
69 | result = bitbake('%s --dry-run' % (pn), ignore_status=True) | ||
70 | if error_msg not in result.output: | ||
71 | raise AssertionError(result.output) | ||
72 | |||
73 | # Verify that a package with an SPDX license cannot be built when | ||
74 | # INCOMPATIBLE_LICENSE contains this SPDX license | ||
75 | def test_incompatible_spdx_license(self): | ||
76 | self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only') | ||
78 | 77 | ||
79 | # Verify that a package with a non-SPDX license (neither in | 78 | # Verify that a package with an SPDX license cannot be built when |
80 | # AVAILABLE_LICENSES nor in SPDXLICENSEMAP) cannot be built when | 79 | # INCOMPATIBLE_LICENSE contains a wildcarded license matching this SPDX |
80 | # license | ||
81 | def test_incompatible_spdx_license_wildcard(self): | ||
82 | self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0*') | ||
83 | |||
84 | # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX | ||
85 | # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX | ||
86 | # license | ||
87 | def test_incompatible_spdx_license_alias(self): | ||
88 | self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only') | ||
89 | |||
90 | # Verify that a package with multiple SPDX licenses cannot be built when | ||
91 | # INCOMPATIBLE_LICENSE contains some of them | ||
92 | def test_incompatible_spdx_licenses(self): | ||
93 | self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only') | ||
94 | |||
95 | # Verify that a package with a non-SPDX license cannot be built when | ||
81 | # INCOMPATIBLE_LICENSE contains this license | 96 | # INCOMPATIBLE_LICENSE contains this license |
82 | def test_incompatible_nonspdx_license(self): | 97 | def test_incompatible_nonspdx_license(self): |
83 | self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense') | 98 | self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense') |
@@ -85,8 +100,8 @@ class IncompatibleLicenseTests(OESelftestTestCase): | |||
85 | class IncompatibleLicensePerImageTests(OESelftestTestCase): | 100 | class IncompatibleLicensePerImageTests(OESelftestTestCase): |
86 | def default_config(self): | 101 | def default_config(self): |
87 | return """ | 102 | return """ |
88 | IMAGE_INSTALL_append = " bash" | 103 | IMAGE_INSTALL:append = " bash" |
89 | INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0" | 104 | INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*" |
90 | """ | 105 | """ |
91 | 106 | ||
92 | def test_bash_default(self): | 107 | def test_bash_default(self): |
@@ -98,7 +113,8 @@ INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0" | |||
98 | raise AssertionError(result.output) | 113 | raise AssertionError(result.output) |
99 | 114 | ||
100 | def test_bash_and_license(self): | 115 | def test_bash_and_license(self): |
101 | self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " & SomeLicense"') | 116 | self.disable_class("create-spdx") |
117 | self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"') | ||
102 | error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later" | 118 | error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later" |
103 | 119 | ||
104 | result = bitbake('core-image-minimal', ignore_status=True) | 120 | result = bitbake('core-image-minimal', ignore_status=True) |
@@ -106,30 +122,31 @@ INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0" | |||
106 | raise AssertionError(result.output) | 122 | raise AssertionError(result.output) |
107 | 123 | ||
108 | def test_bash_or_license(self): | 124 | def test_bash_or_license(self): |
109 | self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " | SomeLicense"') | 125 | self.disable_class("create-spdx") |
126 | self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"') | ||
110 | 127 | ||
111 | bitbake('core-image-minimal') | 128 | bitbake('core-image-minimal') |
112 | 129 | ||
113 | def test_bash_whitelist(self): | 130 | def test_bash_license_exceptions(self): |
114 | self.write_config(self.default_config() + '\nWHITELIST_GPL-3.0_pn-core-image-minimal = "bash"') | 131 | self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"') |
115 | 132 | ||
116 | bitbake('core-image-minimal') | 133 | bitbake('core-image-minimal') |
117 | 134 | ||
118 | class NoGPL3InImagesTests(OESelftestTestCase): | 135 | class NoGPL3InImagesTests(OESelftestTestCase): |
119 | def test_core_image_minimal(self): | 136 | def test_core_image_minimal(self): |
120 | self.write_config(""" | 137 | self.write_config(""" |
121 | INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0" | 138 | INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*" |
122 | """) | 139 | """) |
123 | bitbake('core-image-minimal') | 140 | bitbake('core-image-minimal') |
124 | 141 | ||
125 | def test_core_image_full_cmdline(self): | 142 | def test_core_image_full_cmdline_weston(self): |
126 | self.write_config(""" | 143 | self.write_config(""" |
127 | INHERIT += "testimage"\n | 144 | IMAGE_CLASSES += "testimage" |
128 | INCOMPATIBLE_LICENSE_pn-core-image-full-cmdline = "GPL-3.0 LGPL-3.0"\n | 145 | INCOMPATIBLE_LICENSE:pn-core-image-full-cmdline = "GPL-3.0* LGPL-3.0*" |
129 | RDEPENDS_packagegroup-core-full-cmdline-utils_remove = "bash bc coreutils cpio ed findutils gawk grep mc mc-fish mc-helpers mc-helpers-perl sed tar time"\n | 146 | INCOMPATIBLE_LICENSE:pn-core-image-weston = "GPL-3.0* LGPL-3.0*" |
130 | RDEPENDS_packagegroup-core-full-cmdline-dev-utils_remove = "diffutils m4 make patch"\n | 147 | |
131 | RDEPENDS_packagegroup-core-full-cmdline-multiuser_remove = "gzip"\n | 148 | require conf/distro/include/no-gplv3.inc |
132 | """) | 149 | """) |
133 | bitbake('core-image-full-cmdline') | 150 | bitbake('core-image-full-cmdline core-image-weston') |
134 | bitbake('-c testimage core-image-full-cmdline') | 151 | bitbake('-c testimage core-image-full-cmdline core-image-weston') |
135 | 152 | ||
diff --git a/meta/lib/oeqa/selftest/cases/intercept.py b/meta/lib/oeqa/selftest/cases/intercept.py new file mode 100644 index 0000000000..12583c3099 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/intercept.py | |||
@@ -0,0 +1,21 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | from oeqa.selftest.case import OESelftestTestCase | ||
8 | from oeqa.utils.commands import bitbake | ||
9 | |||
10 | class GitCheck(OESelftestTestCase): | ||
11 | def test_git_intercept(self): | ||
12 | """ | ||
13 | Git binaries with CVE-2022-24765 fixed will refuse to operate on a | ||
14 | repository which is owned by a different user. This breaks our | ||
15 | do_install task as that runs inside pseudo, so the git repository is | ||
16 | owned by the build user but git is running as (fake)root. | ||
17 | |||
18 | We have an intercept which disables pseudo, so verify that it works. | ||
19 | """ | ||
20 | bitbake("git-submodule-test -c test_git_as_user") | ||
21 | bitbake("git-submodule-test -c test_git_as_root") | ||
diff --git a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py index a61876ee61..b1f78a0cd1 100644 --- a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py +++ b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py | |||
@@ -1,3 +1,9 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
1 | import os | 7 | import os |
2 | from oeqa.selftest.case import OESelftestTestCase | 8 | from oeqa.selftest.case import OESelftestTestCase |
3 | from oeqa.utils.commands import runCmd, get_bb_var | 9 | from oeqa.utils.commands import runCmd, get_bb_var |
@@ -58,7 +64,8 @@ class KernelDev(OESelftestTestCase): | |||
58 | recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend') | 64 | recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend') |
59 | with open(recipe_append, 'w+') as fh: | 65 | with open(recipe_append, 'w+') as fh: |
60 | fh.write('SRC_URI += "file://%s"\n' % patch_name) | 66 | fh.write('SRC_URI += "file://%s"\n' % patch_name) |
61 | fh.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"') | 67 | fh.write('ERROR_QA:remove:pn-linux-yocto = "patch-status"\n') |
68 | fh.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"') | ||
62 | 69 | ||
63 | runCmd('bitbake virtual/kernel -c clean') | 70 | runCmd('bitbake virtual/kernel -c clean') |
64 | runCmd('bitbake virtual/kernel -c patch') | 71 | runCmd('bitbake virtual/kernel -c patch') |
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py index 05e9426fc6..379ed589ad 100644 --- a/meta/lib/oeqa/selftest/cases/layerappend.py +++ b/meta/lib/oeqa/selftest/cases/layerappend.py | |||
@@ -1,11 +1,13 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | import os | 7 | import os |
6 | 8 | ||
7 | from oeqa.selftest.case import OESelftestTestCase | 9 | from oeqa.selftest.case import OESelftestTestCase |
8 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var | 10 | from oeqa.utils.commands import bitbake, get_bb_var |
9 | import oeqa.utils.ftools as ftools | 11 | import oeqa.utils.ftools as ftools |
10 | 12 | ||
11 | class LayerAppendTests(OESelftestTestCase): | 13 | class LayerAppendTests(OESelftestTestCase): |
@@ -30,20 +32,20 @@ python do_build() { | |||
30 | addtask build | 32 | addtask build |
31 | """ | 33 | """ |
32 | append = """ | 34 | append = """ |
33 | FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:" | 35 | FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:" |
34 | 36 | ||
35 | SRC_URI_append = " file://appendtest.txt" | 37 | SRC_URI:append = " file://appendtest.txt" |
36 | 38 | ||
37 | sysroot_stage_all_append() { | 39 | sysroot_stage_all:append() { |
38 | install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/ | 40 | install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/ |
39 | } | 41 | } |
40 | 42 | ||
41 | """ | 43 | """ |
42 | 44 | ||
43 | append2 = """ | 45 | append2 = """ |
44 | FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:" | 46 | FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:" |
45 | 47 | ||
46 | SRC_URI_append = " file://appendtest.txt" | 48 | SRC_URI:append = " file://appendtest.txt" |
47 | """ | 49 | """ |
48 | layerappend = '' | 50 | layerappend = '' |
49 | 51 | ||
diff --git a/meta/lib/oeqa/selftest/cases/liboe.py b/meta/lib/oeqa/selftest/cases/liboe.py index afe8f8809f..d5ffffdcb4 100644 --- a/meta/lib/oeqa/selftest/cases/liboe.py +++ b/meta/lib/oeqa/selftest/cases/liboe.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -97,6 +99,6 @@ class LibOE(OESelftestTestCase): | |||
97 | 99 | ||
98 | dstcnt = len(os.listdir(dst)) | 100 | dstcnt = len(os.listdir(dst)) |
99 | srccnt = len(os.listdir(src)) | 101 | srccnt = len(os.listdir(src)) |
100 | self.assertEquals(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt)) | 102 | self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt)) |
101 | 103 | ||
102 | oe.path.remove(testloc) | 104 | oe.path.remove(testloc) |
diff --git a/meta/lib/oeqa/selftest/cases/lic_checksum.py b/meta/lib/oeqa/selftest/cases/lic_checksum.py index bae935d697..2d0b805b90 100644 --- a/meta/lib/oeqa/selftest/cases/lic_checksum.py +++ b/meta/lib/oeqa/selftest/cases/lic_checksum.py | |||
@@ -1,16 +1,36 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | import os | 7 | import os |
6 | import tempfile | 8 | import tempfile |
9 | import urllib | ||
7 | 10 | ||
8 | from oeqa.selftest.case import OESelftestTestCase | 11 | from oeqa.selftest.case import OESelftestTestCase |
9 | from oeqa.utils.commands import bitbake | 12 | from oeqa.utils.commands import bitbake |
10 | from oeqa.utils import CommandError | ||
11 | 13 | ||
12 | class LicenseTests(OESelftestTestCase): | 14 | class LicenseTests(OESelftestTestCase): |
13 | 15 | ||
16 | def test_checksum_with_space(self): | ||
17 | bitbake_cmd = '-c populate_lic emptytest' | ||
18 | |||
19 | lic_file, lic_path = tempfile.mkstemp(" -afterspace") | ||
20 | os.close(lic_file) | ||
21 | #self.track_for_cleanup(lic_path) | ||
22 | |||
23 | self.write_config("INHERIT:remove = \"report-error\"") | ||
24 | |||
25 | self.write_recipeinc('emptytest', """ | ||
26 | INHIBIT_DEFAULT_DEPS = "1" | ||
27 | LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e" | ||
28 | SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e" | ||
29 | """ % (urllib.parse.quote(lic_path), urllib.parse.quote(lic_path))) | ||
30 | result = bitbake(bitbake_cmd) | ||
31 | self.delete_recipeinc('emptytest') | ||
32 | |||
33 | |||
14 | # Verify that changing a license file that has an absolute path causes | 34 | # Verify that changing a license file that has an absolute path causes |
15 | # the license qa to fail due to a mismatched md5sum. | 35 | # the license qa to fail due to a mismatched md5sum. |
16 | def test_nonmatching_checksum(self): | 36 | def test_nonmatching_checksum(self): |
@@ -21,7 +41,7 @@ class LicenseTests(OESelftestTestCase): | |||
21 | os.close(lic_file) | 41 | os.close(lic_file) |
22 | self.track_for_cleanup(lic_path) | 42 | self.track_for_cleanup(lic_path) |
23 | 43 | ||
24 | self.write_config("INHERIT_remove = \"report-error\"") | 44 | self.write_config("INHERIT:remove = \"report-error\"") |
25 | 45 | ||
26 | self.write_recipeinc('emptytest', """ | 46 | self.write_recipeinc('emptytest', """ |
27 | INHIBIT_DEFAULT_DEPS = "1" | 47 | INHIBIT_DEFAULT_DEPS = "1" |
@@ -34,5 +54,6 @@ SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e" | |||
34 | f.write("data") | 54 | f.write("data") |
35 | 55 | ||
36 | result = bitbake(bitbake_cmd, ignore_status=True) | 56 | result = bitbake(bitbake_cmd, ignore_status=True) |
57 | self.delete_recipeinc('emptytest') | ||
37 | if error_msg not in result.output: | 58 | if error_msg not in result.output: |
38 | raise AssertionError(result.output) | 59 | raise AssertionError(result.output) |
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py new file mode 100644 index 0000000000..4ca8ffb7aa --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/locales.py | |||
@@ -0,0 +1,54 @@ | |||
1 | # | ||
2 | # SPDX-License-Identifier: MIT | ||
3 | # | ||
4 | |||
5 | from oeqa.selftest.case import OESelftestTestCase | ||
6 | from oeqa.core.decorator import OETestTag | ||
7 | from oeqa.utils.commands import bitbake, runqemu | ||
8 | |||
9 | class LocalesTest(OESelftestTestCase): | ||
10 | |||
11 | @OETestTag("runqemu") | ||
12 | |||
13 | def run_locales_test(self, binary_enabled): | ||
14 | features = [] | ||
15 | features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"') | ||
16 | features.append('IMAGE_INSTALL:append = " glibc-utils localedef"') | ||
17 | features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8"') | ||
18 | features.append('IMAGE_LINGUAS:append = " en-us fr-fr"') | ||
19 | if binary_enabled: | ||
20 | features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"') | ||
21 | else: | ||
22 | features.append('ENABLE_BINARY_LOCALE_GENERATION = "0"') | ||
23 | self.write_config("\n".join(features)) | ||
24 | |||
25 | # Build a core-image-minimal | ||
26 | bitbake('core-image-minimal') | ||
27 | |||
28 | with runqemu("core-image-minimal", ssh=False, runqemuparams='nographic') as qemu: | ||
29 | cmd = "locale -a" | ||
30 | status, output = qemu.run_serial(cmd) | ||
31 | # output must include fr_FR or fr_FR.UTF-8 | ||
32 | self.assertEqual(status, 1, msg='locale test command failed: output: %s' % output) | ||
33 | self.assertIn("fr_FR", output, msg='locale -a test failed: output: %s' % output) | ||
34 | |||
35 | cmd = "localedef --list-archive -v" | ||
36 | status, output = qemu.run_serial(cmd) | ||
37 | # output must include fr_FR.utf8 | ||
38 | self.assertEqual(status, 1, msg='localedef test command failed: output: %s' % output) | ||
39 | self.assertIn("fr_FR.utf8", output, msg='localedef test failed: output: %s' % output) | ||
40 | |||
41 | def test_locales_on(self): | ||
42 | """ | ||
43 | Summary: Test the locales are generated | ||
44 | Expected: 1. Check the locale exist in the locale-archive | ||
45 | 2. Check the locale exist for the glibc | ||
46 | 3. Check the locale can be generated | ||
47 | Product: oe-core | ||
48 | Author: Louis Rannou <lrannou@baylibre.com> | ||
49 | AutomatedBy: Louis Rannou <lrannou@baylibre.com> | ||
50 | """ | ||
51 | self.run_locales_test(True) | ||
52 | |||
53 | def test_locales_off(self): | ||
54 | self.run_locales_test(False) | ||
diff --git a/meta/lib/oeqa/selftest/cases/manifest.py b/meta/lib/oeqa/selftest/cases/manifest.py index 5d13f35468..07a6c80489 100644 --- a/meta/lib/oeqa/selftest/cases/manifest.py +++ b/meta/lib/oeqa/selftest/cases/manifest.py | |||
@@ -1,11 +1,13 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | import os | 7 | import os |
6 | 8 | ||
7 | from oeqa.selftest.case import OESelftestTestCase | 9 | from oeqa.selftest.case import OESelftestTestCase |
8 | from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake | 10 | from oeqa.utils.commands import get_bb_var, bitbake |
9 | 11 | ||
10 | class ManifestEntry: | 12 | class ManifestEntry: |
11 | '''A manifest item of a collection able to list missing packages''' | 13 | '''A manifest item of a collection able to list missing packages''' |
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py index 6f10d30dc9..ffe0d2604d 100644 --- a/meta/lib/oeqa/selftest/cases/meta_ide.py +++ b/meta/lib/oeqa/selftest/cases/meta_ide.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -16,13 +18,15 @@ class MetaIDE(OESelftestTestCase): | |||
16 | def setUpClass(cls): | 18 | def setUpClass(cls): |
17 | super(MetaIDE, cls).setUpClass() | 19 | super(MetaIDE, cls).setUpClass() |
18 | bitbake('meta-ide-support') | 20 | bitbake('meta-ide-support') |
19 | bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'TMPDIR', 'COREBASE']) | 21 | bitbake('build-sysroots -c build_native_sysroot') |
22 | bitbake('build-sysroots -c build_target_sysroot') | ||
23 | bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE']) | ||
20 | cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS'] | 24 | cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS'] |
21 | cls.tmpdir = bb_vars['TMPDIR'] | 25 | cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE'] |
22 | cls.environment_script_path = '%s/%s' % (cls.tmpdir, cls.environment_script) | 26 | cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script) |
23 | cls.corebasedir = bb_vars['COREBASE'] | 27 | cls.corebasedir = bb_vars['COREBASE'] |
24 | cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide') | 28 | cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide') |
25 | 29 | ||
26 | @classmethod | 30 | @classmethod |
27 | def tearDownClass(cls): | 31 | def tearDownClass(cls): |
28 | shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True) | 32 | shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True) |
@@ -40,12 +44,17 @@ class MetaIDE(OESelftestTestCase): | |||
40 | def test_meta_ide_can_build_cpio_project(self): | 44 | def test_meta_ide_can_build_cpio_project(self): |
41 | dl_dir = self.td.get('DL_DIR', None) | 45 | dl_dir = self.td.get('DL_DIR', None) |
42 | self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path, | 46 | self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path, |
43 | "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz", | 47 | "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz", |
44 | self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir) | 48 | self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir) |
45 | self.project.download_archive() | 49 | self.project.download_archive() |
46 | self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS --disable-maintainer-mode','sed -i -e "/char \*program_name/d" src/global.c;'), 0, | 50 | self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS'), 0, |
47 | msg="Running configure failed") | 51 | msg="Running configure failed") |
48 | self.assertEqual(self.project.run_make(), 0, | 52 | self.assertEqual(self.project.run_make(), 0, |
49 | msg="Running make failed") | 53 | msg="Running make failed") |
50 | self.assertEqual(self.project.run_install(), 0, | 54 | self.assertEqual(self.project.run_install(), 0, |
51 | msg="Running make install failed") | 55 | msg="Running make install failed") |
56 | |||
57 | def test_meta_ide_can_run_sdk_tests(self): | ||
58 | bitbake('-c populate_sysroot gtk+3') | ||
59 | bitbake('build-sysroots -c build_target_sysroot') | ||
60 | bitbake('-c testsdk meta-ide-support') | ||
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py new file mode 100644 index 0000000000..2919f07939 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py | |||
@@ -0,0 +1,44 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | import os | ||
7 | import subprocess | ||
8 | import tempfile | ||
9 | import shutil | ||
10 | |||
11 | from oeqa.selftest.case import OESelftestTestCase | ||
12 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd | ||
13 | |||
14 | |||
15 | class Minidebuginfo(OESelftestTestCase): | ||
16 | def test_minidebuginfo(self): | ||
17 | target_sys = get_bb_var("TARGET_SYS") | ||
18 | binutils = "binutils-cross-{}".format(get_bb_var("TARGET_ARCH")) | ||
19 | |||
20 | image = 'core-image-minimal' | ||
21 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'READELF'], image) | ||
22 | |||
23 | self.write_config(""" | ||
24 | DISTRO_FEATURES:append = " minidebuginfo" | ||
25 | IMAGE_FSTYPES = "tar.bz2" | ||
26 | """) | ||
27 | bitbake("{} {}:do_addto_recipe_sysroot".format(image, binutils)) | ||
28 | |||
29 | native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", binutils) | ||
30 | |||
31 | # confirm that executables and shared libraries contain an ELF section | ||
32 | # ".gnu_debugdata" which stores minidebuginfo. | ||
33 | with tempfile.TemporaryDirectory(prefix = "unpackfs-") as unpackedfs: | ||
34 | filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "{}.tar.bz2".format(bb_vars['IMAGE_LINK_NAME'])) | ||
35 | shutil.unpack_archive(filename, unpackedfs) | ||
36 | |||
37 | r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "bin", "busybox")], | ||
38 | native_sysroot = native_sysroot, target_sys = target_sys) | ||
39 | self.assertIn(".gnu_debugdata", r.output) | ||
40 | |||
41 | r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "lib", "libc.so.6")], | ||
42 | native_sysroot = native_sysroot, target_sys = target_sys) | ||
43 | self.assertIn(".gnu_debugdata", r.output) | ||
44 | |||
diff --git a/meta/lib/oeqa/selftest/cases/multiconfig.py b/meta/lib/oeqa/selftest/cases/multiconfig.py index 39b92f2439..f509cbf607 100644 --- a/meta/lib/oeqa/selftest/cases/multiconfig.py +++ b/meta/lib/oeqa/selftest/cases/multiconfig.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -17,7 +19,7 @@ class MultiConfig(OESelftestTestCase): | |||
17 | """ | 19 | """ |
18 | 20 | ||
19 | config = """ | 21 | config = """ |
20 | IMAGE_INSTALL_append_pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl" | 22 | IMAGE_INSTALL:append:pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl" |
21 | BBMULTICONFIG = "tiny musl" | 23 | BBMULTICONFIG = "tiny musl" |
22 | """ | 24 | """ |
23 | self.write_config(config) | 25 | self.write_config(config) |
@@ -52,7 +54,7 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny" | |||
52 | self.write_config(config) | 54 | self.write_config(config) |
53 | 55 | ||
54 | testconfig = textwrap.dedent('''\ | 56 | testconfig = textwrap.dedent('''\ |
55 | MCTESTVAR_append = "1" | 57 | MCTESTVAR:append = "1" |
56 | ''') | 58 | ''') |
57 | self.write_config(testconfig, 'test') | 59 | self.write_config(testconfig, 'test') |
58 | 60 | ||
@@ -64,9 +66,22 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny" | |||
64 | self.assertIn('MCTESTVAR=test1', result.output.splitlines()) | 66 | self.assertIn('MCTESTVAR=test1', result.output.splitlines()) |
65 | 67 | ||
66 | testconfig = textwrap.dedent('''\ | 68 | testconfig = textwrap.dedent('''\ |
67 | MCTESTVAR_append = "2" | 69 | MCTESTVAR:append = "2" |
68 | ''') | 70 | ''') |
69 | self.write_config(testconfig, 'test') | 71 | self.write_config(testconfig, 'test') |
70 | 72 | ||
71 | result = bitbake('mc:test:multiconfig-test-parse -c showvar') | 73 | result = bitbake('mc:test:multiconfig-test-parse -c showvar') |
72 | self.assertIn('MCTESTVAR=test2', result.output.splitlines()) | 74 | self.assertIn('MCTESTVAR=test2', result.output.splitlines()) |
75 | |||
76 | def test_multiconfig_inlayer(self): | ||
77 | """ | ||
78 | Test that a multiconfig from meta-selftest works. | ||
79 | """ | ||
80 | |||
81 | config = """ | ||
82 | BBMULTICONFIG = "muslmc" | ||
83 | """ | ||
84 | self.write_config(config) | ||
85 | |||
86 | # Build a core-image-minimal, only dry run needed to check config is present | ||
87 | bitbake('mc:muslmc:bash -n') | ||
diff --git a/meta/lib/oeqa/selftest/cases/newlib.py b/meta/lib/oeqa/selftest/cases/newlib.py new file mode 100644 index 0000000000..fe57aa51f2 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/newlib.py | |||
@@ -0,0 +1,13 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | from oeqa.selftest.case import OESelftestTestCase | ||
8 | from oeqa.utils.commands import bitbake | ||
9 | |||
10 | class NewlibTest(OESelftestTestCase): | ||
11 | def test_newlib(self): | ||
12 | self.write_config('TCLIBC = "newlib"') | ||
13 | bitbake("newlib libgloss") | ||
diff --git a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py index 802a91a488..042ccdd2b4 100644 --- a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py +++ b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py | |||
@@ -1,8 +1,11 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | import os | 7 | import os |
8 | import sys | ||
6 | from oeqa.selftest.case import OESelftestTestCase | 9 | from oeqa.selftest.case import OESelftestTestCase |
7 | import tempfile | 10 | import tempfile |
8 | import operator | 11 | import operator |
@@ -11,15 +14,14 @@ from oeqa.utils.commands import get_bb_var | |||
11 | class TestBlobParsing(OESelftestTestCase): | 14 | class TestBlobParsing(OESelftestTestCase): |
12 | 15 | ||
13 | def setUp(self): | 16 | def setUp(self): |
14 | import time | ||
15 | self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory', | 17 | self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory', |
16 | dir=get_bb_var('TOPDIR')) | 18 | dir=get_bb_var('TOPDIR')) |
17 | 19 | ||
18 | try: | 20 | try: |
19 | from git import Repo | 21 | from git import Repo |
20 | self.repo = Repo.init(self.repo_path) | 22 | self.repo = Repo.init(self.repo_path) |
21 | except ImportError: | 23 | except ImportError as e: |
22 | self.skipTest('Python module GitPython is not present') | 24 | self.skipTest('Python module GitPython is not present (%s) (%s)' % (e, sys.path)) |
23 | 25 | ||
24 | self.test_file = "test" | 26 | self.test_file = "test" |
25 | self.var_map = {} | 27 | self.var_map = {} |
@@ -28,6 +30,16 @@ class TestBlobParsing(OESelftestTestCase): | |||
28 | import shutil | 30 | import shutil |
29 | shutil.rmtree(self.repo_path) | 31 | shutil.rmtree(self.repo_path) |
30 | 32 | ||
33 | @property | ||
34 | def heads_default(self): | ||
35 | """ | ||
36 | Support repos defaulting to master or to main branch | ||
37 | """ | ||
38 | try: | ||
39 | return self.repo.heads.main | ||
40 | except AttributeError: | ||
41 | return self.repo.heads.master | ||
42 | |||
31 | def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"): | 43 | def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"): |
32 | if len(to_add) == 0 and len(to_remove) == 0: | 44 | if len(to_add) == 0 and len(to_remove) == 0: |
33 | return | 45 | return |
@@ -65,10 +77,10 @@ class TestBlobParsing(OESelftestTestCase): | |||
65 | changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")} | 77 | changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")} |
66 | 78 | ||
67 | self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" }) | 79 | self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" }) |
68 | blob1 = self.repo.heads.master.commit.tree.blobs[0] | 80 | blob1 = self.heads_default.commit.tree.blobs[0] |
69 | 81 | ||
70 | self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" }) | 82 | self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" }) |
71 | blob2 = self.repo.heads.master.commit.tree.blobs[0] | 83 | blob2 = self.heads_default.commit.tree.blobs[0] |
72 | 84 | ||
73 | change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), | 85 | change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), |
74 | blob1, blob2, False, False) | 86 | blob1, blob2, False, False) |
@@ -84,10 +96,10 @@ class TestBlobParsing(OESelftestTestCase): | |||
84 | defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]} | 96 | defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]} |
85 | 97 | ||
86 | self.commit_vars(to_add = { "foo" : "1" }) | 98 | self.commit_vars(to_add = { "foo" : "1" }) |
87 | blob1 = self.repo.heads.master.commit.tree.blobs[0] | 99 | blob1 = self.heads_default.commit.tree.blobs[0] |
88 | 100 | ||
89 | self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" }) | 101 | self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" }) |
90 | blob2 = self.repo.heads.master.commit.tree.blobs[0] | 102 | blob2 = self.heads_default.commit.tree.blobs[0] |
91 | 103 | ||
92 | change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), | 104 | change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), |
93 | blob1, blob2, False, False) | 105 | blob1, blob2, False, False) |
diff --git a/meta/lib/oeqa/selftest/cases/oelib/elf.py b/meta/lib/oeqa/selftest/cases/oelib/elf.py index 5a5f9b4fdf..7bf550b6fd 100644 --- a/meta/lib/oeqa/selftest/cases/oelib/elf.py +++ b/meta/lib/oeqa/selftest/cases/oelib/elf.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
diff --git a/meta/lib/oeqa/selftest/cases/oelib/license.py b/meta/lib/oeqa/selftest/cases/oelib/license.py index 6ebbee589f..5eea12e761 100644 --- a/meta/lib/oeqa/selftest/cases/oelib/license.py +++ b/meta/lib/oeqa/selftest/cases/oelib/license.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -15,11 +17,11 @@ class SeenVisitor(oe.license.LicenseVisitor): | |||
15 | 17 | ||
16 | class TestSingleLicense(TestCase): | 18 | class TestSingleLicense(TestCase): |
17 | licenses = [ | 19 | licenses = [ |
18 | "GPLv2", | 20 | "GPL-2.0-only", |
19 | "LGPL-2.0", | 21 | "LGPL-2.0-only", |
20 | "Artistic", | 22 | "Artistic-1.0", |
21 | "MIT", | 23 | "MIT", |
22 | "GPLv3+", | 24 | "GPL-3.0-or-later", |
23 | "FOO_BAR", | 25 | "FOO_BAR", |
24 | ] | 26 | ] |
25 | invalid_licenses = ["GPL/BSD"] | 27 | invalid_licenses = ["GPL/BSD"] |
@@ -67,9 +69,9 @@ class TestComplexCombinations(TestSimpleCombinations): | |||
67 | "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"], | 69 | "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"], |
68 | "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"], | 70 | "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"], |
69 | "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"], | 71 | "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"], |
70 | "(GPL-2.0|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0", "BSD-4-clause", "MIT"], | 72 | "(GPL-2.0-only|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0-only", "BSD-4-clause", "MIT"], |
71 | } | 73 | } |
72 | preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0"] | 74 | preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0-only"] |
73 | 75 | ||
74 | class TestIsIncluded(TestCase): | 76 | class TestIsIncluded(TestCase): |
75 | tests = { | 77 | tests = { |
@@ -87,12 +89,12 @@ class TestIsIncluded(TestCase): | |||
87 | [True, ["BAR", "FOOBAR"]], | 89 | [True, ["BAR", "FOOBAR"]], |
88 | ("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"): | 90 | ("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"): |
89 | [True, ["BAZ", "MOO", "BARFOO"]], | 91 | [True, ["BAZ", "MOO", "BARFOO"]], |
90 | ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, None): | 92 | ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, None): |
91 | [True, ["GPL-3.0", "GPL-2.0", "LGPL-2.1"]], | 93 | [True, ["GPL-3.0-or-later", "GPL-2.0-only", "LGPL-2.1-only"]], |
92 | ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0"): | 94 | ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later"): |
93 | [True, ["Proprietary"]], | 95 | [True, ["Proprietary"]], |
94 | ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0 Proprietary"): | 96 | ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later Proprietary"): |
95 | [False, ["GPL-3.0"]] | 97 | [False, ["GPL-3.0-or-later"]] |
96 | } | 98 | } |
97 | 99 | ||
98 | def test_tests(self): | 100 | def test_tests(self): |
diff --git a/meta/lib/oeqa/selftest/cases/oelib/path.py b/meta/lib/oeqa/selftest/cases/oelib/path.py index a1cfa08c09..b963e447e3 100644 --- a/meta/lib/oeqa/selftest/cases/oelib/path.py +++ b/meta/lib/oeqa/selftest/cases/oelib/path.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
diff --git a/meta/lib/oeqa/selftest/cases/oelib/types.py b/meta/lib/oeqa/selftest/cases/oelib/types.py index 7eb49e6f95..58318b18b2 100644 --- a/meta/lib/oeqa/selftest/cases/oelib/types.py +++ b/meta/lib/oeqa/selftest/cases/oelib/types.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py index a7214beb4c..0cb46425a0 100644 --- a/meta/lib/oeqa/selftest/cases/oelib/utils.py +++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -64,7 +66,7 @@ class TestMultiprocessLaunch(TestCase): | |||
64 | import bb | 66 | import bb |
65 | 67 | ||
66 | def testfunction(item, d): | 68 | def testfunction(item, d): |
67 | if item == "2" or item == "1": | 69 | if item == "2": |
68 | raise KeyError("Invalid number %s" % item) | 70 | raise KeyError("Invalid number %s" % item) |
69 | return "Found %s" % item | 71 | return "Found %s" % item |
70 | 72 | ||
@@ -99,5 +101,4 @@ class TestMultiprocessLaunch(TestCase): | |||
99 | # Assert the function prints exceptions | 101 | # Assert the function prints exceptions |
100 | with captured_output() as (out, err): | 102 | with captured_output() as (out, err): |
101 | self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,)) | 103 | self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,)) |
102 | self.assertIn("KeyError: 'Invalid number 1'", out.getvalue()) | ||
103 | self.assertIn("KeyError: 'Invalid number 2'", out.getvalue()) | 104 | self.assertIn("KeyError: 'Invalid number 2'", out.getvalue()) |
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py index 8a10ff357b..f69efccfee 100644 --- a/meta/lib/oeqa/selftest/cases/oescripts.py +++ b/meta/lib/oeqa/selftest/cases/oescripts.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -8,7 +10,7 @@ import importlib | |||
8 | import unittest | 10 | import unittest |
9 | from oeqa.selftest.case import OESelftestTestCase | 11 | from oeqa.selftest.case import OESelftestTestCase |
10 | from oeqa.selftest.cases.buildhistory import BuildhistoryBase | 12 | from oeqa.selftest.cases.buildhistory import BuildhistoryBase |
11 | from oeqa.utils.commands import Command, runCmd, bitbake, get_bb_var, get_test_layer | 13 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var |
12 | from oeqa.utils import CommandError | 14 | from oeqa.utils import CommandError |
13 | 15 | ||
14 | class BuildhistoryDiffTests(BuildhistoryBase): | 16 | class BuildhistoryDiffTests(BuildhistoryBase): |
@@ -21,7 +23,7 @@ class BuildhistoryDiffTests(BuildhistoryBase): | |||
21 | pkgv = result.output.rstrip() | 23 | pkgv = result.output.rstrip() |
22 | result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR')) | 24 | result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR')) |
23 | expected_endlines = [ | 25 | expected_endlines = [ |
24 | "xcursor-transparent-theme-dev: RDEPENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv), | 26 | "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv), |
25 | "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv) | 27 | "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv) |
26 | ] | 28 | ] |
27 | for line in result.output.splitlines(): | 29 | for line in result.output.splitlines(): |
@@ -35,19 +37,15 @@ class BuildhistoryDiffTests(BuildhistoryBase): | |||
35 | self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines)) | 37 | self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines)) |
36 | 38 | ||
37 | @unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present") | 39 | @unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present") |
38 | class OEScriptTests(OESelftestTestCase): | 40 | class OEPybootchartguyTests(OESelftestTestCase): |
39 | 41 | ||
40 | @classmethod | 42 | @classmethod |
41 | def setUpClass(cls): | 43 | def setUpClass(cls): |
42 | super(OEScriptTests, cls).setUpClass() | 44 | super().setUpClass() |
43 | import cairo | ||
44 | bitbake("core-image-minimal -c rootfs -f") | 45 | bitbake("core-image-minimal -c rootfs -f") |
45 | cls.tmpdir = get_bb_var('TMPDIR') | 46 | cls.tmpdir = get_bb_var('TMPDIR') |
46 | cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1] | 47 | cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1] |
47 | 48 | cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts') | |
48 | scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts') | ||
49 | |||
50 | class OEPybootchartguyTests(OEScriptTests): | ||
51 | 49 | ||
52 | def test_pybootchartguy_help(self): | 50 | def test_pybootchartguy_help(self): |
53 | runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir) | 51 | runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir) |
@@ -67,7 +65,10 @@ class OEPybootchartguyTests(OEScriptTests): | |||
67 | 65 | ||
68 | class OEGitproxyTests(OESelftestTestCase): | 66 | class OEGitproxyTests(OESelftestTestCase): |
69 | 67 | ||
70 | scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts') | 68 | @classmethod |
69 | def setUpClass(cls): | ||
70 | super().setUpClass() | ||
71 | cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts') | ||
71 | 72 | ||
72 | def test_oegitproxy_help(self): | 73 | def test_oegitproxy_help(self): |
73 | try: | 74 | try: |
@@ -125,15 +126,22 @@ class OEGitproxyTests(OESelftestTestCase): | |||
125 | class OeRunNativeTest(OESelftestTestCase): | 126 | class OeRunNativeTest(OESelftestTestCase): |
126 | def test_oe_run_native(self): | 127 | def test_oe_run_native(self): |
127 | bitbake("qemu-helper-native -c addto_recipe_sysroot") | 128 | bitbake("qemu-helper-native -c addto_recipe_sysroot") |
128 | result = runCmd("oe-run-native qemu-helper-native tunctl -h") | 129 | result = runCmd("oe-run-native qemu-helper-native qemu-oe-bridge-helper --help") |
129 | self.assertIn("Delete: tunctl -d device-name [-f tun-clone-device]", result.output) | 130 | self.assertIn("Helper function to find and exec qemu-bridge-helper", result.output) |
131 | |||
132 | class OEListPackageconfigTests(OESelftestTestCase): | ||
133 | |||
134 | @classmethod | ||
135 | def setUpClass(cls): | ||
136 | super().setUpClass() | ||
137 | cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts') | ||
130 | 138 | ||
131 | class OEListPackageconfigTests(OEScriptTests): | ||
132 | #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags | 139 | #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags |
133 | def check_endlines(self, results, expected_endlines): | 140 | def check_endlines(self, results, expected_endlines): |
134 | for line in results.output.splitlines(): | 141 | for line in results.output.splitlines(): |
135 | for el in expected_endlines: | 142 | for el in expected_endlines: |
136 | if line.split() == el.split(): | 143 | if line and line.split()[0] == el.split()[0] and \ |
144 | ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())): | ||
137 | expected_endlines.remove(el) | 145 | expected_endlines.remove(el) |
138 | break | 146 | break |
139 | 147 | ||
@@ -149,8 +157,8 @@ class OEListPackageconfigTests(OEScriptTests): | |||
149 | results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir) | 157 | results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir) |
150 | expected_endlines = [] | 158 | expected_endlines = [] |
151 | expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS") | 159 | expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS") |
152 | expected_endlines.append("pinentry gtk2 libcap ncurses qt secret") | 160 | expected_endlines.append("pinentry gtk2 ncurses qt secret") |
153 | expected_endlines.append("tar acl") | 161 | expected_endlines.append("tar acl selinux") |
154 | 162 | ||
155 | self.check_endlines(results, expected_endlines) | 163 | self.check_endlines(results, expected_endlines) |
156 | 164 | ||
@@ -167,11 +175,10 @@ class OEListPackageconfigTests(OEScriptTests): | |||
167 | def test_packageconfig_flags_option_all(self): | 175 | def test_packageconfig_flags_option_all(self): |
168 | results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) | 176 | results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) |
169 | expected_endlines = [] | 177 | expected_endlines = [] |
170 | expected_endlines.append("pinentry-1.1.1") | 178 | expected_endlines.append("pinentry-1.2.1") |
171 | expected_endlines.append("PACKAGECONFIG ncurses libcap") | 179 | expected_endlines.append("PACKAGECONFIG ncurses") |
172 | expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") | 180 | expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") |
173 | expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") | 181 | expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") |
174 | expected_endlines.append("PACKAGECONFIG[libcap] --with-libcap, --without-libcap, libcap") | ||
175 | expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses") | 182 | expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses") |
176 | expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret") | 183 | expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret") |
177 | 184 | ||
@@ -181,7 +188,7 @@ class OEListPackageconfigTests(OEScriptTests): | |||
181 | results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir) | 188 | results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir) |
182 | expected_endlines = [] | 189 | expected_endlines = [] |
183 | expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS") | 190 | expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS") |
184 | expected_endlines.append("pinentry gtk2 libcap ncurses qt secret") | 191 | expected_endlines.append("pinentry gtk2 ncurses qt secret") |
185 | 192 | ||
186 | self.check_endlines(results, expected_endlines) | 193 | self.check_endlines(results, expected_endlines) |
187 | 194 | ||
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py new file mode 100644 index 0000000000..e31063567b --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/overlayfs.py | |||
@@ -0,0 +1,502 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | from oeqa.selftest.case import OESelftestTestCase | ||
8 | from oeqa.utils.commands import bitbake, runqemu | ||
9 | from oeqa.core.decorator import OETestTag | ||
10 | from oeqa.core.decorator.data import skipIfNotMachine | ||
11 | |||
12 | def getline_qemu(out, line): | ||
13 | for l in out.split('\n'): | ||
14 | if line in l: | ||
15 | return l | ||
16 | |||
17 | def getline(res, line): | ||
18 | return getline_qemu(res.output, line) | ||
19 | |||
20 | class OverlayFSTests(OESelftestTestCase): | ||
21 | """Overlayfs class usage tests""" | ||
22 | |||
23 | def add_overlay_conf_to_machine(self): | ||
24 | machine_inc = """ | ||
25 | OVERLAYFS_MOUNT_POINT[mnt-overlay] = "/mnt/overlay" | ||
26 | """ | ||
27 | self.set_machine_config(machine_inc) | ||
28 | |||
29 | def test_distro_features_missing(self): | ||
30 | """ | ||
31 | Summary: Check that required DISTRO_FEATURES are set | ||
32 | Expected: Fail when either systemd or overlayfs are not in DISTRO_FEATURES | ||
33 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
34 | """ | ||
35 | |||
36 | config = """ | ||
37 | IMAGE_INSTALL:append = " overlayfs-user" | ||
38 | """ | ||
39 | overlayfs_recipe_append = """ | ||
40 | inherit overlayfs | ||
41 | """ | ||
42 | self.write_config(config) | ||
43 | self.add_overlay_conf_to_machine() | ||
44 | self.write_recipeinc('overlayfs-user', overlayfs_recipe_append) | ||
45 | |||
46 | res = bitbake('core-image-minimal', ignore_status=True) | ||
47 | line = getline(res, "overlayfs-user was skipped: missing required distro features") | ||
48 | self.assertTrue("overlayfs" in res.output, msg=res.output) | ||
49 | self.assertTrue("systemd" in res.output, msg=res.output) | ||
50 | self.assertTrue("ERROR: Required build target 'core-image-minimal' has no buildable providers." in res.output, msg=res.output) | ||
51 | |||
52 | def test_not_all_units_installed(self): | ||
53 | """ | ||
54 | Summary: Test QA check that we have required mount units in the image | ||
55 | Expected: Fail because mount unit for overlay partition is not installed | ||
56 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
57 | """ | ||
58 | |||
59 | config = """ | ||
60 | IMAGE_INSTALL:append = " overlayfs-user" | ||
61 | DISTRO_FEATURES:append = " systemd overlayfs usrmerge" | ||
62 | """ | ||
63 | |||
64 | self.write_config(config) | ||
65 | self.add_overlay_conf_to_machine() | ||
66 | |||
67 | res = bitbake('core-image-minimal', ignore_status=True) | ||
68 | line = getline(res, " Mount path /mnt/overlay not found in fstab and unit mnt-overlay.mount not found in systemd unit directories") | ||
69 | self.assertTrue(line and line.startswith("WARNING:"), msg=res.output) | ||
70 | line = getline(res, "Not all mount paths and units are installed in the image") | ||
71 | self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) | ||
72 | |||
73 | def test_not_all_units_installed_but_qa_skipped(self): | ||
74 | """ | ||
75 | Summary: Test skipping the QA check | ||
76 | Expected: Image is created successfully | ||
77 | Author: Claudius Heine <ch@denx.de> | ||
78 | """ | ||
79 | |||
80 | config = """ | ||
81 | IMAGE_INSTALL:append = " overlayfs-user" | ||
82 | DISTRO_FEATURES:append = " systemd overlayfs usrmerge" | ||
83 | OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured" | ||
84 | """ | ||
85 | |||
86 | self.write_config(config) | ||
87 | self.add_overlay_conf_to_machine() | ||
88 | |||
89 | bitbake('core-image-minimal') | ||
90 | |||
91 | def test_mount_unit_not_set(self): | ||
92 | """ | ||
93 | Summary: Test whether mount unit was set properly | ||
94 | Expected: Fail because mount unit was not set | ||
95 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
96 | """ | ||
97 | |||
98 | config = """ | ||
99 | IMAGE_INSTALL:append = " overlayfs-user" | ||
100 | DISTRO_FEATURES:append = " systemd overlayfs usrmerge" | ||
101 | """ | ||
102 | |||
103 | self.write_config(config) | ||
104 | |||
105 | res = bitbake('core-image-minimal', ignore_status=True) | ||
106 | line = getline(res, "A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration") | ||
107 | self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output) | ||
108 | |||
109 | def test_wrong_mount_unit_set(self): | ||
110 | """ | ||
111 | Summary: Test whether mount unit was set properly | ||
112 | Expected: Fail because not the correct flag used for mount unit | ||
113 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
114 | """ | ||
115 | |||
116 | config = """ | ||
117 | IMAGE_INSTALL:append = " overlayfs-user" | ||
118 | DISTRO_FEATURES:append = " systemd overlayfs usrmerge" | ||
119 | """ | ||
120 | |||
121 | wrong_machine_config = """ | ||
122 | OVERLAYFS_MOUNT_POINT[usr-share-overlay] = "/usr/share/overlay" | ||
123 | """ | ||
124 | |||
125 | self.write_config(config) | ||
126 | self.set_machine_config(wrong_machine_config) | ||
127 | |||
128 | res = bitbake('core-image-minimal', ignore_status=True) | ||
129 | line = getline(res, "Missing required mount point for OVERLAYFS_MOUNT_POINT[mnt-overlay] in your MACHINE configuration") | ||
130 | self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output) | ||
131 | |||
132 | def _test_correct_image(self, recipe, data): | ||
133 | """ | ||
134 | Summary: Check that we can create an image when all parameters are | ||
135 | set correctly | ||
136 | Expected: Image is created successfully | ||
137 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
138 | """ | ||
139 | |||
140 | config = """ | ||
141 | IMAGE_INSTALL:append = " overlayfs-user systemd-machine-units" | ||
142 | DISTRO_FEATURES:append = " overlayfs" | ||
143 | |||
144 | # Use systemd as init manager | ||
145 | INIT_MANAGER = "systemd" | ||
146 | |||
147 | # enable overlayfs in the kernel | ||
148 | KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc" | ||
149 | """ | ||
150 | |||
151 | overlayfs_recipe_append = """ | ||
152 | OVERLAYFS_WRITABLE_PATHS[mnt-overlay] += "/usr/share/another-overlay-mount" | ||
153 | |||
154 | SYSTEMD_SERVICE:${PN} += " \ | ||
155 | my-application.service \ | ||
156 | " | ||
157 | |||
158 | do_install:append() { | ||
159 | install -d ${D}${systemd_system_unitdir} | ||
160 | cat <<EOT > ${D}${systemd_system_unitdir}/my-application.service | ||
161 | [Unit] | ||
162 | Description=Sample application start-up unit | ||
163 | After=overlayfs-user-overlays.service | ||
164 | Requires=overlayfs-user-overlays.service | ||
165 | |||
166 | [Service] | ||
167 | Type=oneshot | ||
168 | ExecStart=/bin/true | ||
169 | RemainAfterExit=true | ||
170 | |||
171 | [Install] | ||
172 | WantedBy=multi-user.target | ||
173 | EOT | ||
174 | } | ||
175 | """ | ||
176 | |||
177 | self.write_config(config) | ||
178 | self.add_overlay_conf_to_machine() | ||
179 | self.write_recipeinc(recipe, data) | ||
180 | self.write_recipeinc('overlayfs-user', overlayfs_recipe_append) | ||
181 | |||
182 | bitbake('core-image-minimal') | ||
183 | |||
184 | with runqemu('core-image-minimal') as qemu: | ||
185 | # Check that application service started | ||
186 | status, output = qemu.run_serial("systemctl status my-application") | ||
187 | self.assertTrue("active (exited)" in output, msg=output) | ||
188 | |||
189 | # Check that overlay mounts are dependencies of our application unit | ||
190 | status, output = qemu.run_serial("systemctl list-dependencies my-application") | ||
191 | self.assertTrue("overlayfs-user-overlays.service" in output, msg=output) | ||
192 | |||
193 | status, output = qemu.run_serial("systemctl list-dependencies overlayfs-user-overlays") | ||
194 | self.assertTrue("usr-share-another\\x2doverlay\\x2dmount.mount" in output, msg=output) | ||
195 | self.assertTrue("usr-share-my\\x2dapplication.mount" in output, msg=output) | ||
196 | |||
197 | # Check that we have /mnt/overlay fs mounted as tmpfs and | ||
198 | # /usr/share/my-application as an overlay (see overlayfs-user recipe) | ||
199 | status, output = qemu.run_serial("/bin/mount -t tmpfs,overlay") | ||
200 | |||
201 | line = getline_qemu(output, "on /mnt/overlay") | ||
202 | self.assertTrue(line and line.startswith("tmpfs"), msg=output) | ||
203 | |||
204 | line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/my-application") | ||
205 | self.assertTrue(line and line.startswith("overlay"), msg=output) | ||
206 | |||
207 | line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/another-overlay-mount") | ||
208 | self.assertTrue(line and line.startswith("overlay"), msg=output) | ||
209 | |||
210 | @OETestTag("runqemu") | ||
211 | def test_correct_image_fstab(self): | ||
212 | """ | ||
213 | Summary: Check that we can create an image when all parameters are | ||
214 | set correctly via fstab | ||
215 | Expected: Image is created successfully | ||
216 | Author: Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com> | ||
217 | """ | ||
218 | |||
219 | base_files_append = """ | ||
220 | do_install:append() { | ||
221 | cat <<EOT >> ${D}${sysconfdir}/fstab | ||
222 | tmpfs /mnt/overlay tmpfs mode=1777,strictatime,nosuid,nodev 0 0 | ||
223 | EOT | ||
224 | } | ||
225 | """ | ||
226 | |||
227 | self._test_correct_image('base-files', base_files_append) | ||
228 | |||
229 | @OETestTag("runqemu") | ||
230 | def test_correct_image_unit(self): | ||
231 | """ | ||
232 | Summary: Check that we can create an image when all parameters are | ||
233 | set correctly via mount unit | ||
234 | Expected: Image is created successfully | ||
235 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
236 | """ | ||
237 | |||
238 | systemd_machine_unit_append = """ | ||
239 | SYSTEMD_SERVICE:${PN} += " \ | ||
240 | mnt-overlay.mount \ | ||
241 | " | ||
242 | |||
243 | do_install:append() { | ||
244 | install -d ${D}${systemd_system_unitdir} | ||
245 | cat <<EOT > ${D}${systemd_system_unitdir}/mnt-overlay.mount | ||
246 | [Unit] | ||
247 | Description=Tmpfs directory | ||
248 | DefaultDependencies=no | ||
249 | |||
250 | [Mount] | ||
251 | What=tmpfs | ||
252 | Where=/mnt/overlay | ||
253 | Type=tmpfs | ||
254 | Options=mode=1777,strictatime,nosuid,nodev | ||
255 | |||
256 | [Install] | ||
257 | WantedBy=multi-user.target | ||
258 | EOT | ||
259 | } | ||
260 | |||
261 | """ | ||
262 | |||
263 | self._test_correct_image('systemd-machine-units', systemd_machine_unit_append) | ||
264 | |||
265 | @OETestTag("runqemu") | ||
266 | class OverlayFSEtcRunTimeTests(OESelftestTestCase): | ||
267 | """overlayfs-etc class tests""" | ||
268 | |||
269 | def test_all_required_variables_set(self): | ||
270 | """ | ||
271 | Summary: Check that required variables are set | ||
272 | Expected: Fail when any of required variables is missing | ||
273 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
274 | """ | ||
275 | |||
276 | configBase = """ | ||
277 | # Use systemd as init manager | ||
278 | INIT_MANAGER = "systemd" | ||
279 | |||
280 | # enable overlayfs in the kernel | ||
281 | KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc" | ||
282 | |||
283 | # Image configuration for overlayfs-etc | ||
284 | EXTRA_IMAGE_FEATURES += "overlayfs-etc" | ||
285 | IMAGE_FEATURES:remove = "package-management" | ||
286 | """ | ||
287 | configMountPoint = """ | ||
288 | OVERLAYFS_ETC_MOUNT_POINT = "/data" | ||
289 | """ | ||
290 | configDevice = """ | ||
291 | OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p1" | ||
292 | """ | ||
293 | |||
294 | self.write_config(configBase) | ||
295 | res = bitbake('core-image-minimal', ignore_status=True) | ||
296 | line = getline(res, "OVERLAYFS_ETC_MOUNT_POINT must be set in your MACHINE configuration") | ||
297 | self.assertTrue(line, msg=res.output) | ||
298 | |||
299 | self.append_config(configMountPoint) | ||
300 | res = bitbake('core-image-minimal', ignore_status=True) | ||
301 | line = getline(res, "OVERLAYFS_ETC_DEVICE must be set in your MACHINE configuration") | ||
302 | self.assertTrue(line, msg=res.output) | ||
303 | |||
304 | self.append_config(configDevice) | ||
305 | res = bitbake('core-image-minimal', ignore_status=True) | ||
306 | line = getline(res, "OVERLAYFS_ETC_FSTYPE should contain a valid file system type on /dev/mmcblk0p1") | ||
307 | self.assertTrue(line, msg=res.output) | ||
308 | |||
309 | def test_image_feature_conflict(self): | ||
310 | """ | ||
311 | Summary: Overlayfs-etc is not allowed to be used with package-management | ||
312 | Expected: Feature conflict | ||
313 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
314 | """ | ||
315 | |||
316 | config = """ | ||
317 | # Use systemd as init manager | ||
318 | INIT_MANAGER = "systemd" | ||
319 | |||
320 | # enable overlayfs in the kernel | ||
321 | KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc" | ||
322 | EXTRA_IMAGE_FEATURES += "overlayfs-etc" | ||
323 | EXTRA_IMAGE_FEATURES += "package-management" | ||
324 | """ | ||
325 | |||
326 | self.write_config(config) | ||
327 | |||
328 | res = bitbake('core-image-minimal', ignore_status=True) | ||
329 | line = getline(res, "contains conflicting IMAGE_FEATURES") | ||
330 | self.assertTrue("overlayfs-etc" in res.output, msg=res.output) | ||
331 | self.assertTrue("package-management" in res.output, msg=res.output) | ||
332 | |||
333 | # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963 | ||
334 | @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently") | ||
335 | def test_image_feature_is_missing(self): | ||
336 | """ | ||
337 | Summary: Overlayfs-etc class is not applied when image feature is not set | ||
338 | Expected: Image is created successfully but /etc is not an overlay | ||
339 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
340 | """ | ||
341 | |||
342 | config = """ | ||
343 | # Use systemd as init manager | ||
344 | INIT_MANAGER = "systemd" | ||
345 | |||
346 | # enable overlayfs in the kernel | ||
347 | KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc" | ||
348 | |||
349 | IMAGE_FSTYPES += "wic" | ||
350 | WKS_FILE = "overlayfs_etc.wks.in" | ||
351 | |||
352 | EXTRA_IMAGE_FEATURES += "read-only-rootfs" | ||
353 | # Image configuration for overlayfs-etc | ||
354 | OVERLAYFS_ETC_MOUNT_POINT = "/data" | ||
355 | OVERLAYFS_ETC_DEVICE = "/dev/sda3" | ||
356 | OVERLAYFS_ROOTFS_TYPE = "ext4" | ||
357 | """ | ||
358 | |||
359 | self.write_config(config) | ||
360 | |||
361 | bitbake('core-image-minimal') | ||
362 | |||
363 | with runqemu('core-image-minimal', image_fstype='wic') as qemu: | ||
364 | status, output = qemu.run_serial("/bin/mount") | ||
365 | |||
366 | line = getline_qemu(output, "upperdir=/data/overlay-etc/upper") | ||
367 | self.assertFalse(line, msg=output) | ||
368 | |||
369 | @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently") | ||
370 | def test_sbin_init_preinit(self): | ||
371 | self.run_sbin_init(False, "ext4") | ||
372 | |||
373 | @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently") | ||
374 | def test_sbin_init_original(self): | ||
375 | self.run_sbin_init(True, "ext4") | ||
376 | |||
377 | @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently") | ||
378 | def test_sbin_init_read_only(self): | ||
379 | self.run_sbin_init(True, "squashfs") | ||
380 | |||
381 | def run_sbin_init(self, origInit, rootfsType): | ||
382 | """ | ||
383 | Summary: Confirm we can replace original init and mount overlay on top of /etc | ||
384 | Expected: Image is created successfully and /etc is mounted as an overlay | ||
385 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
386 | """ | ||
387 | |||
388 | config = self.get_working_config() | ||
389 | |||
390 | args = { | ||
391 | 'OVERLAYFS_INIT_OPTION': "" if origInit else "init=/sbin/preinit", | ||
392 | 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(origInit == True), | ||
393 | 'OVERLAYFS_ROOTFS_TYPE': rootfsType, | ||
394 | 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': int(rootfsType == "ext4") | ||
395 | } | ||
396 | |||
397 | self.write_config(config.format(**args)) | ||
398 | |||
399 | bitbake('core-image-minimal') | ||
400 | testFile = "/etc/my-test-data" | ||
401 | |||
402 | with runqemu('core-image-minimal', image_fstype='wic', discard_writes=False) as qemu: | ||
403 | status, output = qemu.run_serial("/bin/mount") | ||
404 | |||
405 | line = getline_qemu(output, "/dev/sda3") | ||
406 | self.assertTrue("/data" in output, msg=output) | ||
407 | |||
408 | line = getline_qemu(output, "upperdir=/data/overlay-etc/upper") | ||
409 | self.assertTrue(line and line.startswith("/data/overlay-etc/upper on /etc type overlay"), msg=output) | ||
410 | |||
411 | # check that lower layer is not available | ||
412 | status, output = qemu.run_serial("ls -1 /data/overlay-etc/lower") | ||
413 | line = getline_qemu(output, "No such file or directory") | ||
414 | self.assertTrue(line, msg=output) | ||
415 | |||
416 | status, output = qemu.run_serial("touch " + testFile) | ||
417 | status, output = qemu.run_serial("sync") | ||
418 | status, output = qemu.run_serial("ls -1 " + testFile) | ||
419 | line = getline_qemu(output, testFile) | ||
420 | self.assertTrue(line and line.startswith(testFile), msg=output) | ||
421 | |||
422 | # Check that file exists in /etc after reboot | ||
423 | with runqemu('core-image-minimal', image_fstype='wic') as qemu: | ||
424 | status, output = qemu.run_serial("ls -1 " + testFile) | ||
425 | line = getline_qemu(output, testFile) | ||
426 | self.assertTrue(line and line.startswith(testFile), msg=output) | ||
427 | |||
428 | @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently") | ||
429 | def test_lower_layer_access(self): | ||
430 | """ | ||
431 | Summary: Test that lower layer of /etc is available read-only when configured | ||
432 | Expected: Can't write to lower layer. The files on lower and upper different after | ||
433 | modification | ||
434 | Author: Vyacheslav Yurkov <uvv.mail@gmail.com> | ||
435 | """ | ||
436 | |||
437 | config = self.get_working_config() | ||
438 | |||
439 | configLower = """ | ||
440 | OVERLAYFS_ETC_EXPOSE_LOWER = "1" | ||
441 | IMAGE_INSTALL:append = " overlayfs-user" | ||
442 | """ | ||
443 | testFile = "lower-layer-test.txt" | ||
444 | |||
445 | args = { | ||
446 | 'OVERLAYFS_INIT_OPTION': "", | ||
447 | 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1, | ||
448 | 'OVERLAYFS_ROOTFS_TYPE': "ext4", | ||
449 | 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1 | ||
450 | } | ||
451 | |||
452 | self.write_config(config.format(**args)) | ||
453 | |||
454 | self.append_config(configLower) | ||
455 | bitbake('core-image-minimal') | ||
456 | |||
457 | with runqemu('core-image-minimal', image_fstype='wic') as qemu: | ||
458 | status, output = qemu.run_serial("echo \"Modified in upper\" > /etc/" + testFile) | ||
459 | status, output = qemu.run_serial("diff /etc/" + testFile + " /data/overlay-etc/lower/" + testFile) | ||
460 | line = getline_qemu(output, "Modified in upper") | ||
461 | self.assertTrue(line, msg=output) | ||
462 | line = getline_qemu(output, "Original file") | ||
463 | self.assertTrue(line, msg=output) | ||
464 | |||
465 | status, output = qemu.run_serial("touch /data/overlay-etc/lower/ro-test.txt") | ||
466 | line = getline_qemu(output, "Read-only file system") | ||
467 | self.assertTrue(line, msg=output) | ||
468 | |||
469 | def get_working_config(self): | ||
470 | return """ | ||
471 | # Use systemd as init manager | ||
472 | INIT_MANAGER = "systemd" | ||
473 | |||
474 | # enable overlayfs in the kernel | ||
475 | KERNEL_EXTRA_FEATURES:append = " \ | ||
476 | features/overlayfs/overlayfs.scc \ | ||
477 | cfg/fs/squashfs.scc" | ||
478 | |||
479 | IMAGE_FSTYPES += "wic" | ||
480 | OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}" | ||
481 | OVERLAYFS_ROOTFS_TYPE = "{OVERLAYFS_ROOTFS_TYPE}" | ||
482 | OVERLAYFS_ETC_CREATE_MOUNT_DIRS = "{OVERLAYFS_ETC_CREATE_MOUNT_DIRS}" | ||
483 | WKS_FILE = "overlayfs_etc.wks.in" | ||
484 | |||
485 | EXTRA_IMAGE_FEATURES += "read-only-rootfs" | ||
486 | # Image configuration for overlayfs-etc | ||
487 | EXTRA_IMAGE_FEATURES += "overlayfs-etc" | ||
488 | IMAGE_FEATURES:remove = "package-management" | ||
489 | OVERLAYFS_ETC_MOUNT_POINT = "/data" | ||
490 | OVERLAYFS_ETC_FSTYPE = "ext4" | ||
491 | OVERLAYFS_ETC_DEVICE = "/dev/sda3" | ||
492 | OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}" | ||
493 | |||
494 | ROOTFS_POSTPROCESS_COMMAND += "{OVERLAYFS_ROOTFS_TYPE}_rootfs" | ||
495 | |||
496 | ext4_rootfs() {{ | ||
497 | }} | ||
498 | |||
499 | squashfs_rootfs() {{ | ||
500 | mkdir -p ${{IMAGE_ROOTFS}}/data | ||
501 | }} | ||
502 | """ | ||
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py index 7166c3991f..1aa6c03f8a 100644 --- a/meta/lib/oeqa/selftest/cases/package.py +++ b/meta/lib/oeqa/selftest/cases/package.py | |||
@@ -1,10 +1,11 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | from oeqa.selftest.case import OESelftestTestCase | 7 | from oeqa.selftest.case import OESelftestTestCase |
6 | from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu | 8 | from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu |
7 | import stat | ||
8 | import subprocess, os | 9 | import subprocess, os |
9 | import oe.path | 10 | import oe.path |
10 | import re | 11 | import re |
@@ -88,6 +89,13 @@ class VersionOrdering(OESelftestTestCase): | |||
88 | self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort)) | 89 | self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort)) |
89 | 90 | ||
90 | class PackageTests(OESelftestTestCase): | 91 | class PackageTests(OESelftestTestCase): |
92 | # Verify that a recipe cannot rename a package into an existing one | ||
93 | def test_package_name_conflict(self): | ||
94 | res = bitbake("packagenameconflict", ignore_status=True) | ||
95 | self.assertNotEqual(res.status, 0) | ||
96 | err = "package name already exists" | ||
97 | self.assertTrue(err in res.output) | ||
98 | |||
91 | # Verify that a recipe which sets up hardlink files has those preserved into split packages | 99 | # Verify that a recipe which sets up hardlink files has those preserved into split packages |
92 | # Also test file sparseness is preserved | 100 | # Also test file sparseness is preserved |
93 | def test_preserve_sparse_hardlinks(self): | 101 | def test_preserve_sparse_hardlinks(self): |
@@ -116,9 +124,9 @@ class PackageTests(OESelftestTestCase): | |||
116 | 124 | ||
117 | # Verify gdb to read symbols from separated debug hardlink file correctly | 125 | # Verify gdb to read symbols from separated debug hardlink file correctly |
118 | def test_gdb_hardlink_debug(self): | 126 | def test_gdb_hardlink_debug(self): |
119 | features = 'IMAGE_INSTALL_append = " selftest-hardlink"\n' | 127 | features = 'IMAGE_INSTALL:append = " selftest-hardlink"\n' |
120 | features += 'IMAGE_INSTALL_append = " selftest-hardlink-dbg"\n' | 128 | features += 'IMAGE_INSTALL:append = " selftest-hardlink-dbg"\n' |
121 | features += 'IMAGE_INSTALL_append = " selftest-hardlink-gdb"\n' | 129 | features += 'IMAGE_INSTALL:append = " selftest-hardlink-gdb"\n' |
122 | self.write_config(features) | 130 | self.write_config(features) |
123 | bitbake("core-image-minimal") | 131 | bitbake("core-image-minimal") |
124 | 132 | ||
@@ -134,8 +142,10 @@ class PackageTests(OESelftestTestCase): | |||
134 | self.logger.error("No debugging symbols found. GDB result:\n%s" % output) | 142 | self.logger.error("No debugging symbols found. GDB result:\n%s" % output) |
135 | return False | 143 | return False |
136 | 144 | ||
137 | # Check debugging symbols works correctly | 145 | # Check debugging symbols works correctly. Don't look for a |
138 | elif re.match(r"Breakpoint 1.*hello\.c.*4", l): | 146 | # source file as optimisation can put the breakpoint inside |
147 | # stdio.h. | ||
148 | elif "Breakpoint 1 at" in l: | ||
139 | return True | 149 | return True |
140 | 150 | ||
141 | self.logger.error("GDB result:\n%d: %s", status, output) | 151 | self.logger.error("GDB result:\n%d: %s", status, output) |
@@ -150,25 +160,25 @@ class PackageTests(OESelftestTestCase): | |||
150 | self.fail('GDB %s failed' % binary) | 160 | self.fail('GDB %s failed' % binary) |
151 | 161 | ||
152 | def test_preserve_ownership(self): | 162 | def test_preserve_ownership(self): |
153 | import os, stat, oe.cachedpath | 163 | features = 'IMAGE_INSTALL:append = " selftest-chown"\n' |
154 | features = 'IMAGE_INSTALL_append = " selftest-chown"\n' | ||
155 | self.write_config(features) | 164 | self.write_config(features) |
156 | bitbake("core-image-minimal") | 165 | bitbake("core-image-minimal") |
157 | 166 | ||
158 | sysconfdir = get_bb_var('sysconfdir', 'selftest-chown') | 167 | def check_ownership(qemu, expected_gid, expected_uid, path): |
159 | def check_ownership(qemu, gid, uid, path): | ||
160 | self.logger.info("Check ownership of %s", path) | 168 | self.logger.info("Check ownership of %s", path) |
161 | status, output = qemu.run_serial(r'/bin/stat -c "%U %G" ' + path, timeout=60) | 169 | status, output = qemu.run_serial('stat -c "%U %G" ' + path) |
162 | output = output.split(" ") | 170 | self.assertEqual(status, 1, "stat failed: " + output) |
163 | if output[0] != uid or output[1] != gid : | 171 | try: |
164 | self.logger.error("Incrrect ownership %s [%s:%s]", path, output[0], output[1]) | 172 | uid, gid = output.split() |
165 | return False | 173 | self.assertEqual(uid, expected_uid) |
166 | return True | 174 | self.assertEqual(gid, expected_gid) |
175 | except ValueError: | ||
176 | self.fail("Cannot parse output: " + output) | ||
167 | 177 | ||
178 | sysconfdir = get_bb_var('sysconfdir', 'selftest-chown') | ||
168 | with runqemu('core-image-minimal') as qemu: | 179 | with runqemu('core-image-minimal') as qemu: |
169 | for path in [ sysconfdir + "/selftest-chown/file", | 180 | for path in [ sysconfdir + "/selftest-chown/file", |
170 | sysconfdir + "/selftest-chown/dir", | 181 | sysconfdir + "/selftest-chown/dir", |
171 | sysconfdir + "/selftest-chown/symlink", | 182 | sysconfdir + "/selftest-chown/symlink", |
172 | sysconfdir + "/selftest-chown/fifotest/fifo"]: | 183 | sysconfdir + "/selftest-chown/fifotest/fifo"]: |
173 | if not check_ownership(qemu, "test", "test", path): | 184 | check_ownership(qemu, "test", "test", path) |
174 | self.fail('Test ownership %s failed' % path) | ||
diff --git a/meta/lib/oeqa/selftest/cases/pkgdata.py b/meta/lib/oeqa/selftest/cases/pkgdata.py index 254abc40c6..d786c33018 100644 --- a/meta/lib/oeqa/selftest/cases/pkgdata.py +++ b/meta/lib/oeqa/selftest/cases/pkgdata.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -47,8 +49,8 @@ class OePkgdataUtilTests(OESelftestTestCase): | |||
47 | self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output) | 49 | self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output) |
48 | 50 | ||
49 | def test_find_path(self): | 51 | def test_find_path(self): |
50 | result = runCmd('oe-pkgdata-util find-path /lib/libz.so.1') | 52 | result = runCmd('oe-pkgdata-util find-path /usr/lib/libz.so.1') |
51 | self.assertEqual(result.output, 'zlib: /lib/libz.so.1') | 53 | self.assertEqual(result.output, 'zlib: /usr/lib/libz.so.1') |
52 | result = runCmd('oe-pkgdata-util find-path /usr/bin/m4') | 54 | result = runCmd('oe-pkgdata-util find-path /usr/bin/m4') |
53 | self.assertEqual(result.output, 'm4: /usr/bin/m4') | 55 | self.assertEqual(result.output, 'm4: /usr/bin/m4') |
54 | result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True) | 56 | result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True) |
@@ -120,8 +122,7 @@ class OePkgdataUtilTests(OESelftestTestCase): | |||
120 | curpkg = line.split(':')[0] | 122 | curpkg = line.split(':')[0] |
121 | files[curpkg] = [] | 123 | files[curpkg] = [] |
122 | return files | 124 | return files |
123 | bb_vars = get_bb_vars(['base_libdir', 'libdir', 'includedir', 'mandir']) | 125 | bb_vars = get_bb_vars(['libdir', 'includedir', 'mandir']) |
124 | base_libdir = bb_vars['base_libdir'] | ||
125 | libdir = bb_vars['libdir'] | 126 | libdir = bb_vars['libdir'] |
126 | includedir = bb_vars['includedir'] | 127 | includedir = bb_vars['includedir'] |
127 | mandir = bb_vars['mandir'] | 128 | mandir = bb_vars['mandir'] |
@@ -138,7 +139,7 @@ class OePkgdataUtilTests(OESelftestTestCase): | |||
138 | self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output) | 139 | self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output) |
139 | self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) | 140 | self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
140 | self.assertGreater(len(files['libz1']), 1) | 141 | self.assertGreater(len(files['libz1']), 1) |
141 | libspec = os.path.join(base_libdir, 'libz.so.1.*') | 142 | libspec = os.path.join(libdir, 'libz.so.1.*') |
142 | found = False | 143 | found = False |
143 | for fileitem in files['libz1']: | 144 | for fileitem in files['libz1']: |
144 | if fnmatch.fnmatchcase(fileitem, libspec): | 145 | if fnmatch.fnmatchcase(fileitem, libspec): |
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py index 578b2b4dd9..8da3739c57 100644 --- a/meta/lib/oeqa/selftest/cases/prservice.py +++ b/meta/lib/oeqa/selftest/cases/prservice.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -12,6 +14,8 @@ from oeqa.selftest.case import OESelftestTestCase | |||
12 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var | 14 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var |
13 | from oeqa.utils.network import get_free_port | 15 | from oeqa.utils.network import get_free_port |
14 | 16 | ||
17 | import bb.utils | ||
18 | |||
15 | class BitbakePrTests(OESelftestTestCase): | 19 | class BitbakePrTests(OESelftestTestCase): |
16 | 20 | ||
17 | @classmethod | 21 | @classmethod |
@@ -19,6 +23,16 @@ class BitbakePrTests(OESelftestTestCase): | |||
19 | super(BitbakePrTests, cls).setUpClass() | 23 | super(BitbakePrTests, cls).setUpClass() |
20 | cls.pkgdata_dir = get_bb_var('PKGDATA_DIR') | 24 | cls.pkgdata_dir = get_bb_var('PKGDATA_DIR') |
21 | 25 | ||
26 | cls.exported_db_path = os.path.join(cls.builddir, 'export.inc') | ||
27 | cls.current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3') | ||
28 | |||
29 | def cleanup(self): | ||
30 | # Ensure any memory resident bitbake is stopped | ||
31 | bitbake("-m") | ||
32 | # Remove any existing export file or prserv database | ||
33 | bb.utils.remove(self.exported_db_path) | ||
34 | bb.utils.remove(self.current_db_path + "*") | ||
35 | |||
22 | def get_pr_version(self, package_name): | 36 | def get_pr_version(self, package_name): |
23 | package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name) | 37 | package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name) |
24 | package_data = ftools.read_file(package_data_file) | 38 | package_data = ftools.read_file(package_data_file) |
@@ -40,13 +54,14 @@ class BitbakePrTests(OESelftestTestCase): | |||
40 | return str(stamps[0]) | 54 | return str(stamps[0]) |
41 | 55 | ||
42 | def increment_package_pr(self, package_name): | 56 | def increment_package_pr(self, package_name): |
43 | inc_data = "do_package_append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now() | 57 | inc_data = "do_package:append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now() |
44 | self.write_recipeinc(package_name, inc_data) | 58 | self.write_recipeinc(package_name, inc_data) |
45 | res = bitbake(package_name, ignore_status=True) | 59 | res = bitbake(package_name, ignore_status=True) |
46 | self.delete_recipeinc(package_name) | 60 | self.delete_recipeinc(package_name) |
47 | self.assertEqual(res.status, 0, msg=res.output) | 61 | self.assertEqual(res.status, 0, msg=res.output) |
48 | 62 | ||
49 | def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'): | 63 | def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'): |
64 | self.cleanup() | ||
50 | config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type | 65 | config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type |
51 | self.write_config(config_package_data) | 66 | self.write_config(config_package_data) |
52 | config_server_data = 'PRSERV_HOST = "%s"' % pr_socket | 67 | config_server_data = 'PRSERV_HOST = "%s"' % pr_socket |
@@ -66,24 +81,24 @@ class BitbakePrTests(OESelftestTestCase): | |||
66 | self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1)) | 81 | self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1)) |
67 | self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1) | 82 | self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1) |
68 | 83 | ||
84 | self.cleanup() | ||
85 | |||
69 | def run_test_pr_export_import(self, package_name, replace_current_db=True): | 86 | def run_test_pr_export_import(self, package_name, replace_current_db=True): |
70 | self.config_pr_tests(package_name) | 87 | self.config_pr_tests(package_name) |
71 | 88 | ||
72 | self.increment_package_pr(package_name) | 89 | self.increment_package_pr(package_name) |
73 | pr_1 = self.get_pr_version(package_name) | 90 | pr_1 = self.get_pr_version(package_name) |
74 | 91 | ||
75 | exported_db_path = os.path.join(self.builddir, 'export.inc') | 92 | export_result = runCmd("bitbake-prserv-tool export %s" % self.exported_db_path, ignore_status=True) |
76 | export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True) | ||
77 | self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output) | 93 | self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output) |
78 | self.assertTrue(os.path.exists(exported_db_path)) | 94 | self.assertTrue(os.path.exists(self.exported_db_path), msg="%s didn't exist, tool output %s" % (self.exported_db_path, export_result.output)) |
79 | 95 | ||
80 | if replace_current_db: | 96 | if replace_current_db: |
81 | current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3') | 97 | self.assertTrue(os.path.exists(self.current_db_path), msg="Path to current PR Service database is invalid: %s" % self.current_db_path) |
82 | self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path) | 98 | os.remove(self.current_db_path) |
83 | os.remove(current_db_path) | ||
84 | 99 | ||
85 | import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True) | 100 | import_result = runCmd("bitbake-prserv-tool import %s" % self.exported_db_path, ignore_status=True) |
86 | os.remove(exported_db_path) | 101 | #os.remove(self.exported_db_path) |
87 | self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output) | 102 | self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output) |
88 | 103 | ||
89 | self.increment_package_pr(package_name) | 104 | self.increment_package_pr(package_name) |
@@ -91,6 +106,8 @@ class BitbakePrTests(OESelftestTestCase): | |||
91 | 106 | ||
92 | self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1)) | 107 | self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1)) |
93 | 108 | ||
109 | self.cleanup() | ||
110 | |||
94 | def test_import_export_replace_db(self): | 111 | def test_import_export_replace_db(self): |
95 | self.run_test_pr_export_import('m4') | 112 | self.run_test_pr_export_import('m4') |
96 | 113 | ||
diff --git a/meta/lib/oeqa/selftest/cases/pseudo.py b/meta/lib/oeqa/selftest/cases/pseudo.py index 33593d5ce9..3ef8786022 100644 --- a/meta/lib/oeqa/selftest/cases/pseudo.py +++ b/meta/lib/oeqa/selftest/cases/pseudo.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py index 9d56e9e1e3..aebea42502 100644 --- a/meta/lib/oeqa/selftest/cases/recipetool.py +++ b/meta/lib/oeqa/selftest/cases/recipetool.py | |||
@@ -1,7 +1,10 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
7 | import errno | ||
5 | import os | 8 | import os |
6 | import shutil | 9 | import shutil |
7 | import tempfile | 10 | import tempfile |
@@ -25,7 +28,17 @@ def tearDownModule(): | |||
25 | runCmd('rm -rf %s' % templayerdir) | 28 | runCmd('rm -rf %s' % templayerdir) |
26 | 29 | ||
27 | 30 | ||
28 | class RecipetoolBase(devtool.DevtoolBase): | 31 | def needTomllib(test): |
32 | # This test require python 3.11 or above for the tomllib module or tomli module to be installed | ||
33 | try: | ||
34 | import tomllib | ||
35 | except ImportError: | ||
36 | try: | ||
37 | import tomli | ||
38 | except ImportError: | ||
39 | test.skipTest('Test requires python 3.11 or above for tomllib module or tomli module') | ||
40 | |||
41 | class RecipetoolBase(devtool.DevtoolTestCase): | ||
29 | 42 | ||
30 | def setUpLocal(self): | 43 | def setUpLocal(self): |
31 | super(RecipetoolBase, self).setUpLocal() | 44 | super(RecipetoolBase, self).setUpLocal() |
@@ -35,6 +48,8 @@ class RecipetoolBase(devtool.DevtoolBase): | |||
35 | self.testfile = os.path.join(self.tempdir, 'testfile') | 48 | self.testfile = os.path.join(self.tempdir, 'testfile') |
36 | with open(self.testfile, 'w') as f: | 49 | with open(self.testfile, 'w') as f: |
37 | f.write('Test file\n') | 50 | f.write('Test file\n') |
51 | config = 'BBMASK += "meta-poky/recipes-core/base-files/base-files_%.bbappend"\n' | ||
52 | self.append_config(config) | ||
38 | 53 | ||
39 | def tearDownLocal(self): | 54 | def tearDownLocal(self): |
40 | runCmd('rm -rf %s/recipes-*' % self.templayerdir) | 55 | runCmd('rm -rf %s/recipes-*' % self.templayerdir) |
@@ -68,17 +83,16 @@ class RecipetoolBase(devtool.DevtoolBase): | |||
68 | return bbappendfile, result.output | 83 | return bbappendfile, result.output |
69 | 84 | ||
70 | 85 | ||
71 | class RecipetoolTests(RecipetoolBase): | 86 | class RecipetoolAppendTests(RecipetoolBase): |
72 | 87 | ||
73 | @classmethod | 88 | @classmethod |
74 | def setUpClass(cls): | 89 | def setUpClass(cls): |
75 | super(RecipetoolTests, cls).setUpClass() | 90 | super(RecipetoolAppendTests, cls).setUpClass() |
76 | # Ensure we have the right data in shlibs/pkgdata | 91 | # Ensure we have the right data in shlibs/pkgdata |
77 | cls.logger.info('Running bitbake to generate pkgdata') | 92 | cls.logger.info('Running bitbake to generate pkgdata') |
78 | bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile') | 93 | bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile') |
79 | bb_vars = get_bb_vars(['COREBASE', 'BBPATH']) | 94 | bb_vars = get_bb_vars(['COREBASE']) |
80 | cls.corebase = bb_vars['COREBASE'] | 95 | cls.corebase = bb_vars['COREBASE'] |
81 | cls.bbpath = bb_vars['BBPATH'] | ||
82 | 96 | ||
83 | def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles): | 97 | def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles): |
84 | cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options) | 98 | cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options) |
@@ -94,7 +108,7 @@ class RecipetoolTests(RecipetoolBase): | |||
94 | 108 | ||
95 | def test_recipetool_appendfile_basic(self): | 109 | def test_recipetool_appendfile_basic(self): |
96 | # Basic test | 110 | # Basic test |
97 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 111 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
98 | '\n'] | 112 | '\n'] |
99 | _, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd']) | 113 | _, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd']) |
100 | self.assertNotIn('WARNING: ', output) | 114 | self.assertNotIn('WARNING: ', output) |
@@ -112,11 +126,11 @@ class RecipetoolTests(RecipetoolBase): | |||
112 | # Need a test file - should be executable | 126 | # Need a test file - should be executable |
113 | testfile2 = os.path.join(self.corebase, 'oe-init-build-env') | 127 | testfile2 = os.path.join(self.corebase, 'oe-init-build-env') |
114 | testfile2name = os.path.basename(testfile2) | 128 | testfile2name = os.path.basename(testfile2) |
115 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 129 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
116 | '\n', | 130 | '\n', |
117 | 'SRC_URI += "file://%s"\n' % testfile2name, | 131 | 'SRC_URI += "file://%s"\n' % testfile2name, |
118 | '\n', | 132 | '\n', |
119 | 'do_install_append() {\n', | 133 | 'do_install:append() {\n', |
120 | ' install -d ${D}${base_bindir}\n', | 134 | ' install -d ${D}${base_bindir}\n', |
121 | ' install -m 0755 ${WORKDIR}/%s ${D}${base_bindir}/ls\n' % testfile2name, | 135 | ' install -m 0755 ${WORKDIR}/%s ${D}${base_bindir}/ls\n' % testfile2name, |
122 | '}\n'] | 136 | '}\n'] |
@@ -138,11 +152,11 @@ class RecipetoolTests(RecipetoolBase): | |||
138 | 152 | ||
139 | def test_recipetool_appendfile_add(self): | 153 | def test_recipetool_appendfile_add(self): |
140 | # Try arbitrary file add to a recipe | 154 | # Try arbitrary file add to a recipe |
141 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 155 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
142 | '\n', | 156 | '\n', |
143 | 'SRC_URI += "file://testfile"\n', | 157 | 'SRC_URI += "file://testfile"\n', |
144 | '\n', | 158 | '\n', |
145 | 'do_install_append() {\n', | 159 | 'do_install:append() {\n', |
146 | ' install -d ${D}${datadir}\n', | 160 | ' install -d ${D}${datadir}\n', |
147 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', | 161 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', |
148 | '}\n'] | 162 | '}\n'] |
@@ -151,13 +165,13 @@ class RecipetoolTests(RecipetoolBase): | |||
151 | # (so we're testing that, plus modifying an existing bbappend) | 165 | # (so we're testing that, plus modifying an existing bbappend) |
152 | testfile2 = os.path.join(self.corebase, 'oe-init-build-env') | 166 | testfile2 = os.path.join(self.corebase, 'oe-init-build-env') |
153 | testfile2name = os.path.basename(testfile2) | 167 | testfile2name = os.path.basename(testfile2) |
154 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 168 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
155 | '\n', | 169 | '\n', |
156 | 'SRC_URI += "file://testfile \\\n', | 170 | 'SRC_URI += "file://testfile \\\n', |
157 | ' file://%s \\\n' % testfile2name, | 171 | ' file://%s \\\n' % testfile2name, |
158 | ' "\n', | 172 | ' "\n', |
159 | '\n', | 173 | '\n', |
160 | 'do_install_append() {\n', | 174 | 'do_install:append() {\n', |
161 | ' install -d ${D}${datadir}\n', | 175 | ' install -d ${D}${datadir}\n', |
162 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', | 176 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', |
163 | ' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name, | 177 | ' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name, |
@@ -166,11 +180,11 @@ class RecipetoolTests(RecipetoolBase): | |||
166 | 180 | ||
167 | def test_recipetool_appendfile_add_bindir(self): | 181 | def test_recipetool_appendfile_add_bindir(self): |
168 | # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable | 182 | # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable |
169 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 183 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
170 | '\n', | 184 | '\n', |
171 | 'SRC_URI += "file://testfile"\n', | 185 | 'SRC_URI += "file://testfile"\n', |
172 | '\n', | 186 | '\n', |
173 | 'do_install_append() {\n', | 187 | 'do_install:append() {\n', |
174 | ' install -d ${D}${bindir}\n', | 188 | ' install -d ${D}${bindir}\n', |
175 | ' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n', | 189 | ' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n', |
176 | '}\n'] | 190 | '}\n'] |
@@ -179,13 +193,13 @@ class RecipetoolTests(RecipetoolBase): | |||
179 | 193 | ||
180 | def test_recipetool_appendfile_add_machine(self): | 194 | def test_recipetool_appendfile_add_machine(self): |
181 | # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable | 195 | # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable |
182 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 196 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
183 | '\n', | 197 | '\n', |
184 | 'PACKAGE_ARCH = "${MACHINE_ARCH}"\n', | 198 | 'PACKAGE_ARCH = "${MACHINE_ARCH}"\n', |
185 | '\n', | 199 | '\n', |
186 | 'SRC_URI_append_mymachine = " file://testfile"\n', | 200 | 'SRC_URI:append:mymachine = " file://testfile"\n', |
187 | '\n', | 201 | '\n', |
188 | 'do_install_append_mymachine() {\n', | 202 | 'do_install:append:mymachine() {\n', |
189 | ' install -d ${D}${datadir}\n', | 203 | ' install -d ${D}${datadir}\n', |
190 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', | 204 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', |
191 | '}\n'] | 205 | '}\n'] |
@@ -194,32 +208,32 @@ class RecipetoolTests(RecipetoolBase): | |||
194 | 208 | ||
195 | def test_recipetool_appendfile_orig(self): | 209 | def test_recipetool_appendfile_orig(self): |
196 | # A file that's in SRC_URI and in do_install with the same name | 210 | # A file that's in SRC_URI and in do_install with the same name |
197 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 211 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
198 | '\n'] | 212 | '\n'] |
199 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig']) | 213 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig']) |
200 | self.assertNotIn('WARNING: ', output) | 214 | self.assertNotIn('WARNING: ', output) |
201 | 215 | ||
202 | def test_recipetool_appendfile_todir(self): | 216 | def test_recipetool_appendfile_todir(self): |
203 | # A file that's in SRC_URI and in do_install with destination directory rather than file | 217 | # A file that's in SRC_URI and in do_install with destination directory rather than file |
204 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 218 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
205 | '\n'] | 219 | '\n'] |
206 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir']) | 220 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir']) |
207 | self.assertNotIn('WARNING: ', output) | 221 | self.assertNotIn('WARNING: ', output) |
208 | 222 | ||
209 | def test_recipetool_appendfile_renamed(self): | 223 | def test_recipetool_appendfile_renamed(self): |
210 | # A file that's in SRC_URI with a different name to the destination file | 224 | # A file that's in SRC_URI with a different name to the destination file |
211 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 225 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
212 | '\n'] | 226 | '\n'] |
213 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1']) | 227 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1']) |
214 | self.assertNotIn('WARNING: ', output) | 228 | self.assertNotIn('WARNING: ', output) |
215 | 229 | ||
216 | def test_recipetool_appendfile_subdir(self): | 230 | def test_recipetool_appendfile_subdir(self): |
217 | # A file that's in SRC_URI in a subdir | 231 | # A file that's in SRC_URI in a subdir |
218 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 232 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
219 | '\n', | 233 | '\n', |
220 | 'SRC_URI += "file://testfile"\n', | 234 | 'SRC_URI += "file://testfile"\n', |
221 | '\n', | 235 | '\n', |
222 | 'do_install_append() {\n', | 236 | 'do_install:append() {\n', |
223 | ' install -d ${D}${datadir}\n', | 237 | ' install -d ${D}${datadir}\n', |
224 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n', | 238 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n', |
225 | '}\n'] | 239 | '}\n'] |
@@ -228,25 +242,25 @@ class RecipetoolTests(RecipetoolBase): | |||
228 | 242 | ||
229 | def test_recipetool_appendfile_inst_glob(self): | 243 | def test_recipetool_appendfile_inst_glob(self): |
230 | # A file that's in do_install as a glob | 244 | # A file that's in do_install as a glob |
231 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 245 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
232 | '\n'] | 246 | '\n'] |
233 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile']) | 247 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile']) |
234 | self.assertNotIn('WARNING: ', output) | 248 | self.assertNotIn('WARNING: ', output) |
235 | 249 | ||
236 | def test_recipetool_appendfile_inst_todir_glob(self): | 250 | def test_recipetool_appendfile_inst_todir_glob(self): |
237 | # A file that's in do_install as a glob with destination as a directory | 251 | # A file that's in do_install as a glob with destination as a directory |
238 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 252 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
239 | '\n'] | 253 | '\n'] |
240 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile']) | 254 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile']) |
241 | self.assertNotIn('WARNING: ', output) | 255 | self.assertNotIn('WARNING: ', output) |
242 | 256 | ||
243 | def test_recipetool_appendfile_patch(self): | 257 | def test_recipetool_appendfile_patch(self): |
244 | # A file that's added by a patch in SRC_URI | 258 | # A file that's added by a patch in SRC_URI |
245 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 259 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
246 | '\n', | 260 | '\n', |
247 | 'SRC_URI += "file://testfile"\n', | 261 | 'SRC_URI += "file://testfile"\n', |
248 | '\n', | 262 | '\n', |
249 | 'do_install_append() {\n', | 263 | 'do_install:append() {\n', |
250 | ' install -d ${D}${sysconfdir}\n', | 264 | ' install -d ${D}${sysconfdir}\n', |
251 | ' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n', | 265 | ' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n', |
252 | '}\n'] | 266 | '}\n'] |
@@ -260,11 +274,11 @@ class RecipetoolTests(RecipetoolBase): | |||
260 | 274 | ||
261 | def test_recipetool_appendfile_script(self): | 275 | def test_recipetool_appendfile_script(self): |
262 | # Now, a file that's in SRC_URI but installed by a script (so no mention in do_install) | 276 | # Now, a file that's in SRC_URI but installed by a script (so no mention in do_install) |
263 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 277 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
264 | '\n', | 278 | '\n', |
265 | 'SRC_URI += "file://testfile"\n', | 279 | 'SRC_URI += "file://testfile"\n', |
266 | '\n', | 280 | '\n', |
267 | 'do_install_append() {\n', | 281 | 'do_install:append() {\n', |
268 | ' install -d ${D}${datadir}\n', | 282 | ' install -d ${D}${datadir}\n', |
269 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n', | 283 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n', |
270 | '}\n'] | 284 | '}\n'] |
@@ -273,7 +287,7 @@ class RecipetoolTests(RecipetoolBase): | |||
273 | 287 | ||
274 | def test_recipetool_appendfile_inst_func(self): | 288 | def test_recipetool_appendfile_inst_func(self): |
275 | # A file that's installed from a function called by do_install | 289 | # A file that's installed from a function called by do_install |
276 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 290 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
277 | '\n'] | 291 | '\n'] |
278 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func']) | 292 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func']) |
279 | self.assertNotIn('WARNING: ', output) | 293 | self.assertNotIn('WARNING: ', output) |
@@ -283,11 +297,11 @@ class RecipetoolTests(RecipetoolBase): | |||
283 | # First try without specifying recipe | 297 | # First try without specifying recipe |
284 | self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile']) | 298 | self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile']) |
285 | # Now specify recipe | 299 | # Now specify recipe |
286 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 300 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
287 | '\n', | 301 | '\n', |
288 | 'SRC_URI += "file://testfile"\n', | 302 | 'SRC_URI += "file://testfile"\n', |
289 | '\n', | 303 | '\n', |
290 | 'do_install_append() {\n', | 304 | 'do_install:append() {\n', |
291 | ' install -d ${D}${datadir}\n', | 305 | ' install -d ${D}${datadir}\n', |
292 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n', | 306 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n', |
293 | '}\n'] | 307 | '}\n'] |
@@ -332,6 +346,9 @@ class RecipetoolTests(RecipetoolBase): | |||
332 | filename = try_appendfile_wc('-w') | 346 | filename = try_appendfile_wc('-w') |
333 | self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend') | 347 | self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend') |
334 | 348 | ||
349 | |||
350 | class RecipetoolCreateTests(RecipetoolBase): | ||
351 | |||
335 | def test_recipetool_create(self): | 352 | def test_recipetool_create(self): |
336 | # Try adding a recipe | 353 | # Try adding a recipe |
337 | tempsrc = os.path.join(self.tempdir, 'srctree') | 354 | tempsrc = os.path.join(self.tempdir, 'srctree') |
@@ -341,14 +358,13 @@ class RecipetoolTests(RecipetoolBase): | |||
341 | result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc)) | 358 | result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc)) |
342 | self.assertTrue(os.path.isfile(recipefile)) | 359 | self.assertTrue(os.path.isfile(recipefile)) |
343 | checkvars = {} | 360 | checkvars = {} |
344 | checkvars['LICENSE'] = 'GPLv2' | 361 | checkvars['LICENSE'] = 'GPL-2.0-only' |
345 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' | 362 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' |
346 | checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz' | 363 | checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz' |
347 | checkvars['SRC_URI[md5sum]'] = 'a560c57fac87c45b2fc17406cdf79288' | ||
348 | checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07' | 364 | checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07' |
349 | self._test_recipe_contents(recipefile, checkvars, []) | 365 | self._test_recipe_contents(recipefile, checkvars, []) |
350 | 366 | ||
351 | def test_recipetool_create_git(self): | 367 | def test_recipetool_create_autotools(self): |
352 | if 'x11' not in get_bb_var('DISTRO_FEATURES'): | 368 | if 'x11' not in get_bb_var('DISTRO_FEATURES'): |
353 | self.skipTest('Test requires x11 as distro feature') | 369 | self.skipTest('Test requires x11 as distro feature') |
354 | # Ensure we have the right data in shlibs/pkgdata | 370 | # Ensure we have the right data in shlibs/pkgdata |
@@ -357,15 +373,15 @@ class RecipetoolTests(RecipetoolBase): | |||
357 | tempsrc = os.path.join(self.tempdir, 'srctree') | 373 | tempsrc = os.path.join(self.tempdir, 'srctree') |
358 | os.makedirs(tempsrc) | 374 | os.makedirs(tempsrc) |
359 | recipefile = os.path.join(self.tempdir, 'libmatchbox.bb') | 375 | recipefile = os.path.join(self.tempdir, 'libmatchbox.bb') |
360 | srcuri = 'git://git.yoctoproject.org/libmatchbox' | 376 | srcuri = 'git://git.yoctoproject.org/libmatchbox;protocol=https' |
361 | result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc]) | 377 | result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc]) |
362 | self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) | 378 | self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) |
363 | checkvars = {} | 379 | checkvars = {} |
364 | checkvars['LICENSE'] = 'LGPLv2.1' | 380 | checkvars['LICENSE'] = 'LGPL-2.1-only' |
365 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34' | 381 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34' |
366 | checkvars['S'] = '${WORKDIR}/git' | 382 | checkvars['S'] = '${WORKDIR}/git' |
367 | checkvars['PV'] = '1.11+git${SRCPV}' | 383 | checkvars['PV'] = '1.11+git' |
368 | checkvars['SRC_URI'] = srcuri | 384 | checkvars['SRC_URI'] = srcuri + ';branch=master' |
369 | checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango']) | 385 | checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango']) |
370 | inherits = ['autotools', 'pkgconfig'] | 386 | inherits = ['autotools', 'pkgconfig'] |
371 | self._test_recipe_contents(recipefile, checkvars, inherits) | 387 | self._test_recipe_contents(recipefile, checkvars, inherits) |
@@ -374,8 +390,8 @@ class RecipetoolTests(RecipetoolBase): | |||
374 | # Try adding a recipe | 390 | # Try adding a recipe |
375 | temprecipe = os.path.join(self.tempdir, 'recipe') | 391 | temprecipe = os.path.join(self.tempdir, 'recipe') |
376 | os.makedirs(temprecipe) | 392 | os.makedirs(temprecipe) |
377 | pv = '1.7.3.0' | 393 | pv = '1.7.4.1' |
378 | srcuri = 'http://www.dest-unreach.org/socat/download/socat-%s.tar.bz2' % pv | 394 | srcuri = 'http://www.dest-unreach.org/socat/download/Archive/socat-%s.tar.bz2' % pv |
379 | result = runCmd('recipetool create %s -o %s' % (srcuri, temprecipe)) | 395 | result = runCmd('recipetool create %s -o %s' % (srcuri, temprecipe)) |
380 | dirlist = os.listdir(temprecipe) | 396 | dirlist = os.listdir(temprecipe) |
381 | if len(dirlist) > 1: | 397 | if len(dirlist) > 1: |
@@ -384,7 +400,7 @@ class RecipetoolTests(RecipetoolBase): | |||
384 | self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist))) | 400 | self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist))) |
385 | self.assertEqual(dirlist[0], 'socat_%s.bb' % pv, 'Recipe file incorrectly named') | 401 | self.assertEqual(dirlist[0], 'socat_%s.bb' % pv, 'Recipe file incorrectly named') |
386 | checkvars = {} | 402 | checkvars = {} |
387 | checkvars['LICENSE'] = set(['Unknown', 'GPLv2']) | 403 | checkvars['LICENSE'] = set(['Unknown', 'GPL-2.0-only']) |
388 | checkvars['LIC_FILES_CHKSUM'] = set(['file://COPYING.OpenSSL;md5=5c9bccc77f67a8328ef4ebaf468116f4', 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263']) | 404 | checkvars['LIC_FILES_CHKSUM'] = set(['file://COPYING.OpenSSL;md5=5c9bccc77f67a8328ef4ebaf468116f4', 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263']) |
389 | # We don't check DEPENDS since they are variable for this recipe depending on what's in the sysroot | 405 | # We don't check DEPENDS since they are variable for this recipe depending on what's in the sysroot |
390 | checkvars['S'] = None | 406 | checkvars['S'] = None |
@@ -400,9 +416,8 @@ class RecipetoolTests(RecipetoolBase): | |||
400 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | 416 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) |
401 | self.assertTrue(os.path.isfile(recipefile)) | 417 | self.assertTrue(os.path.isfile(recipefile)) |
402 | checkvars = {} | 418 | checkvars = {} |
403 | checkvars['LICENSE'] = set(['LGPLv2.1', 'MPL-1.1']) | 419 | checkvars['LICENSE'] = set(['LGPL-2.1-only', 'MPL-1.1-only']) |
404 | checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz' | 420 | checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz' |
405 | checkvars['SRC_URI[md5sum]'] = 'cee7be0ccfc892fa433d6c837df9522a' | ||
406 | checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b' | 421 | checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b' |
407 | checkvars['DEPENDS'] = set(['boost', 'zlib']) | 422 | checkvars['DEPENDS'] = set(['boost', 'zlib']) |
408 | inherits = ['cmake'] | 423 | inherits = ['cmake'] |
@@ -424,77 +439,271 @@ class RecipetoolTests(RecipetoolBase): | |||
424 | checkvars = {} | 439 | checkvars = {} |
425 | checkvars['SUMMARY'] = 'Node Server Example' | 440 | checkvars['SUMMARY'] = 'Node Server Example' |
426 | checkvars['HOMEPAGE'] = 'https://github.com/savoirfairelinux/node-server-example#readme' | 441 | checkvars['HOMEPAGE'] = 'https://github.com/savoirfairelinux/node-server-example#readme' |
427 | checkvars['LICENSE'] = set(['MIT', 'ISC', 'Unknown']) | 442 | checkvars['LICENSE'] = 'BSD-3-Clause & ISC & MIT & Unknown' |
428 | urls = [] | 443 | urls = [] |
429 | urls.append('npm://registry.npmjs.org/;package=@savoirfairelinux/node-server-example;version=${PV}') | 444 | urls.append('npm://registry.npmjs.org/;package=@savoirfairelinux/node-server-example;version=${PV}') |
430 | urls.append('npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json') | 445 | urls.append('npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json') |
431 | checkvars['SRC_URI'] = set(urls) | 446 | checkvars['SRC_URI'] = set(urls) |
432 | checkvars['S'] = '${WORKDIR}/npm' | 447 | checkvars['S'] = '${WORKDIR}/npm' |
433 | checkvars['LICENSE_${PN}'] = 'MIT' | 448 | checkvars['LICENSE:${PN}'] = 'MIT' |
434 | checkvars['LICENSE_${PN}-base64'] = 'Unknown' | 449 | checkvars['LICENSE:${PN}-base64'] = 'Unknown' |
435 | checkvars['LICENSE_${PN}-accepts'] = 'MIT' | 450 | checkvars['LICENSE:${PN}-accepts'] = 'MIT' |
436 | checkvars['LICENSE_${PN}-inherits'] = 'ISC' | 451 | checkvars['LICENSE:${PN}-inherits'] = 'ISC' |
437 | inherits = ['npm'] | 452 | inherits = ['npm'] |
438 | self._test_recipe_contents(recipefile, checkvars, inherits) | 453 | self._test_recipe_contents(recipefile, checkvars, inherits) |
439 | 454 | ||
440 | def test_recipetool_create_github(self): | 455 | def test_recipetool_create_github(self): |
441 | # Basic test to see if github URL mangling works | 456 | # Basic test to see if github URL mangling works. Deliberately use an |
457 | # older release of Meson at present so we don't need a toml parser. | ||
442 | temprecipe = os.path.join(self.tempdir, 'recipe') | 458 | temprecipe = os.path.join(self.tempdir, 'recipe') |
443 | os.makedirs(temprecipe) | 459 | os.makedirs(temprecipe) |
444 | recipefile = os.path.join(temprecipe, 'meson_git.bb') | 460 | recipefile = os.path.join(temprecipe, 'python3-meson_git.bb') |
445 | srcuri = 'https://github.com/mesonbuild/meson;rev=0.32.0' | 461 | srcuri = 'https://github.com/mesonbuild/meson;rev=0.52.1' |
446 | result = runCmd(['recipetool', 'create', '-o', temprecipe, srcuri]) | 462 | cmd = ['recipetool', 'create', '-o', temprecipe, srcuri] |
447 | self.assertTrue(os.path.isfile(recipefile)) | 463 | result = runCmd(cmd) |
464 | self.assertTrue(os.path.isfile(recipefile), msg="recipe %s not created for command %s, output %s" % (recipefile, " ".join(cmd), result.output)) | ||
448 | checkvars = {} | 465 | checkvars = {} |
449 | checkvars['LICENSE'] = set(['Apache-2.0']) | 466 | checkvars['LICENSE'] = set(['Apache-2.0', "Unknown"]) |
450 | checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https' | 467 | checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=0.52' |
451 | inherits = ['setuptools3'] | 468 | inherits = ['setuptools3'] |
452 | self._test_recipe_contents(recipefile, checkvars, inherits) | 469 | self._test_recipe_contents(recipefile, checkvars, inherits) |
453 | 470 | ||
454 | def test_recipetool_create_python3_setuptools(self): | 471 | def test_recipetool_create_python3_setuptools(self): |
455 | # Test creating python3 package from tarball (using setuptools3 class) | 472 | # Test creating python3 package from tarball (using setuptools3 class) |
473 | # Use the --no-pypi switch to avoid creating a pypi enabled recipe and | ||
474 | # and check the created recipe as if it was a more general tarball | ||
456 | temprecipe = os.path.join(self.tempdir, 'recipe') | 475 | temprecipe = os.path.join(self.tempdir, 'recipe') |
457 | os.makedirs(temprecipe) | 476 | os.makedirs(temprecipe) |
458 | pn = 'python-magic' | 477 | pn = 'python-magic' |
459 | pv = '0.4.15' | 478 | pv = '0.4.15' |
460 | recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv)) | 479 | recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv)) |
461 | srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv | 480 | srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv |
462 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | 481 | result = runCmd('recipetool create --no-pypi -o %s %s' % (temprecipe, srcuri)) |
463 | self.assertTrue(os.path.isfile(recipefile)) | 482 | self.assertTrue(os.path.isfile(recipefile)) |
464 | checkvars = {} | 483 | checkvars = {} |
465 | checkvars['LICENSE'] = set(['MIT']) | 484 | checkvars['LICENSE'] = set(['MIT']) |
466 | checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88' | 485 | checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88' |
467 | checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz' | 486 | checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz' |
468 | checkvars['SRC_URI[md5sum]'] = 'e384c95a47218f66c6501cd6dd45ff59' | ||
469 | checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5' | 487 | checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5' |
470 | inherits = ['setuptools3'] | 488 | inherits = ['setuptools3'] |
471 | self._test_recipe_contents(recipefile, checkvars, inherits) | 489 | self._test_recipe_contents(recipefile, checkvars, inherits) |
472 | 490 | ||
473 | def test_recipetool_create_python3_distutils(self): | 491 | def test_recipetool_create_python3_setuptools_pypi_tarball(self): |
474 | # Test creating python3 package from tarball (using distutils3 class) | 492 | # Test creating python3 package from tarball (using setuptools3 and pypi classes) |
475 | temprecipe = os.path.join(self.tempdir, 'recipe') | 493 | temprecipe = os.path.join(self.tempdir, 'recipe') |
476 | os.makedirs(temprecipe) | 494 | os.makedirs(temprecipe) |
477 | pn = 'docutils' | 495 | pn = 'python-magic' |
478 | pv = '0.14' | 496 | pv = '0.4.15' |
479 | recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv)) | 497 | recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv)) |
480 | srcuri = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-%s.tar.gz' % pv | 498 | srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv |
481 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | 499 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) |
482 | self.assertTrue(os.path.isfile(recipefile)) | 500 | self.assertTrue(os.path.isfile(recipefile)) |
483 | checkvars = {} | 501 | checkvars = {} |
484 | checkvars['LICENSE'] = set(['PSF', '&', 'BSD', 'GPL']) | 502 | checkvars['LICENSE'] = set(['MIT']) |
485 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING.txt;md5=35a23d42b615470583563132872c97d6' | 503 | checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88' |
486 | checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-${PV}.tar.gz' | 504 | checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5' |
487 | checkvars['SRC_URI[md5sum]'] = 'c53768d63db3873b7d452833553469de' | 505 | checkvars['PYPI_PACKAGE'] = pn |
488 | checkvars['SRC_URI[sha256sum]'] = '51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274' | 506 | inherits = ['setuptools3', 'pypi'] |
489 | inherits = ['distutils3'] | 507 | self._test_recipe_contents(recipefile, checkvars, inherits) |
508 | |||
509 | def test_recipetool_create_python3_setuptools_pypi(self): | ||
510 | # Test creating python3 package from pypi url (using setuptools3 and pypi classes) | ||
511 | # Intentionnaly using setuptools3 class here instead of any of the pep517 class | ||
512 | # to avoid the toml dependency and allows this test to run on host autobuilders | ||
513 | # with older version of python | ||
514 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
515 | os.makedirs(temprecipe) | ||
516 | pn = 'python-magic' | ||
517 | pv = '0.4.15' | ||
518 | recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv)) | ||
519 | # First specify the required version in the url | ||
520 | srcuri = 'https://pypi.org/project/%s/%s' % (pn, pv) | ||
521 | runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | ||
522 | self.assertTrue(os.path.isfile(recipefile)) | ||
523 | checkvars = {} | ||
524 | checkvars['LICENSE'] = set(['MIT']) | ||
525 | checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88' | ||
526 | checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5' | ||
527 | checkvars['PYPI_PACKAGE'] = pn | ||
528 | inherits = ['setuptools3', "pypi"] | ||
529 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
530 | |||
531 | # Now specify the version as a recipetool parameter | ||
532 | runCmd('rm -rf %s' % recipefile) | ||
533 | self.assertFalse(os.path.isfile(recipefile)) | ||
534 | srcuri = 'https://pypi.org/project/%s' % pn | ||
535 | runCmd('recipetool create -o %s %s --version %s' % (temprecipe, srcuri, pv)) | ||
536 | self.assertTrue(os.path.isfile(recipefile)) | ||
537 | checkvars = {} | ||
538 | checkvars['LICENSE'] = set(['MIT']) | ||
539 | checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88' | ||
540 | checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5' | ||
541 | checkvars['PYPI_PACKAGE'] = pn | ||
542 | inherits = ['setuptools3', "pypi"] | ||
543 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
544 | |||
545 | # Now, try to grab latest version of the package, so we cannot guess the name of the recipe, | ||
546 | # unless hardcoding the latest version but it means we will need to update the test for each release, | ||
547 | # so use a regexp | ||
548 | runCmd('rm -rf %s' % recipefile) | ||
549 | self.assertFalse(os.path.isfile(recipefile)) | ||
550 | recipefile_re = r'%s_(.*)\.bb' % pn | ||
551 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | ||
552 | dirlist = os.listdir(temprecipe) | ||
553 | if len(dirlist) > 1: | ||
554 | self.fail('recipetool created more than just one file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist))) | ||
555 | if len(dirlist) < 1 or not os.path.isfile(os.path.join(temprecipe, dirlist[0])): | ||
556 | self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist))) | ||
557 | import re | ||
558 | match = re.match(recipefile_re, dirlist[0]) | ||
559 | self.assertTrue(match) | ||
560 | latest_pv = match.group(1) | ||
561 | self.assertTrue(latest_pv != pv) | ||
562 | recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, latest_pv)) | ||
563 | # Do not check LIC_FILES_CHKSUM and SRC_URI checksum here to avoid having updating the test on each release | ||
564 | checkvars = {} | ||
565 | checkvars['LICENSE'] = set(['MIT']) | ||
566 | checkvars['PYPI_PACKAGE'] = pn | ||
567 | inherits = ['setuptools3', "pypi"] | ||
568 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
569 | |||
570 | def test_recipetool_create_python3_pep517_setuptools_build_meta(self): | ||
571 | # This test require python 3.11 or above for the tomllib module or tomli module to be installed | ||
572 | needTomllib(self) | ||
573 | |||
574 | # Test creating python3 package from tarball (using setuptools.build_meta class) | ||
575 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
576 | os.makedirs(temprecipe) | ||
577 | pn = 'webcolors' | ||
578 | pv = '1.13' | ||
579 | recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv)) | ||
580 | srcuri = 'https://files.pythonhosted.org/packages/a1/fb/f95560c6a5d4469d9c49e24cf1b5d4d21ffab5608251c6020a965fb7791c/%s-%s.tar.gz' % (pn, pv) | ||
581 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | ||
582 | self.assertTrue(os.path.isfile(recipefile)) | ||
583 | checkvars = {} | ||
584 | checkvars['SUMMARY'] = 'A library for working with the color formats defined by HTML and CSS.' | ||
585 | checkvars['LICENSE'] = set(['BSD-3-Clause']) | ||
586 | checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=702b1ef12cf66832a88f24c8f2ee9c19' | ||
587 | checkvars['SRC_URI[sha256sum]'] = 'c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a' | ||
588 | inherits = ['python_setuptools_build_meta', 'pypi'] | ||
589 | |||
590 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
591 | |||
592 | def test_recipetool_create_python3_pep517_poetry_core_masonry_api(self): | ||
593 | # This test require python 3.11 or above for the tomllib module or tomli module to be installed | ||
594 | needTomllib(self) | ||
595 | |||
596 | # Test creating python3 package from tarball (using poetry.core.masonry.api class) | ||
597 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
598 | os.makedirs(temprecipe) | ||
599 | pn = 'iso8601' | ||
600 | pv = '2.1.0' | ||
601 | recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv)) | ||
602 | srcuri = 'https://files.pythonhosted.org/packages/b9/f3/ef59cee614d5e0accf6fd0cbba025b93b272e626ca89fb70a3e9187c5d15/%s-%s.tar.gz' % (pn, pv) | ||
603 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | ||
604 | self.assertTrue(os.path.isfile(recipefile)) | ||
605 | checkvars = {} | ||
606 | checkvars['SUMMARY'] = 'Simple module to parse ISO 8601 dates' | ||
607 | checkvars['LICENSE'] = set(['MIT']) | ||
608 | checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=aab31f2ef7ba214a5a341eaa47a7f367' | ||
609 | checkvars['SRC_URI[sha256sum]'] = '6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df' | ||
610 | inherits = ['python_poetry_core', 'pypi'] | ||
611 | |||
612 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
613 | |||
614 | def test_recipetool_create_python3_pep517_flit_core_buildapi(self): | ||
615 | # This test require python 3.11 or above for the tomllib module or tomli module to be installed | ||
616 | needTomllib(self) | ||
617 | |||
618 | # Test creating python3 package from tarball (using flit_core.buildapi class) | ||
619 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
620 | os.makedirs(temprecipe) | ||
621 | pn = 'typing-extensions' | ||
622 | pv = '4.8.0' | ||
623 | recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv)) | ||
624 | srcuri = 'https://files.pythonhosted.org/packages/1f/7a/8b94bb016069caa12fc9f587b28080ac33b4fbb8ca369b98bc0a4828543e/typing_extensions-%s.tar.gz' % pv | ||
625 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | ||
626 | self.assertTrue(os.path.isfile(recipefile)) | ||
627 | checkvars = {} | ||
628 | checkvars['SUMMARY'] = 'Backported and Experimental Type Hints for Python 3.8+' | ||
629 | checkvars['LICENSE'] = set(['PSF-2.0']) | ||
630 | checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2' | ||
631 | checkvars['SRC_URI[sha256sum]'] = 'df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef' | ||
632 | inherits = ['python_flit_core', 'pypi'] | ||
633 | |||
634 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
635 | |||
636 | def test_recipetool_create_python3_pep517_hatchling(self): | ||
637 | # This test require python 3.11 or above for the tomllib module or tomli module to be installed | ||
638 | needTomllib(self) | ||
639 | |||
640 | # Test creating python3 package from tarball (using hatchling class) | ||
641 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
642 | os.makedirs(temprecipe) | ||
643 | pn = 'jsonschema' | ||
644 | pv = '4.19.1' | ||
645 | recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv)) | ||
646 | srcuri = 'https://files.pythonhosted.org/packages/e4/43/087b24516db11722c8687e0caf0f66c7785c0b1c51b0ab951dfde924e3f5/jsonschema-%s.tar.gz' % pv | ||
647 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | ||
648 | self.assertTrue(os.path.isfile(recipefile)) | ||
649 | checkvars = {} | ||
650 | checkvars['SUMMARY'] = 'An implementation of JSON Schema validation for Python' | ||
651 | checkvars['HOMEPAGE'] = 'https://github.com/python-jsonschema/jsonschema' | ||
652 | checkvars['LICENSE'] = set(['MIT']) | ||
653 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af' | ||
654 | checkvars['SRC_URI[sha256sum]'] = 'ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf' | ||
655 | inherits = ['python_hatchling', 'pypi'] | ||
656 | |||
657 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
658 | |||
659 | def test_recipetool_create_python3_pep517_maturin(self): | ||
660 | # This test require python 3.11 or above for the tomllib module or tomli module to be installed | ||
661 | needTomllib(self) | ||
662 | |||
663 | # Test creating python3 package from tarball (using maturin class) | ||
664 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
665 | os.makedirs(temprecipe) | ||
666 | pn = 'pydantic-core' | ||
667 | pv = '2.14.5' | ||
668 | recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv)) | ||
669 | srcuri = 'https://files.pythonhosted.org/packages/64/26/cffb93fe9c6b5a91c497f37fae14a4b073ecbc47fc36a9979c7aa888b245/pydantic_core-%s.tar.gz' % pv | ||
670 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | ||
671 | self.assertTrue(os.path.isfile(recipefile)) | ||
672 | checkvars = {} | ||
673 | checkvars['HOMEPAGE'] = 'https://github.com/pydantic/pydantic-core' | ||
674 | checkvars['LICENSE'] = set(['MIT']) | ||
675 | checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=ab599c188b4a314d2856b3a55030c75c' | ||
676 | checkvars['SRC_URI[sha256sum]'] = '6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71' | ||
677 | inherits = ['python_maturin', 'pypi'] | ||
678 | |||
679 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
680 | |||
681 | def test_recipetool_create_python3_pep517_mesonpy(self): | ||
682 | # This test require python 3.11 or above for the tomllib module or tomli module to be installed | ||
683 | needTomllib(self) | ||
684 | |||
685 | # Test creating python3 package from tarball (using mesonpy class) | ||
686 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
687 | os.makedirs(temprecipe) | ||
688 | pn = 'siphash24' | ||
689 | pv = '1.4' | ||
690 | recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv)) | ||
691 | srcuri = 'https://files.pythonhosted.org/packages/c2/32/b934a70592f314afcfa86c7f7e388804a8061be65b822e2aa07e573b6477/%s-%s.tar.gz' % (pn, pv) | ||
692 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | ||
693 | self.assertTrue(os.path.isfile(recipefile)) | ||
694 | checkvars = {} | ||
695 | checkvars['SRC_URI[sha256sum]'] = '7fd65e39b2a7c8c4ddc3a168a687f4610751b0ac2ebb518783c0cdfc30bec4a0' | ||
696 | inherits = ['python_mesonpy', 'pypi'] | ||
697 | |||
490 | self._test_recipe_contents(recipefile, checkvars, inherits) | 698 | self._test_recipe_contents(recipefile, checkvars, inherits) |
491 | 699 | ||
492 | def test_recipetool_create_github_tarball(self): | 700 | def test_recipetool_create_github_tarball(self): |
493 | # Basic test to ensure github URL mangling doesn't apply to release tarballs | 701 | # Basic test to ensure github URL mangling doesn't apply to release tarballs. |
702 | # Deliberately use an older release of Meson at present so we don't need a toml parser. | ||
494 | temprecipe = os.path.join(self.tempdir, 'recipe') | 703 | temprecipe = os.path.join(self.tempdir, 'recipe') |
495 | os.makedirs(temprecipe) | 704 | os.makedirs(temprecipe) |
496 | pv = '0.32.0' | 705 | pv = '0.52.1' |
497 | recipefile = os.path.join(temprecipe, 'meson_%s.bb' % pv) | 706 | recipefile = os.path.join(temprecipe, 'python3-meson_%s.bb' % pv) |
498 | srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv) | 707 | srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv) |
499 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | 708 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) |
500 | self.assertTrue(os.path.isfile(recipefile)) | 709 | self.assertTrue(os.path.isfile(recipefile)) |
@@ -504,27 +713,302 @@ class RecipetoolTests(RecipetoolBase): | |||
504 | inherits = ['setuptools3'] | 713 | inherits = ['setuptools3'] |
505 | self._test_recipe_contents(recipefile, checkvars, inherits) | 714 | self._test_recipe_contents(recipefile, checkvars, inherits) |
506 | 715 | ||
507 | def test_recipetool_create_git_http(self): | 716 | def _test_recipetool_create_git(self, srcuri, branch=None): |
508 | # Basic test to check http git URL mangling works | 717 | # Basic test to check http git URL mangling works |
509 | temprecipe = os.path.join(self.tempdir, 'recipe') | 718 | temprecipe = os.path.join(self.tempdir, 'recipe') |
510 | os.makedirs(temprecipe) | 719 | os.makedirs(temprecipe) |
511 | recipefile = os.path.join(temprecipe, 'matchbox-terminal_git.bb') | 720 | name = srcuri.split(';')[0].split('/')[-1] |
512 | srcuri = 'http://git.yoctoproject.org/git/matchbox-terminal' | 721 | recipefile = os.path.join(temprecipe, name + '_git.bb') |
513 | result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) | 722 | options = ' -B %s' % branch if branch else '' |
723 | result = runCmd('recipetool create -o %s%s "%s"' % (temprecipe, options, srcuri)) | ||
514 | self.assertTrue(os.path.isfile(recipefile)) | 724 | self.assertTrue(os.path.isfile(recipefile)) |
515 | checkvars = {} | 725 | checkvars = {} |
516 | checkvars['LICENSE'] = set(['GPLv2']) | 726 | checkvars['SRC_URI'] = srcuri |
517 | checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/matchbox-terminal;protocol=http' | 727 | for scheme in ['http', 'https']: |
518 | inherits = ['pkgconfig', 'autotools'] | 728 | if srcuri.startswith(scheme + ":"): |
729 | checkvars['SRC_URI'] = 'git%s;protocol=%s' % (srcuri[len(scheme):], scheme) | ||
730 | if ';branch=' not in srcuri: | ||
731 | checkvars['SRC_URI'] += ';branch=' + (branch or 'master') | ||
732 | self._test_recipe_contents(recipefile, checkvars, []) | ||
733 | |||
734 | def test_recipetool_create_git_http(self): | ||
735 | self._test_recipetool_create_git('http://git.yoctoproject.org/git/matchbox-keyboard') | ||
736 | |||
737 | def test_recipetool_create_git_srcuri_master(self): | ||
738 | self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=master;protocol=https') | ||
739 | |||
740 | def test_recipetool_create_git_srcuri_branch(self): | ||
741 | self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=matchbox-keyboard-0-1;protocol=https') | ||
742 | |||
743 | def test_recipetool_create_git_srcbranch(self): | ||
744 | self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;protocol=https', 'matchbox-keyboard-0-1') | ||
745 | |||
746 | def _go_urifiy(self, url, version, modulepath = None, pathmajor = None, subdir = None): | ||
747 | modulepath = ",path='%s'" % modulepath if len(modulepath) else '' | ||
748 | pathmajor = ",pathmajor='%s'" % pathmajor if len(pathmajor) else '' | ||
749 | subdir = ",subdir='%s'" % subdir if len(subdir) else '' | ||
750 | return "${@go_src_uri('%s','%s'%s%s%s)}" % (url, version, modulepath, pathmajor, subdir) | ||
751 | |||
752 | def test_recipetool_create_go(self): | ||
753 | # Basic test to check go recipe generation | ||
754 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
755 | os.makedirs(temprecipe) | ||
756 | |||
757 | recipefile = os.path.join(temprecipe, 'edgex-go_git.bb') | ||
758 | deps_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-modules.inc') | ||
759 | lics_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-licenses.inc') | ||
760 | modules_txt_file = os.path.join(temprecipe, 'edgex-go', 'modules.txt') | ||
761 | |||
762 | srcuri = 'https://github.com/edgexfoundry/edgex-go.git' | ||
763 | srcrev = "v3.0.0" | ||
764 | srcbranch = "main" | ||
765 | |||
766 | result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch)) | ||
767 | |||
768 | self.maxDiff = None | ||
769 | inherits = ['go-vendor'] | ||
770 | |||
771 | checkvars = {} | ||
772 | checkvars['GO_IMPORT'] = "github.com/edgexfoundry/edgex-go" | ||
773 | checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https', | ||
774 | 'file://modules.txt'} | ||
775 | checkvars['LIC_FILES_CHKSUM'] = {'file://src/${GO_IMPORT}/LICENSE;md5=8f8bc924cf73f6a32381e5fd4c58d603'} | ||
776 | |||
777 | self.assertTrue(os.path.isfile(recipefile)) | ||
778 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
779 | |||
780 | checkvars = {} | ||
781 | checkvars['VENDORED_LIC_FILES_CHKSUM'] = set( | ||
782 | ['file://src/${GO_IMPORT}/vendor/github.com/Microsoft/go-winio/LICENSE;md5=69205ff73858f2c22b2ca135b557e8ef', | ||
783 | 'file://src/${GO_IMPORT}/vendor/github.com/armon/go-metrics/LICENSE;md5=d2d77030c0183e3d1e66d26dc1f243be', | ||
784 | 'file://src/${GO_IMPORT}/vendor/github.com/cenkalti/backoff/LICENSE;md5=1571d94433e3f3aa05267efd4dbea68b', | ||
785 | 'file://src/${GO_IMPORT}/vendor/github.com/davecgh/go-spew/LICENSE;md5=c06795ed54b2a35ebeeb543cd3a73e56', | ||
786 | 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/LICENSE;md5=dcdb33474b60c38efd27356d8f2edec7', | ||
787 | 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/edl-v10;md5=3adfcc70f5aeb7a44f3f9b495aa1fbf3', | ||
788 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-bootstrap/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
789 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-configuration/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
790 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-core-contracts/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
791 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-messaging/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
792 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-registry/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
793 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-secrets/v3/LICENSE;md5=f9fa2f4f8e0ef8cc7b5dd150963eb457', | ||
794 | 'file://src/${GO_IMPORT}/vendor/github.com/fatih/color/LICENSE.md;md5=316e6d590bdcde7993fb175662c0dd5a', | ||
795 | 'file://src/${GO_IMPORT}/vendor/github.com/fxamacker/cbor/v2/LICENSE;md5=827f5a2fa861382d35a3943adf9ebb86', | ||
796 | 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57', | ||
797 | 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/json/LICENSE;md5=591778525c869cdde0ab5a1bf283cd81', | ||
798 | 'file://src/${GO_IMPORT}/vendor/github.com/go-kit/log/LICENSE;md5=5b7c15ad5fffe2ff6e9d58a6c161f082', | ||
799 | 'file://src/${GO_IMPORT}/vendor/github.com/go-logfmt/logfmt/LICENSE;md5=98e39517c38127f969de33057067091e', | ||
800 | 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/locales/LICENSE;md5=3ccbda375ee345400ad1da85ba522301', | ||
801 | 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/universal-translator/LICENSE;md5=2e2b21ef8f61057977d27c727c84bef1', | ||
802 | 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/validator/v10/LICENSE;md5=a718a0f318d76f7c5d510cbae84f0b60', | ||
803 | 'file://src/${GO_IMPORT}/vendor/github.com/go-redis/redis/v7/LICENSE;md5=58103aa5ea1ee9b7a369c9c4a95ef9b5', | ||
804 | 'file://src/${GO_IMPORT}/vendor/github.com/golang/protobuf/LICENSE;md5=939cce1ec101726fa754e698ac871622', | ||
805 | 'file://src/${GO_IMPORT}/vendor/github.com/gomodule/redigo/LICENSE;md5=2ee41112a44fe7014dce33e26468ba93', | ||
806 | 'file://src/${GO_IMPORT}/vendor/github.com/google/uuid/LICENSE;md5=88073b6dd8ec00fe09da59e0b6dfded1', | ||
807 | 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/mux/LICENSE;md5=33fa1116c45f9e8de714033f99edde13', | ||
808 | 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/websocket/LICENSE;md5=c007b54a1743d596f46b2748d9f8c044', | ||
809 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/consul/api/LICENSE;md5=b8a277a612171b7526e9be072f405ef4', | ||
810 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/errwrap/LICENSE;md5=b278a92d2c1509760384428817710378', | ||
811 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-cleanhttp/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea', | ||
812 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-hclog/LICENSE;md5=ec7f605b74b9ad03347d0a93a5cc7eb8', | ||
813 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-immutable-radix/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea', | ||
814 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-multierror/LICENSE;md5=d44fdeb607e2d2614db9464dbedd4094', | ||
815 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-rootcerts/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea', | ||
816 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/golang-lru/LICENSE;md5=f27a50d2e878867827842f2c60e30bfc', | ||
817 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/serf/LICENSE;md5=b278a92d2c1509760384428817710378', | ||
818 | 'file://src/${GO_IMPORT}/vendor/github.com/leodido/go-urn/LICENSE;md5=8f50db5538ec1148a9b3d14ed96c3418', | ||
819 | 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-colorable/LICENSE;md5=24ce168f90aec2456a73de1839037245', | ||
820 | 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-isatty/LICENSE;md5=f509beadd5a11227c27b5d2ad6c9f2c6', | ||
821 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/consulstructure/LICENSE;md5=96ada10a9e51c98c4656f2cede08c673', | ||
822 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/copystructure/LICENSE;md5=56da355a12d4821cda57b8f23ec34bc4', | ||
823 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/go-homedir/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd', | ||
824 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/mapstructure/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd', | ||
825 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/reflectwalk/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd', | ||
826 | 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nats.go/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327', | ||
827 | 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nkeys/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327', | ||
828 | 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nuid/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327', | ||
829 | 'file://src/${GO_IMPORT}/vendor/github.com/pmezard/go-difflib/LICENSE;md5=e9a2ebb8de779a07500ddecca806145e', | ||
830 | 'file://src/${GO_IMPORT}/vendor/github.com/rcrowley/go-metrics/LICENSE;md5=1bdf5d819f50f141366dabce3be1460f', | ||
831 | 'file://src/${GO_IMPORT}/vendor/github.com/spiffe/go-spiffe/v2/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327', | ||
832 | 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/objx/LICENSE;md5=d023fd31d3ca39ec61eec65a91732735', | ||
833 | 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/testify/LICENSE;md5=188f01994659f3c0d310612333d2a26f', | ||
834 | 'file://src/${GO_IMPORT}/vendor/github.com/x448/float16/LICENSE;md5=de8f8e025d57fe7ee0b67f30d571323b', | ||
835 | 'file://src/${GO_IMPORT}/vendor/github.com/zeebo/errs/LICENSE;md5=84914ab36fc0eb48edbaa53e66e8d326', | ||
836 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/crypto/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
837 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/mod/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
838 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/net/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
839 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/sync/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
840 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/sys/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
841 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/text/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
842 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/tools/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
843 | 'file://src/${GO_IMPORT}/vendor/google.golang.org/genproto/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57', | ||
844 | 'file://src/${GO_IMPORT}/vendor/google.golang.org/grpc/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57', | ||
845 | 'file://src/${GO_IMPORT}/vendor/google.golang.org/protobuf/LICENSE;md5=02d4002e9171d41a8fad93aa7faf3956', | ||
846 | 'file://src/${GO_IMPORT}/vendor/gopkg.in/eapache/queue.v1/LICENSE;md5=1bfd4408d3de090ef6b908b0cc45a316', | ||
847 | 'file://src/${GO_IMPORT}/vendor/gopkg.in/yaml.v3/LICENSE;md5=3c91c17266710e16afdbb2b6d15c761c']) | ||
848 | |||
849 | self.assertTrue(os.path.isfile(lics_require_file)) | ||
850 | self._test_recipe_contents(lics_require_file, checkvars, []) | ||
851 | |||
852 | dependencies = \ | ||
853 | [ ('github.com/eclipse/paho.mqtt.golang','v1.4.2', '', '', ''), | ||
854 | ('github.com/edgexfoundry/go-mod-bootstrap','v3.0.1','github.com/edgexfoundry/go-mod-bootstrap/v3','/v3', ''), | ||
855 | ('github.com/edgexfoundry/go-mod-configuration','v3.0.0','github.com/edgexfoundry/go-mod-configuration/v3','/v3', ''), | ||
856 | ('github.com/edgexfoundry/go-mod-core-contracts','v3.0.0','github.com/edgexfoundry/go-mod-core-contracts/v3','/v3', ''), | ||
857 | ('github.com/edgexfoundry/go-mod-messaging','v3.0.0','github.com/edgexfoundry/go-mod-messaging/v3','/v3', ''), | ||
858 | ('github.com/edgexfoundry/go-mod-secrets','v3.0.1','github.com/edgexfoundry/go-mod-secrets/v3','/v3', ''), | ||
859 | ('github.com/fxamacker/cbor','v2.4.0','github.com/fxamacker/cbor/v2','/v2', ''), | ||
860 | ('github.com/gomodule/redigo','v1.8.9', '', '', ''), | ||
861 | ('github.com/google/uuid','v1.3.0', '', '', ''), | ||
862 | ('github.com/gorilla/mux','v1.8.0', '', '', ''), | ||
863 | ('github.com/rcrowley/go-metrics','v0.0.0-20201227073835-cf1acfcdf475', '', '', ''), | ||
864 | ('github.com/spiffe/go-spiffe','v2.1.4','github.com/spiffe/go-spiffe/v2','/v2', ''), | ||
865 | ('github.com/stretchr/testify','v1.8.2', '', '', ''), | ||
866 | ('go.googlesource.com/crypto','v0.8.0','golang.org/x/crypto', '', ''), | ||
867 | ('gopkg.in/eapache/queue.v1','v1.1.0', '', '', ''), | ||
868 | ('gopkg.in/yaml.v3','v3.0.1', '', '', ''), | ||
869 | ('github.com/microsoft/go-winio','v0.6.0','github.com/Microsoft/go-winio', '', ''), | ||
870 | ('github.com/hashicorp/go-metrics','v0.3.10','github.com/armon/go-metrics', '', ''), | ||
871 | ('github.com/cenkalti/backoff','v2.2.1+incompatible', '', '', ''), | ||
872 | ('github.com/davecgh/go-spew','v1.1.1', '', '', ''), | ||
873 | ('github.com/edgexfoundry/go-mod-registry','v3.0.0','github.com/edgexfoundry/go-mod-registry/v3','/v3', ''), | ||
874 | ('github.com/fatih/color','v1.9.0', '', '', ''), | ||
875 | ('github.com/go-jose/go-jose','v3.0.0','github.com/go-jose/go-jose/v3','/v3', ''), | ||
876 | ('github.com/go-kit/log','v0.2.1', '', '', ''), | ||
877 | ('github.com/go-logfmt/logfmt','v0.5.1', '', '', ''), | ||
878 | ('github.com/go-playground/locales','v0.14.1', '', '', ''), | ||
879 | ('github.com/go-playground/universal-translator','v0.18.1', '', '', ''), | ||
880 | ('github.com/go-playground/validator','v10.13.0','github.com/go-playground/validator/v10','/v10', ''), | ||
881 | ('github.com/go-redis/redis','v7.3.0','github.com/go-redis/redis/v7','/v7', ''), | ||
882 | ('github.com/golang/protobuf','v1.5.2', '', '', ''), | ||
883 | ('github.com/gorilla/websocket','v1.4.2', '', '', ''), | ||
884 | ('github.com/hashicorp/consul','v1.20.0','github.com/hashicorp/consul/api', '', 'api'), | ||
885 | ('github.com/hashicorp/errwrap','v1.0.0', '', '', ''), | ||
886 | ('github.com/hashicorp/go-cleanhttp','v0.5.1', '', '', ''), | ||
887 | ('github.com/hashicorp/go-hclog','v0.14.1', '', '', ''), | ||
888 | ('github.com/hashicorp/go-immutable-radix','v1.3.0', '', '', ''), | ||
889 | ('github.com/hashicorp/go-multierror','v1.1.1', '', '', ''), | ||
890 | ('github.com/hashicorp/go-rootcerts','v1.0.2', '', '', ''), | ||
891 | ('github.com/hashicorp/golang-lru','v0.5.4', '', '', ''), | ||
892 | ('github.com/hashicorp/serf','v0.10.1', '', '', ''), | ||
893 | ('github.com/leodido/go-urn','v1.2.3', '', '', ''), | ||
894 | ('github.com/mattn/go-colorable','v0.1.12', '', '', ''), | ||
895 | ('github.com/mattn/go-isatty','v0.0.14', '', '', ''), | ||
896 | ('github.com/mitchellh/consulstructure','v0.0.0-20190329231841-56fdc4d2da54', '', '', ''), | ||
897 | ('github.com/mitchellh/copystructure','v1.2.0', '', '', ''), | ||
898 | ('github.com/mitchellh/go-homedir','v1.1.0', '', '', ''), | ||
899 | ('github.com/mitchellh/mapstructure','v1.5.0', '', '', ''), | ||
900 | ('github.com/mitchellh/reflectwalk','v1.0.2', '', '', ''), | ||
901 | ('github.com/nats-io/nats.go','v1.25.0', '', '', ''), | ||
902 | ('github.com/nats-io/nkeys','v0.4.4', '', '', ''), | ||
903 | ('github.com/nats-io/nuid','v1.0.1', '', '', ''), | ||
904 | ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''), | ||
905 | ('github.com/stretchr/objx','v0.5.0', '', '', ''), | ||
906 | ('github.com/x448/float16','v0.8.4', '', '', ''), | ||
907 | ('github.com/zeebo/errs','v1.3.0', '', '', ''), | ||
908 | ('go.googlesource.com/mod','v0.8.0','golang.org/x/mod', '', ''), | ||
909 | ('go.googlesource.com/net','v0.9.0','golang.org/x/net', '', ''), | ||
910 | ('go.googlesource.com/sync','v0.1.0','golang.org/x/sync', '', ''), | ||
911 | ('go.googlesource.com/sys','v0.7.0','golang.org/x/sys', '', ''), | ||
912 | ('go.googlesource.com/text','v0.9.0','golang.org/x/text', '', ''), | ||
913 | ('go.googlesource.com/tools','v0.6.0','golang.org/x/tools', '', ''), | ||
914 | ('github.com/googleapis/go-genproto','v0.0.0-20230223222841-637eb2293923','google.golang.org/genproto', '', ''), | ||
915 | ('github.com/grpc/grpc-go','v1.53.0','google.golang.org/grpc', '', ''), | ||
916 | ('go.googlesource.com/protobuf','v1.28.1','google.golang.org/protobuf', '', ''), | ||
917 | ] | ||
918 | |||
919 | src_uri = set() | ||
920 | for d in dependencies: | ||
921 | src_uri.add(self._go_urifiy(*d)) | ||
922 | |||
923 | checkvars = {} | ||
924 | checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri | ||
925 | |||
926 | self.assertTrue(os.path.isfile(deps_require_file)) | ||
927 | self._test_recipe_contents(deps_require_file, checkvars, []) | ||
928 | |||
929 | def test_recipetool_create_go_replace_modules(self): | ||
930 | # Check handling of replaced modules | ||
931 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
932 | os.makedirs(temprecipe) | ||
933 | |||
934 | recipefile = os.path.join(temprecipe, 'openapi-generator_git.bb') | ||
935 | deps_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-modules.inc') | ||
936 | lics_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-licenses.inc') | ||
937 | modules_txt_file = os.path.join(temprecipe, 'openapi-generator', 'modules.txt') | ||
938 | |||
939 | srcuri = 'https://github.com/OpenAPITools/openapi-generator.git' | ||
940 | srcrev = "v7.2.0" | ||
941 | srcbranch = "master" | ||
942 | srcsubdir = "samples/openapi3/client/petstore/go" | ||
943 | |||
944 | result = runCmd('recipetool create -o %s %s -S %s -B %s --src-subdir %s' % (temprecipe, srcuri, srcrev, srcbranch, srcsubdir)) | ||
945 | |||
946 | self.maxDiff = None | ||
947 | inherits = ['go-vendor'] | ||
948 | |||
949 | checkvars = {} | ||
950 | checkvars['GO_IMPORT'] = "github.com/OpenAPITools/openapi-generator/samples/openapi3/client/petstore/go" | ||
951 | checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https', | ||
952 | 'file://modules.txt'} | ||
953 | |||
954 | self.assertNotIn('Traceback', result.output) | ||
955 | self.assertIn('No license file was detected for the main module', result.output) | ||
956 | self.assertTrue(os.path.isfile(recipefile)) | ||
519 | self._test_recipe_contents(recipefile, checkvars, inherits) | 957 | self._test_recipe_contents(recipefile, checkvars, inherits) |
520 | 958 | ||
959 | # make sure that dependencies don't mention local directory ./go-petstore | ||
960 | dependencies = \ | ||
961 | [ ('github.com/stretchr/testify','v1.8.4', '', '', ''), | ||
962 | ('go.googlesource.com/oauth2','v0.10.0','golang.org/x/oauth2', '', ''), | ||
963 | ('github.com/davecgh/go-spew','v1.1.1', '', '', ''), | ||
964 | ('github.com/golang/protobuf','v1.5.3', '', '', ''), | ||
965 | ('github.com/kr/pretty','v0.3.0', '', '', ''), | ||
966 | ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''), | ||
967 | ('github.com/rogpeppe/go-internal','v1.9.0', '', '', ''), | ||
968 | ('go.googlesource.com/net','v0.12.0','golang.org/x/net', '', ''), | ||
969 | ('github.com/golang/appengine','v1.6.7','google.golang.org/appengine', '', ''), | ||
970 | ('go.googlesource.com/protobuf','v1.31.0','google.golang.org/protobuf', '', ''), | ||
971 | ('gopkg.in/check.v1','v1.0.0-20201130134442-10cb98267c6c', '', '', ''), | ||
972 | ('gopkg.in/yaml.v3','v3.0.1', '', '', ''), | ||
973 | ] | ||
974 | |||
975 | src_uri = set() | ||
976 | for d in dependencies: | ||
977 | src_uri.add(self._go_urifiy(*d)) | ||
978 | |||
979 | checkvars = {} | ||
980 | checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri | ||
981 | |||
982 | self.assertTrue(os.path.isfile(deps_require_file)) | ||
983 | self._test_recipe_contents(deps_require_file, checkvars, []) | ||
984 | |||
985 | class RecipetoolTests(RecipetoolBase): | ||
986 | |||
987 | @classmethod | ||
988 | def setUpClass(cls): | ||
989 | import sys | ||
990 | |||
991 | super(RecipetoolTests, cls).setUpClass() | ||
992 | bb_vars = get_bb_vars(['BBPATH']) | ||
993 | cls.bbpath = bb_vars['BBPATH'] | ||
994 | libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'recipetool') | ||
995 | sys.path.insert(0, libpath) | ||
996 | |||
521 | def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths): | 997 | def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths): |
522 | dstdir = basedstdir | 998 | dstdir = basedstdir |
523 | self.assertTrue(os.path.exists(dstdir)) | 999 | self.assertTrue(os.path.exists(dstdir)) |
524 | for p in paths: | 1000 | for p in paths: |
525 | dstdir = os.path.join(dstdir, p) | 1001 | dstdir = os.path.join(dstdir, p) |
526 | if not os.path.exists(dstdir): | 1002 | if not os.path.exists(dstdir): |
527 | os.makedirs(dstdir) | 1003 | try: |
1004 | os.makedirs(dstdir) | ||
1005 | except PermissionError: | ||
1006 | return False | ||
1007 | except OSError as e: | ||
1008 | if e.errno == errno.EROFS: | ||
1009 | return False | ||
1010 | else: | ||
1011 | raise e | ||
528 | if p == "lib": | 1012 | if p == "lib": |
529 | # Can race with other tests | 1013 | # Can race with other tests |
530 | self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir) | 1014 | self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir) |
@@ -532,8 +1016,12 @@ class RecipetoolTests(RecipetoolBase): | |||
532 | self.track_for_cleanup(dstdir) | 1016 | self.track_for_cleanup(dstdir) |
533 | dstfile = os.path.join(dstdir, os.path.basename(srcfile)) | 1017 | dstfile = os.path.join(dstdir, os.path.basename(srcfile)) |
534 | if srcfile != dstfile: | 1018 | if srcfile != dstfile: |
535 | shutil.copy(srcfile, dstfile) | 1019 | try: |
1020 | shutil.copy(srcfile, dstfile) | ||
1021 | except PermissionError: | ||
1022 | return False | ||
536 | self.track_for_cleanup(dstfile) | 1023 | self.track_for_cleanup(dstfile) |
1024 | return True | ||
537 | 1025 | ||
538 | def test_recipetool_load_plugin(self): | 1026 | def test_recipetool_load_plugin(self): |
539 | """Test that recipetool loads only the first found plugin in BBPATH.""" | 1027 | """Test that recipetool loads only the first found plugin in BBPATH.""" |
@@ -547,20 +1035,147 @@ class RecipetoolTests(RecipetoolBase): | |||
547 | plugincontent = fh.readlines() | 1035 | plugincontent = fh.readlines() |
548 | try: | 1036 | try: |
549 | self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') | 1037 | self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') |
550 | for path in searchpath: | 1038 | searchpath = [ |
551 | self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool') | 1039 | path for path in searchpath |
1040 | if self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool') | ||
1041 | ] | ||
552 | result = runCmd("recipetool --quiet count") | 1042 | result = runCmd("recipetool --quiet count") |
553 | self.assertEqual(result.output, '1') | 1043 | self.assertEqual(result.output, '1') |
554 | result = runCmd("recipetool --quiet multiloaded") | 1044 | result = runCmd("recipetool --quiet multiloaded") |
555 | self.assertEqual(result.output, "no") | 1045 | self.assertEqual(result.output, "no") |
556 | for path in searchpath: | 1046 | for path in searchpath: |
557 | result = runCmd("recipetool --quiet bbdir") | 1047 | result = runCmd("recipetool --quiet bbdir") |
558 | self.assertEqual(result.output, path) | 1048 | self.assertEqual(os.path.realpath(result.output), os.path.realpath(path)) |
559 | os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py')) | 1049 | os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py')) |
560 | finally: | 1050 | finally: |
561 | with open(srcfile, 'w') as fh: | 1051 | with open(srcfile, 'w') as fh: |
562 | fh.writelines(plugincontent) | 1052 | fh.writelines(plugincontent) |
563 | 1053 | ||
1054 | def test_recipetool_handle_license_vars(self): | ||
1055 | from create import handle_license_vars | ||
1056 | from unittest.mock import Mock | ||
1057 | |||
1058 | commonlicdir = get_bb_var('COMMON_LICENSE_DIR') | ||
1059 | |||
1060 | class DataConnectorCopy(bb.tinfoil.TinfoilDataStoreConnector): | ||
1061 | pass | ||
1062 | |||
1063 | d = DataConnectorCopy | ||
1064 | d.getVar = Mock(return_value=commonlicdir) | ||
1065 | |||
1066 | srctree = tempfile.mkdtemp(prefix='recipetoolqa') | ||
1067 | self.track_for_cleanup(srctree) | ||
1068 | |||
1069 | # Multiple licenses | ||
1070 | licenses = ['MIT', 'ISC', 'BSD-3-Clause', 'Apache-2.0'] | ||
1071 | for licence in licenses: | ||
1072 | shutil.copy(os.path.join(commonlicdir, licence), os.path.join(srctree, 'LICENSE.' + licence)) | ||
1073 | # Duplicate license | ||
1074 | shutil.copy(os.path.join(commonlicdir, 'MIT'), os.path.join(srctree, 'LICENSE')) | ||
1075 | |||
1076 | extravalues = { | ||
1077 | # Duplicate and missing licenses | ||
1078 | 'LICENSE': 'Zlib & BSD-2-Clause & Zlib', | ||
1079 | 'LIC_FILES_CHKSUM': [ | ||
1080 | 'file://README.md;md5=0123456789abcdef0123456789abcd' | ||
1081 | ] | ||
1082 | } | ||
1083 | lines_before = [] | ||
1084 | handled = [] | ||
1085 | licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d) | ||
1086 | expected_lines_before = [ | ||
1087 | '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is', | ||
1088 | '# your responsibility to verify that the values are complete and correct.', | ||
1089 | '# NOTE: Original package / source metadata indicates license is: BSD-2-Clause & Zlib', | ||
1090 | '#', | ||
1091 | '# NOTE: multiple licenses have been detected; they have been separated with &', | ||
1092 | '# in the LICENSE value for now since it is a reasonable assumption that all', | ||
1093 | '# of the licenses apply. If instead there is a choice between the multiple', | ||
1094 | '# licenses then you should change the value to separate the licenses with |', | ||
1095 | '# instead of &. If there is any doubt, check the accompanying documentation', | ||
1096 | '# to determine which situation is applicable.', | ||
1097 | 'LICENSE = "Apache-2.0 & BSD-2-Clause & BSD-3-Clause & ISC & MIT & Zlib"', | ||
1098 | 'LIC_FILES_CHKSUM = "file://LICENSE;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n' | ||
1099 | ' file://LICENSE.Apache-2.0;md5=89aea4e17d99a7cacdbeed46a0096b10 \\\n' | ||
1100 | ' file://LICENSE.BSD-3-Clause;md5=550794465ba0ec5312d6919e203a55f9 \\\n' | ||
1101 | ' file://LICENSE.ISC;md5=f3b90e78ea0cffb20bf5cca7947a896d \\\n' | ||
1102 | ' file://LICENSE.MIT;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n' | ||
1103 | ' file://README.md;md5=0123456789abcdef0123456789abcd"', | ||
1104 | '' | ||
1105 | ] | ||
1106 | self.assertEqual(lines_before, expected_lines_before) | ||
1107 | expected_licvalues = [ | ||
1108 | ('MIT', 'LICENSE', '0835ade698e0bcf8506ecda2f7b4f302'), | ||
1109 | ('Apache-2.0', 'LICENSE.Apache-2.0', '89aea4e17d99a7cacdbeed46a0096b10'), | ||
1110 | ('BSD-3-Clause', 'LICENSE.BSD-3-Clause', '550794465ba0ec5312d6919e203a55f9'), | ||
1111 | ('ISC', 'LICENSE.ISC', 'f3b90e78ea0cffb20bf5cca7947a896d'), | ||
1112 | ('MIT', 'LICENSE.MIT', '0835ade698e0bcf8506ecda2f7b4f302') | ||
1113 | ] | ||
1114 | self.assertEqual(handled, [('license', expected_licvalues)]) | ||
1115 | self.assertEqual(extravalues, {}) | ||
1116 | self.assertEqual(licvalues, expected_licvalues) | ||
1117 | |||
1118 | |||
1119 | def test_recipetool_split_pkg_licenses(self): | ||
1120 | from create import split_pkg_licenses | ||
1121 | licvalues = [ | ||
1122 | # Duplicate licenses | ||
1123 | ('BSD-2-Clause', 'x/COPYING', None), | ||
1124 | ('BSD-2-Clause', 'x/LICENSE', None), | ||
1125 | # Multiple licenses | ||
1126 | ('MIT', 'x/a/LICENSE.MIT', None), | ||
1127 | ('ISC', 'x/a/LICENSE.ISC', None), | ||
1128 | # Alternative licenses | ||
1129 | ('(MIT | ISC)', 'x/b/LICENSE', None), | ||
1130 | # Alternative licenses without brackets | ||
1131 | ('MIT | BSD-2-Clause', 'x/c/LICENSE', None), | ||
1132 | # Multi licenses with alternatives | ||
1133 | ('MIT', 'x/d/COPYING', None), | ||
1134 | ('MIT | BSD-2-Clause', 'x/d/LICENSE', None), | ||
1135 | # Multi licenses with alternatives and brackets | ||
1136 | ('Apache-2.0 & ((MIT | ISC) & BSD-3-Clause)', 'x/e/LICENSE', None) | ||
1137 | ] | ||
1138 | packages = { | ||
1139 | '${PN}': '', | ||
1140 | 'a': 'x/a', | ||
1141 | 'b': 'x/b', | ||
1142 | 'c': 'x/c', | ||
1143 | 'd': 'x/d', | ||
1144 | 'e': 'x/e', | ||
1145 | 'f': 'x/f', | ||
1146 | 'g': 'x/g', | ||
1147 | } | ||
1148 | fallback_licenses = { | ||
1149 | # Ignored | ||
1150 | 'a': 'BSD-3-Clause', | ||
1151 | # Used | ||
1152 | 'f': 'BSD-3-Clause' | ||
1153 | } | ||
1154 | outlines = [] | ||
1155 | outlicenses = split_pkg_licenses(licvalues, packages, outlines, fallback_licenses) | ||
1156 | expected_outlicenses = { | ||
1157 | '${PN}': ['BSD-2-Clause'], | ||
1158 | 'a': ['ISC', 'MIT'], | ||
1159 | 'b': ['(ISC | MIT)'], | ||
1160 | 'c': ['(BSD-2-Clause | MIT)'], | ||
1161 | 'd': ['(BSD-2-Clause | MIT)', 'MIT'], | ||
1162 | 'e': ['(ISC | MIT)', 'Apache-2.0', 'BSD-3-Clause'], | ||
1163 | 'f': ['BSD-3-Clause'], | ||
1164 | 'g': ['Unknown'] | ||
1165 | } | ||
1166 | self.assertEqual(outlicenses, expected_outlicenses) | ||
1167 | expected_outlines = [ | ||
1168 | 'LICENSE:${PN} = "BSD-2-Clause"', | ||
1169 | 'LICENSE:a = "ISC & MIT"', | ||
1170 | 'LICENSE:b = "(ISC | MIT)"', | ||
1171 | 'LICENSE:c = "(BSD-2-Clause | MIT)"', | ||
1172 | 'LICENSE:d = "(BSD-2-Clause | MIT) & MIT"', | ||
1173 | 'LICENSE:e = "(ISC | MIT) & Apache-2.0 & BSD-3-Clause"', | ||
1174 | 'LICENSE:f = "BSD-3-Clause"', | ||
1175 | 'LICENSE:g = "Unknown"' | ||
1176 | ] | ||
1177 | self.assertEqual(outlines, expected_outlines) | ||
1178 | |||
564 | 1179 | ||
565 | class RecipetoolAppendsrcBase(RecipetoolBase): | 1180 | class RecipetoolAppendsrcBase(RecipetoolBase): |
566 | def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles): | 1181 | def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles): |
@@ -593,9 +1208,9 @@ class RecipetoolAppendsrcBase(RecipetoolBase): | |||
593 | for uri in src_uri: | 1208 | for uri in src_uri: |
594 | p = urllib.parse.urlparse(uri) | 1209 | p = urllib.parse.urlparse(uri) |
595 | if p.scheme == 'file': | 1210 | if p.scheme == 'file': |
596 | return p.netloc + p.path | 1211 | return p.netloc + p.path, uri |
597 | 1212 | ||
598 | def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, options=''): | 1213 | def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, remove=None, machine=None , options=''): |
599 | if newfile is None: | 1214 | if newfile is None: |
600 | newfile = self.testfile | 1215 | newfile = self.testfile |
601 | 1216 | ||
@@ -620,14 +1235,42 @@ class RecipetoolAppendsrcBase(RecipetoolBase): | |||
620 | else: | 1235 | else: |
621 | destpath = '.' + os.sep | 1236 | destpath = '.' + os.sep |
622 | 1237 | ||
623 | expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', | 1238 | expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
624 | '\n'] | 1239 | '\n'] |
1240 | |||
1241 | override = "" | ||
1242 | if machine: | ||
1243 | options += ' -m %s' % machine | ||
1244 | override = ':append:%s' % machine | ||
1245 | expectedlines.extend(['PACKAGE_ARCH = "${MACHINE_ARCH}"\n', | ||
1246 | '\n']) | ||
1247 | |||
1248 | if remove: | ||
1249 | for entry in remove: | ||
1250 | if machine: | ||
1251 | entry_remove_line = 'SRC_URI:remove:%s = " %s"\n' % (machine, entry) | ||
1252 | else: | ||
1253 | entry_remove_line = 'SRC_URI:remove = "%s"\n' % entry | ||
1254 | |||
1255 | expectedlines.extend([entry_remove_line, | ||
1256 | '\n']) | ||
1257 | |||
625 | if has_src_uri: | 1258 | if has_src_uri: |
626 | uri = 'file://%s' % filename | 1259 | uri = 'file://%s' % filename |
627 | if expected_subdir: | 1260 | if expected_subdir: |
628 | uri += ';subdir=%s' % expected_subdir | 1261 | uri += ';subdir=%s' % expected_subdir |
629 | expectedlines[0:0] = ['SRC_URI += "%s"\n' % uri, | 1262 | if machine: |
630 | '\n'] | 1263 | src_uri_line = 'SRC_URI%s = " %s"\n' % (override, uri) |
1264 | else: | ||
1265 | src_uri_line = 'SRC_URI += "%s"\n' % uri | ||
1266 | |||
1267 | expectedlines.extend([src_uri_line, '\n']) | ||
1268 | |||
1269 | with open("/tmp/tmp.txt", "w") as file: | ||
1270 | print(expectedlines, file=file) | ||
1271 | |||
1272 | if machine: | ||
1273 | filename = '%s/%s' % (machine, filename) | ||
631 | 1274 | ||
632 | return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename]) | 1275 | return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename]) |
633 | 1276 | ||
@@ -682,18 +1325,46 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase): | |||
682 | 1325 | ||
683 | def test_recipetool_appendsrcfile_existing_in_src_uri(self): | 1326 | def test_recipetool_appendsrcfile_existing_in_src_uri(self): |
684 | testrecipe = 'base-files' | 1327 | testrecipe = 'base-files' |
685 | filepath = self._get_first_file_uri(testrecipe) | 1328 | filepath,_ = self._get_first_file_uri(testrecipe) |
686 | self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) | 1329 | self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) |
687 | self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False) | 1330 | self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False) |
688 | 1331 | ||
689 | def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self): | 1332 | def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self, machine=None): |
690 | testrecipe = 'base-files' | 1333 | testrecipe = 'base-files' |
691 | subdir = 'tmp' | 1334 | subdir = 'tmp' |
692 | filepath = self._get_first_file_uri(testrecipe) | 1335 | filepath, srcuri_entry = self._get_first_file_uri(testrecipe) |
693 | self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) | 1336 | self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) |
694 | 1337 | ||
695 | output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False) | 1338 | self._test_appendsrcfile(testrecipe, filepath, subdir, machine=machine, remove=[srcuri_entry]) |
696 | self.assertTrue(any('with different parameters' in l for l in output)) | 1339 | |
1340 | def test_recipetool_appendsrcfile_machine(self): | ||
1341 | # A very basic test | ||
1342 | self._test_appendsrcfile('base-files', 'a-file', machine='mymachine') | ||
1343 | |||
1344 | # Force cleaning the output of previous test | ||
1345 | self.tearDownLocal() | ||
1346 | |||
1347 | # A more complex test: existing entry in src_uri with different param | ||
1348 | self.test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(machine='mymachine') | ||
1349 | |||
1350 | def test_recipetool_appendsrcfile_update_recipe_basic(self): | ||
1351 | testrecipe = "mtd-utils-selftest" | ||
1352 | recipefile = get_bb_var('FILE', testrecipe) | ||
1353 | self.assertIn('meta-selftest', recipefile, 'This test expect %s recipe to be in meta-selftest') | ||
1354 | cmd = 'recipetool appendsrcfile -W -u meta-selftest %s %s' % (testrecipe, self.testfile) | ||
1355 | result = runCmd(cmd) | ||
1356 | self.assertNotIn('Traceback', result.output) | ||
1357 | self.add_command_to_tearDown('cd %s; rm -f %s/%s; git checkout .' % (os.path.dirname(recipefile), testrecipe, os.path.basename(self.testfile))) | ||
1358 | |||
1359 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), | ||
1360 | ('??', '.*/%s/%s$' % (testrecipe, os.path.basename(self.testfile)))] | ||
1361 | self._check_repo_status(os.path.dirname(recipefile), expected_status) | ||
1362 | result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile)) | ||
1363 | removelines = [] | ||
1364 | addlines = [ | ||
1365 | 'file://%s \\\\' % os.path.basename(self.testfile), | ||
1366 | ] | ||
1367 | self._check_diff(result.output, addlines, removelines) | ||
697 | 1368 | ||
698 | def test_recipetool_appendsrcfile_replace_file_srcdir(self): | 1369 | def test_recipetool_appendsrcfile_replace_file_srcdir(self): |
699 | testrecipe = 'bash' | 1370 | testrecipe = 'bash' |
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py index 747870383b..2cb4445f81 100644 --- a/meta/lib/oeqa/selftest/cases/recipeutils.py +++ b/meta/lib/oeqa/selftest/cases/recipeutils.py | |||
@@ -1,15 +1,13 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | import os | ||
6 | import re | ||
7 | import time | ||
8 | import logging | ||
9 | import bb.tinfoil | 7 | import bb.tinfoil |
10 | 8 | ||
11 | from oeqa.selftest.case import OESelftestTestCase | 9 | from oeqa.selftest.case import OESelftestTestCase |
12 | from oeqa.utils.commands import runCmd, get_test_layer | 10 | from oeqa.utils.commands import get_test_layer |
13 | 11 | ||
14 | 12 | ||
15 | def setUpModule(): | 13 | def setUpModule(): |
@@ -40,7 +38,7 @@ class RecipeUtilsTests(OESelftestTestCase): | |||
40 | SUMMARY = "Python framework to process interdependent tasks in a pool of workers" | 38 | SUMMARY = "Python framework to process interdependent tasks in a pool of workers" |
41 | HOMEPAGE = "http://github.com/gitpython-developers/async" | 39 | HOMEPAGE = "http://github.com/gitpython-developers/async" |
42 | SECTION = "devel/python" | 40 | SECTION = "devel/python" |
43 | -LICENSE = "BSD" | 41 | -LICENSE = "BSD-3-Clause" |
44 | +LICENSE = "something" | 42 | +LICENSE = "something" |
45 | LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e" | 43 | LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e" |
46 | 44 | ||
@@ -52,7 +50,7 @@ class RecipeUtilsTests(OESelftestTestCase): | |||
52 | +SRC_URI[md5sum] = "aaaaaa" | 50 | +SRC_URI[md5sum] = "aaaaaa" |
53 | SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051" | 51 | SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051" |
54 | 52 | ||
55 | RDEPENDS_${PN} += "${PYTHON_PN}-threading" | 53 | RDEPENDS:${PN} += "python3-threading" |
56 | """ | 54 | """ |
57 | patchlines = [] | 55 | patchlines = [] |
58 | for f in patches: | 56 | for f in patches: |
@@ -80,7 +78,7 @@ class RecipeUtilsTests(OESelftestTestCase): | |||
80 | 78 | ||
81 | -SRC_URI += "file://somefile" | 79 | -SRC_URI += "file://somefile" |
82 | - | 80 | - |
83 | SRC_URI_append = " file://anotherfile" | 81 | SRC_URI:append = " file://anotherfile" |
84 | """ | 82 | """ |
85 | patchlines = [] | 83 | patchlines = [] |
86 | for f in patches: | 84 | for f in patches: |
@@ -105,7 +103,7 @@ class RecipeUtilsTests(OESelftestTestCase): | |||
105 | 103 | ||
106 | -SRC_URI += "file://somefile" | 104 | -SRC_URI += "file://somefile" |
107 | - | 105 | - |
108 | -SRC_URI_append = " file://anotherfile" | 106 | -SRC_URI:append = " file://anotherfile" |
109 | """ | 107 | """ |
110 | patchlines = [] | 108 | patchlines = [] |
111 | for f in patches: | 109 | for f in patches: |
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py index 0d0259477e..80e830136f 100644 --- a/meta/lib/oeqa/selftest/cases/reproducible.py +++ b/meta/lib/oeqa/selftest/cases/reproducible.py | |||
@@ -9,35 +9,13 @@ import bb.utils | |||
9 | import functools | 9 | import functools |
10 | import multiprocessing | 10 | import multiprocessing |
11 | import textwrap | 11 | import textwrap |
12 | import json | ||
13 | import unittest | ||
14 | import tempfile | 12 | import tempfile |
15 | import shutil | 13 | import shutil |
16 | import stat | 14 | import stat |
17 | import os | 15 | import os |
18 | import datetime | 16 | import datetime |
19 | 17 | ||
20 | # For sample packages, see: | ||
21 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-0t7wr_oo/ | ||
22 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-4s9ejwyp/ | ||
23 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-haiwdlbr/ | ||
24 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-hwds3mcl/ | ||
25 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201203-sua0pzvc/ | ||
26 | # (both packages/ and packages-excluded/) | ||
27 | |||
28 | # ruby-ri-docs, meson: | ||
29 | #https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20210215-0_td9la2/packages/diff-html/ | ||
30 | exclude_packages = [ | 18 | exclude_packages = [ |
31 | 'glide', | ||
32 | 'go-dep', | ||
33 | 'go-helloworld', | ||
34 | 'go-runtime', | ||
35 | 'go_', | ||
36 | 'go-', | ||
37 | 'meson', | ||
38 | 'ovmf-shell-efi', | ||
39 | 'perf', | ||
40 | 'ruby-ri-docs' | ||
41 | ] | 19 | ] |
42 | 20 | ||
43 | def is_excluded(package): | 21 | def is_excluded(package): |
@@ -65,13 +43,14 @@ class CompareResult(object): | |||
65 | return (self.status, self.test) < (other.status, other.test) | 43 | return (self.status, self.test) < (other.status, other.test) |
66 | 44 | ||
67 | class PackageCompareResults(object): | 45 | class PackageCompareResults(object): |
68 | def __init__(self): | 46 | def __init__(self, exclusions): |
69 | self.total = [] | 47 | self.total = [] |
70 | self.missing = [] | 48 | self.missing = [] |
71 | self.different = [] | 49 | self.different = [] |
72 | self.different_excluded = [] | 50 | self.different_excluded = [] |
73 | self.same = [] | 51 | self.same = [] |
74 | self.active_exclusions = set() | 52 | self.active_exclusions = set() |
53 | exclude_packages.extend((exclusions or "").split()) | ||
75 | 54 | ||
76 | def add_result(self, r): | 55 | def add_result(self, r): |
77 | self.total.append(r) | 56 | self.total.append(r) |
@@ -118,8 +97,9 @@ def compare_file(reference, test, diffutils_sysroot): | |||
118 | result.status = SAME | 97 | result.status = SAME |
119 | return result | 98 | return result |
120 | 99 | ||
121 | def run_diffoscope(a_dir, b_dir, html_dir, **kwargs): | 100 | def run_diffoscope(a_dir, b_dir, html_dir, max_report_size=0, **kwargs): |
122 | return runCmd(['diffoscope', '--no-default-limits', '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir], | 101 | return runCmd(['diffoscope', '--no-default-limits', '--max-report-size', str(max_report_size), |
102 | '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir], | ||
123 | **kwargs) | 103 | **kwargs) |
124 | 104 | ||
125 | class DiffoscopeTests(OESelftestTestCase): | 105 | class DiffoscopeTests(OESelftestTestCase): |
@@ -149,10 +129,15 @@ class ReproducibleTests(OESelftestTestCase): | |||
149 | 129 | ||
150 | package_classes = ['deb', 'ipk', 'rpm'] | 130 | package_classes = ['deb', 'ipk', 'rpm'] |
151 | 131 | ||
132 | # Maximum report size, in bytes | ||
133 | max_report_size = 250 * 1024 * 1024 | ||
134 | |||
152 | # targets are the things we want to test the reproducibility of | 135 | # targets are the things we want to test the reproducibility of |
153 | targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world'] | 136 | targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world'] |
137 | |||
154 | # sstate targets are things to pull from sstate to potentially cut build/debugging time | 138 | # sstate targets are things to pull from sstate to potentially cut build/debugging time |
155 | sstate_targets = [] | 139 | sstate_targets = [] |
140 | |||
156 | save_results = False | 141 | save_results = False |
157 | if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ: | 142 | if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ: |
158 | save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT'] | 143 | save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT'] |
@@ -167,11 +152,29 @@ class ReproducibleTests(OESelftestTestCase): | |||
167 | 152 | ||
168 | def setUpLocal(self): | 153 | def setUpLocal(self): |
169 | super().setUpLocal() | 154 | super().setUpLocal() |
170 | needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS'] | 155 | needed_vars = [ |
156 | 'TOPDIR', | ||
157 | 'TARGET_PREFIX', | ||
158 | 'BB_NUMBER_THREADS', | ||
159 | 'BB_HASHSERVE', | ||
160 | 'OEQA_REPRODUCIBLE_TEST_PACKAGE', | ||
161 | 'OEQA_REPRODUCIBLE_TEST_TARGET', | ||
162 | 'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS', | ||
163 | 'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES', | ||
164 | ] | ||
171 | bb_vars = get_bb_vars(needed_vars) | 165 | bb_vars = get_bb_vars(needed_vars) |
172 | for v in needed_vars: | 166 | for v in needed_vars: |
173 | setattr(self, v.lower(), bb_vars[v]) | 167 | setattr(self, v.lower(), bb_vars[v]) |
174 | 168 | ||
169 | if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']: | ||
170 | self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split() | ||
171 | |||
172 | if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET']: | ||
173 | self.targets = bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'].split() | ||
174 | |||
175 | if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']: | ||
176 | self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split() | ||
177 | |||
175 | self.extraresults = {} | 178 | self.extraresults = {} |
176 | self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = '' | 179 | self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = '' |
177 | self.extraresults.setdefault('reproducible', {}).setdefault('files', {}) | 180 | self.extraresults.setdefault('reproducible', {}).setdefault('files', {}) |
@@ -180,7 +183,7 @@ class ReproducibleTests(OESelftestTestCase): | |||
180 | self.extraresults['reproducible.rawlogs']['log'] += msg | 183 | self.extraresults['reproducible.rawlogs']['log'] += msg |
181 | 184 | ||
182 | def compare_packages(self, reference_dir, test_dir, diffutils_sysroot): | 185 | def compare_packages(self, reference_dir, test_dir, diffutils_sysroot): |
183 | result = PackageCompareResults() | 186 | result = PackageCompareResults(self.oeqa_reproducible_excluded_packages) |
184 | 187 | ||
185 | old_cwd = os.getcwd() | 188 | old_cwd = os.getcwd() |
186 | try: | 189 | try: |
@@ -219,12 +222,10 @@ class ReproducibleTests(OESelftestTestCase): | |||
219 | bb.utils.remove(tmpdir, recurse=True) | 222 | bb.utils.remove(tmpdir, recurse=True) |
220 | 223 | ||
221 | config = textwrap.dedent('''\ | 224 | config = textwrap.dedent('''\ |
222 | INHERIT += "reproducible_build" | ||
223 | PACKAGE_CLASSES = "{package_classes}" | 225 | PACKAGE_CLASSES = "{package_classes}" |
224 | INHIBIT_PACKAGE_STRIP = "1" | ||
225 | TMPDIR = "{tmpdir}" | 226 | TMPDIR = "{tmpdir}" |
226 | LICENSE_FLAGS_WHITELIST = "commercial" | 227 | LICENSE_FLAGS_ACCEPTED = "commercial" |
227 | DISTRO_FEATURES_append = ' systemd pam' | 228 | DISTRO_FEATURES:append = ' pam' |
228 | USERADDEXTENSION = "useradd-staticids" | 229 | USERADDEXTENSION = "useradd-staticids" |
229 | USERADD_ERROR_DYNAMIC = "skip" | 230 | USERADD_ERROR_DYNAMIC = "skip" |
230 | USERADD_UID_TABLES += "files/static-passwd" | 231 | USERADD_UID_TABLES += "files/static-passwd" |
@@ -242,7 +243,7 @@ class ReproducibleTests(OESelftestTestCase): | |||
242 | # mirror, forcing a complete build from scratch | 243 | # mirror, forcing a complete build from scratch |
243 | config += textwrap.dedent('''\ | 244 | config += textwrap.dedent('''\ |
244 | SSTATE_DIR = "${TMPDIR}/sstate" | 245 | SSTATE_DIR = "${TMPDIR}/sstate" |
245 | SSTATE_MIRRORS = "" | 246 | SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH" |
246 | ''') | 247 | ''') |
247 | 248 | ||
248 | self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT')) | 249 | self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT')) |
@@ -309,9 +310,13 @@ class ReproducibleTests(OESelftestTestCase): | |||
309 | self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)])) | 310 | self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)])) |
310 | self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)])) | 311 | self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)])) |
311 | 312 | ||
312 | if result.missing or result.different: | 313 | if result.different: |
313 | fails.append("The following %s packages are missing or different and not in exclusion list: %s" % | 314 | fails.append("The following %s packages are different and not in exclusion list:\n%s" % |
314 | (c, '\n'.join(r.test for r in (result.missing + result.different)))) | 315 | (c, '\n'.join(r.test for r in (result.different)))) |
316 | |||
317 | if result.missing and len(self.sstate_targets) == 0: | ||
318 | fails.append("The following %s packages are missing and not in exclusion list:\n%s" % | ||
319 | (c, '\n'.join(r.test for r in (result.missing)))) | ||
315 | 320 | ||
316 | # Clean up empty directories | 321 | # Clean up empty directories |
317 | if self.save_results: | 322 | if self.save_results: |
@@ -325,7 +330,7 @@ class ReproducibleTests(OESelftestTestCase): | |||
325 | # Copy jquery to improve the diffoscope output usability | 330 | # Copy jquery to improve the diffoscope output usability |
326 | self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) | 331 | self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) |
327 | 332 | ||
328 | run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, | 333 | run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, max_report_size=self.max_report_size, |
329 | native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) | 334 | native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) |
330 | 335 | ||
331 | if fails: | 336 | if fails: |
diff --git a/meta/lib/oeqa/selftest/cases/resulttooltests.py b/meta/lib/oeqa/selftest/cases/resulttooltests.py index dac5c46801..c3303f3fbb 100644 --- a/meta/lib/oeqa/selftest/cases/resulttooltests.py +++ b/meta/lib/oeqa/selftest/cases/resulttooltests.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -69,7 +71,7 @@ class ResultToolTests(OESelftestTestCase): | |||
69 | self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results) | 71 | self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results) |
70 | self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results) | 72 | self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results) |
71 | 73 | ||
72 | def test_regrresion_can_get_regression_result(self): | 74 | def test_regression_can_get_regression_result(self): |
73 | base_result_data = {'result': {'test1': {'status': 'PASSED'}, | 75 | base_result_data = {'result': {'test1': {'status': 'PASSED'}, |
74 | 'test2': {'status': 'PASSED'}, | 76 | 'test2': {'status': 'PASSED'}, |
75 | 'test3': {'status': 'FAILED'}, | 77 | 'test3': {'status': 'FAILED'}, |
@@ -96,3 +98,278 @@ class ResultToolTests(OESelftestTestCase): | |||
96 | resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map) | 98 | resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map) |
97 | self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results)) | 99 | self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results)) |
98 | 100 | ||
101 | def test_results_without_metadata_can_be_compared(self): | ||
102 | base_configuration = {"configuration": { | ||
103 | "TEST_TYPE": "oeselftest", | ||
104 | "TESTSERIES": "series1", | ||
105 | "IMAGE_BASENAME": "image", | ||
106 | "IMAGE_PKGTYPE": "ipk", | ||
107 | "DISTRO": "mydistro", | ||
108 | "MACHINE": "qemux86", | ||
109 | "STARTTIME": 1672527600 | ||
110 | }, "result": {}} | ||
111 | target_configuration = {"configuration": { | ||
112 | "TEST_TYPE": "oeselftest", | ||
113 | "TESTSERIES": "series1", | ||
114 | "IMAGE_BASENAME": "image", | ||
115 | "IMAGE_PKGTYPE": "ipk", | ||
116 | "DISTRO": "mydistro", | ||
117 | "MACHINE": "qemux86", | ||
118 | "STARTTIME": 1672527600 | ||
119 | }, "result": {}} | ||
120 | self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration), | ||
121 | msg="incorrect metadata filtering, tests without metadata should be compared") | ||
122 | |||
123 | def test_target_result_with_missing_metadata_can_not_be_compared(self): | ||
124 | base_configuration = {"configuration": { | ||
125 | "TEST_TYPE": "oeselftest", | ||
126 | "TESTSERIES": "series1", | ||
127 | "IMAGE_BASENAME": "image", | ||
128 | "IMAGE_PKGTYPE": "ipk", | ||
129 | "DISTRO": "mydistro", | ||
130 | "MACHINE": "qemux86", | ||
131 | "OESELFTEST_METADATA": { | ||
132 | "run_all_tests": True, | ||
133 | "run_tests": None, | ||
134 | "skips": None, | ||
135 | "machine": None, | ||
136 | "select_tags": ["toolchain-user", "toolchain-system"], | ||
137 | "exclude_tags": None | ||
138 | }}, "result": {}} | ||
139 | target_configuration = {"configuration": {"TEST_TYPE": "oeselftest", | ||
140 | "TESTSERIES": "series1", | ||
141 | "IMAGE_BASENAME": "image", | ||
142 | "IMAGE_PKGTYPE": "ipk", | ||
143 | "DISTRO": "mydistro", | ||
144 | "MACHINE": "qemux86", | ||
145 | "STARTTIME": 1672527600 | ||
146 | }, "result": {}} | ||
147 | self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration), | ||
148 | msg="incorrect metadata filtering, tests should not be compared") | ||
149 | |||
150 | def test_results_with_matching_metadata_can_be_compared(self): | ||
151 | base_configuration = {"configuration": { | ||
152 | "TEST_TYPE": "oeselftest", | ||
153 | "TESTSERIES": "series1", | ||
154 | "IMAGE_BASENAME": "image", | ||
155 | "IMAGE_PKGTYPE": "ipk", | ||
156 | "DISTRO": "mydistro", | ||
157 | "MACHINE": "qemux86", | ||
158 | "STARTTIME": 1672527600, | ||
159 | "OESELFTEST_METADATA": {"run_all_tests": True, | ||
160 | "run_tests": None, | ||
161 | "skips": None, | ||
162 | "machine": None, | ||
163 | "select_tags": ["toolchain-user", "toolchain-system"], | ||
164 | "exclude_tags": None} | ||
165 | }, "result": {}} | ||
166 | target_configuration = {"configuration": { | ||
167 | "TEST_TYPE": "oeselftest", | ||
168 | "TESTSERIES": "series1", | ||
169 | "IMAGE_BASENAME": "image", | ||
170 | "IMAGE_PKGTYPE": "ipk", | ||
171 | "DISTRO": "mydistro", | ||
172 | "MACHINE": "qemux86", | ||
173 | "STARTTIME": 1672527600, | ||
174 | "OESELFTEST_METADATA": {"run_all_tests": True, | ||
175 | "run_tests": None, | ||
176 | "skips": None, | ||
177 | "machine": None, | ||
178 | "select_tags": ["toolchain-user", "toolchain-system"], | ||
179 | "exclude_tags": None} | ||
180 | }, "result": {}} | ||
181 | self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration), | ||
182 | msg="incorrect metadata filtering, tests with matching metadata should be compared") | ||
183 | |||
184 | def test_results_with_mismatching_metadata_can_not_be_compared(self): | ||
185 | base_configuration = {"configuration": { | ||
186 | "TEST_TYPE": "oeselftest", | ||
187 | "TESTSERIES": "series1", | ||
188 | "IMAGE_BASENAME": "image", | ||
189 | "IMAGE_PKGTYPE": "ipk", | ||
190 | "DISTRO": "mydistro", | ||
191 | "MACHINE": "qemux86", | ||
192 | "STARTTIME": 1672527600, | ||
193 | "OESELFTEST_METADATA": {"run_all_tests": True, | ||
194 | "run_tests": None, | ||
195 | "skips": None, | ||
196 | "machine": None, | ||
197 | "select_tags": ["toolchain-user", "toolchain-system"], | ||
198 | "exclude_tags": None} | ||
199 | }, "result": {}} | ||
200 | target_configuration = {"configuration": { | ||
201 | "TEST_TYPE": "oeselftest", | ||
202 | "TESTSERIES": "series1", | ||
203 | "IMAGE_BASENAME": "image", | ||
204 | "IMAGE_PKGTYPE": "ipk", | ||
205 | "DISTRO": "mydistro", | ||
206 | "MACHINE": "qemux86", | ||
207 | "STARTTIME": 1672527600, | ||
208 | "OESELFTEST_METADATA": {"run_all_tests": True, | ||
209 | "run_tests": None, | ||
210 | "skips": None, | ||
211 | "machine": None, | ||
212 | "select_tags": ["machine"], | ||
213 | "exclude_tags": None} | ||
214 | }, "result": {}} | ||
215 | self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration), | ||
216 | msg="incorrect metadata filtering, tests with mismatching metadata should not be compared") | ||
217 | |||
218 | def test_metadata_matching_is_only_checked_for_relevant_test_type(self): | ||
219 | base_configuration = {"configuration": {"TEST_TYPE": "runtime", | ||
220 | "TESTSERIES": "series1", | ||
221 | "IMAGE_BASENAME": "image", | ||
222 | "IMAGE_PKGTYPE": "ipk", | ||
223 | "DISTRO": "mydistro", | ||
224 | "MACHINE": "qemux86", | ||
225 | "STARTTIME": 1672527600, | ||
226 | "OESELFTEST_METADATA": {"run_all_tests": True, | ||
227 | "run_tests": None, | ||
228 | "skips": None, | ||
229 | "machine": None, | ||
230 | "select_tags": ["toolchain-user", "toolchain-system"], | ||
231 | "exclude_tags": None}}, "result": {}} | ||
232 | target_configuration = {"configuration": {"TEST_TYPE": "runtime", | ||
233 | "TESTSERIES": "series1", | ||
234 | "IMAGE_BASENAME": "image", | ||
235 | "IMAGE_PKGTYPE": "ipk", | ||
236 | "DISTRO": "mydistro", | ||
237 | "MACHINE": "qemux86", | ||
238 | "STARTTIME": 1672527600, | ||
239 | "OESELFTEST_METADATA": {"run_all_tests": True, | ||
240 | "run_tests": None, | ||
241 | "skips": None, | ||
242 | "machine": None, | ||
243 | "select_tags": ["machine"], | ||
244 | "exclude_tags": None}}, "result": {}} | ||
245 | self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration), | ||
246 | msg="incorrect metadata filtering, %s tests should be compared" % base_configuration['configuration']['TEST_TYPE']) | ||
247 | |||
248 | def test_machine_matches(self): | ||
249 | base_configuration = {"configuration": { | ||
250 | "TEST_TYPE": "runtime", | ||
251 | "MACHINE": "qemux86"}, "result": {}} | ||
252 | target_configuration = {"configuration": { | ||
253 | "TEST_TYPE": "runtime", | ||
254 | "MACHINE": "qemux86" | ||
255 | }, "result": {}} | ||
256 | self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration), | ||
257 | msg="incorrect machine filtering, identical machine tests should be compared") | ||
258 | |||
259 | def test_machine_mismatches(self): | ||
260 | base_configuration = {"configuration": { | ||
261 | "TEST_TYPE": "runtime", | ||
262 | "MACHINE": "qemux86" | ||
263 | }, "result": {}} | ||
264 | target_configuration = {"configuration": { | ||
265 | "TEST_TYPE": "runtime", | ||
266 | "MACHINE": "qemux86_64" | ||
267 | }, "result": {}} | ||
268 | self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration), | ||
269 | msg="incorrect machine filtering, mismatching machine tests should not be compared") | ||
270 | |||
271 | def test_can_not_compare_non_ltp_tests(self): | ||
272 | base_configuration = {"configuration": { | ||
273 | "TEST_TYPE": "runtime", | ||
274 | "MACHINE": "qemux86" | ||
275 | }, "result": { | ||
276 | "ltpresult_foo": { | ||
277 | "status": "PASSED" | ||
278 | }}} | ||
279 | target_configuration = {"configuration": { | ||
280 | "TEST_TYPE": "runtime", | ||
281 | "MACHINE": "qemux86_64" | ||
282 | }, "result": { | ||
283 | "bar": { | ||
284 | "status": "PASSED" | ||
285 | }}} | ||
286 | self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration), | ||
287 | msg="incorrect ltpresult filtering, mismatching ltpresult content should not be compared") | ||
288 | |||
289 | def test_can_compare_ltp_tests(self): | ||
290 | base_configuration = {"configuration": { | ||
291 | "TEST_TYPE": "runtime", | ||
292 | "MACHINE": "qemux86" | ||
293 | }, "result": { | ||
294 | "ltpresult_foo": { | ||
295 | "status": "PASSED" | ||
296 | }}} | ||
297 | target_configuration = {"configuration": { | ||
298 | "TEST_TYPE": "runtime", | ||
299 | "MACHINE": "qemux86" | ||
300 | }, "result": { | ||
301 | "ltpresult_foo": { | ||
302 | "status": "PASSED" | ||
303 | }}} | ||
304 | self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration), | ||
305 | msg="incorrect ltpresult filtering, matching ltpresult content should be compared") | ||
306 | |||
307 | def test_can_match_non_static_ptest_names(self): | ||
308 | base_configuration = {"a": { | ||
309 | "conf_X": { | ||
310 | "configuration": { | ||
311 | "TEST_TYPE": "runtime", | ||
312 | "MACHINE": "qemux86" | ||
313 | }, "result": { | ||
314 | "ptestresult.lttng-tools.foo_-_bar_-_moo": { | ||
315 | "status": "PASSED" | ||
316 | }, | ||
317 | "ptestresult.babeltrace.bar_-_moo_-_foo": { | ||
318 | "status": "PASSED" | ||
319 | }, | ||
320 | "ptestresult.babeltrace2.moo_-_foo_-_bar": { | ||
321 | "status": "PASSED" | ||
322 | }, | ||
323 | "ptestresult.curl.test_0000__foo_out_of_bar": { | ||
324 | "status": "PASSED" | ||
325 | }, | ||
326 | "ptestresult.dbus.test_0000__foo_out_of_bar,_remaining:_00:02,_took_0.032s,_duration:_03:32_": { | ||
327 | "status": "PASSED" | ||
328 | }, | ||
329 | "ptestresult.binutils-ld.in testcase /foo/build-st-bar/moo/ctf.exp": { | ||
330 | "status": "PASSED" | ||
331 | }, | ||
332 | "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.30975 on target": { | ||
333 | "status": "PASSED" | ||
334 | }, | ||
335 | "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": { | ||
336 | "status": "PASSED" | ||
337 | } | ||
338 | }}}} | ||
339 | target_configuration = {"a": { | ||
340 | "conf_Y": { | ||
341 | "configuration": { | ||
342 | "TEST_TYPE": "runtime", | ||
343 | "MACHINE": "qemux86" | ||
344 | }, "result": { | ||
345 | "ptestresult.lttng-tools.foo_-_yyy_-_zzz": { | ||
346 | "status": "PASSED" | ||
347 | }, | ||
348 | "ptestresult.babeltrace.bar_-_zzz_-_xxx": { | ||
349 | "status": "PASSED" | ||
350 | }, | ||
351 | "ptestresult.babeltrace2.moo_-_xxx_-_yyy": { | ||
352 | "status": "PASSED" | ||
353 | }, | ||
354 | "ptestresult.curl.test_0000__xxx_out_of_yyy": { | ||
355 | "status": "PASSED" | ||
356 | }, | ||
357 | "ptestresult.dbus.test_0000__yyy_out_of_zzz,_remaining:_00:03,_took_0.034s,_duration:_03:30_": { | ||
358 | "status": "PASSED" | ||
359 | }, | ||
360 | "ptestresult.binutils-ld.in testcase /xxx/build-st-yyy/zzz/ctf.exp": { | ||
361 | "status": "PASSED" | ||
362 | }, | ||
363 | "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.45678 on target": { | ||
364 | "status": "PASSED" | ||
365 | }, | ||
366 | "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": { | ||
367 | "status": "PASSED" | ||
368 | } | ||
369 | }}}} | ||
370 | regression.fixup_ptest_names(base_configuration, self.logger) | ||
371 | regression.fixup_ptest_names(target_configuration, self.logger) | ||
372 | result, resultstring = regression.compare_result( | ||
373 | self.logger, "A", "B", base_configuration["a"]["conf_X"], target_configuration["a"]["conf_Y"]) | ||
374 | self.assertDictEqual( | ||
375 | result, {}, msg=f"ptests should be compared: {resultstring}") | ||
diff --git a/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py new file mode 100644 index 0000000000..44e2c09a6f --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py | |||
@@ -0,0 +1,97 @@ | |||
1 | # SPDX-FileCopyrightText: Huawei Inc. | ||
2 | # | ||
3 | # SPDX-License-Identifier: MIT | ||
4 | |||
5 | import os | ||
6 | import oe | ||
7 | import unittest | ||
8 | from oeqa.selftest.case import OESelftestTestCase | ||
9 | from oeqa.utils.commands import bitbake, get_bb_vars | ||
10 | |||
11 | class ShadowUtilsTidyFiles(OESelftestTestCase): | ||
12 | """ | ||
13 | Check if shadow image rootfs files are tidy. | ||
14 | |||
15 | The tests are focused on testing the functionality provided by the | ||
16 | 'tidy_shadowutils_files' rootfs postprocess command (via | ||
17 | SORT_PASSWD_POSTPROCESS_COMMAND). | ||
18 | """ | ||
19 | |||
20 | def sysconf_build(self): | ||
21 | """ | ||
22 | Verify if shadow tidy files tests are to be run and if yes, build a | ||
23 | test image and return its sysconf rootfs path. | ||
24 | """ | ||
25 | |||
26 | test_image = "core-image-minimal" | ||
27 | |||
28 | config = 'IMAGE_CLASSES += "extrausers"\n' | ||
29 | config += 'EXTRA_USERS_PARAMS = "groupadd -g 1000 oeqatester; "\n' | ||
30 | config += 'EXTRA_USERS_PARAMS += "useradd -p \'\' -u 1000 -N -g 1000 oeqatester; "\n' | ||
31 | self.write_config(config) | ||
32 | |||
33 | vars = get_bb_vars(("IMAGE_ROOTFS", "SORT_PASSWD_POSTPROCESS_COMMAND", "sysconfdir"), | ||
34 | test_image) | ||
35 | passwd_postprocess_cmd = vars["SORT_PASSWD_POSTPROCESS_COMMAND"] | ||
36 | self.assertIsNotNone(passwd_postprocess_cmd) | ||
37 | if (passwd_postprocess_cmd.strip() != 'tidy_shadowutils_files;'): | ||
38 | raise unittest.SkipTest("Testcase skipped as 'tidy_shadowutils_files' " | ||
39 | "rootfs post process command is not the set SORT_PASSWD_POSTPROCESS_COMMAND.") | ||
40 | |||
41 | rootfs = vars["IMAGE_ROOTFS"] | ||
42 | self.assertIsNotNone(rootfs) | ||
43 | sysconfdir = vars["sysconfdir"] | ||
44 | bitbake(test_image) | ||
45 | self.assertIsNotNone(sysconfdir) | ||
46 | |||
47 | return oe.path.join(rootfs, sysconfdir) | ||
48 | |||
49 | def test_shadowutils_backup_files(self): | ||
50 | """ | ||
51 | Test that the rootfs doesn't include any known shadow backup files. | ||
52 | """ | ||
53 | |||
54 | backup_files = ( | ||
55 | 'group-', | ||
56 | 'gshadow-', | ||
57 | 'passwd-', | ||
58 | 'shadow-', | ||
59 | 'subgid-', | ||
60 | 'subuid-', | ||
61 | ) | ||
62 | |||
63 | rootfs_sysconfdir = self.sysconf_build() | ||
64 | found = [] | ||
65 | for backup_file in backup_files: | ||
66 | backup_filepath = oe.path.join(rootfs_sysconfdir, backup_file) | ||
67 | if os.path.exists(backup_filepath): | ||
68 | found.append(backup_file) | ||
69 | if (found): | ||
70 | raise Exception('The following shadow backup files were found in ' | ||
71 | 'the rootfs: %s' % found) | ||
72 | |||
73 | def test_shadowutils_sorted_files(self): | ||
74 | """ | ||
75 | Test that the 'passwd' and the 'group' shadow utils files are ordered | ||
76 | by ID. | ||
77 | """ | ||
78 | |||
79 | files = ( | ||
80 | 'passwd', | ||
81 | 'group', | ||
82 | ) | ||
83 | |||
84 | rootfs_sysconfdir = self.sysconf_build() | ||
85 | unsorted = [] | ||
86 | for file in files: | ||
87 | filepath = oe.path.join(rootfs_sysconfdir, file) | ||
88 | with open(filepath, 'rb') as f: | ||
89 | ids = [] | ||
90 | lines = f.readlines() | ||
91 | for line in lines: | ||
92 | entries = line.split(b':') | ||
93 | ids.append(int(entries[2])) | ||
94 | if (ids != sorted(ids)): | ||
95 | unsorted.append(file) | ||
96 | if (unsorted): | ||
97 | raise Exception("The following files were not sorted by ID as expected: %s" % unsorted) | ||
diff --git a/meta/lib/oeqa/selftest/cases/rpmtests.py b/meta/lib/oeqa/selftest/cases/rpmtests.py new file mode 100644 index 0000000000..902d7dca3d --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/rpmtests.py | |||
@@ -0,0 +1,14 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | from oeqa.selftest.case import OESelftestTestCase | ||
8 | from oeqa.utils.commands import bitbake | ||
9 | |||
10 | class BitbakeTests(OESelftestTestCase): | ||
11 | |||
12 | def test_rpm_filenames(self): | ||
13 | test_recipe = "testrpm" | ||
14 | bitbake(test_recipe) | ||
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py index fa6113d7fa..70047ca0ca 100644 --- a/meta/lib/oeqa/selftest/cases/runcmd.py +++ b/meta/lib/oeqa/selftest/cases/runcmd.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -27,8 +29,8 @@ class RunCmdTests(OESelftestTestCase): | |||
27 | 29 | ||
28 | # The delta is intentionally smaller than the timeout, to detect cases where | 30 | # The delta is intentionally smaller than the timeout, to detect cases where |
29 | # we incorrectly apply the timeout more than once. | 31 | # we incorrectly apply the timeout more than once. |
30 | TIMEOUT = 5 | 32 | TIMEOUT = 10 |
31 | DELTA = 3 | 33 | DELTA = 8 |
32 | 34 | ||
33 | def test_result_okay(self): | 35 | def test_result_okay(self): |
34 | result = runCmd("true") | 36 | result = runCmd("true") |
@@ -56,11 +58,11 @@ class RunCmdTests(OESelftestTestCase): | |||
56 | self.assertEqual(result.status, 0) | 58 | self.assertEqual(result.status, 0) |
57 | 59 | ||
58 | def test_result_assertion(self): | 60 | def test_result_assertion(self): |
59 | self.assertRaisesRegexp(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar", | 61 | self.assertRaisesRegex(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar", |
60 | runCmd, "echo foobar >&2; false", shell=True) | 62 | runCmd, "echo foobar >&2; false", shell=True) |
61 | 63 | ||
62 | def test_result_exception(self): | 64 | def test_result_exception(self): |
63 | self.assertRaisesRegexp(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar", | 65 | self.assertRaisesRegex(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar", |
64 | runCmd, "echo foobar >&2; false", shell=True, assert_error=False) | 66 | runCmd, "echo foobar >&2; false", shell=True, assert_error=False) |
65 | 67 | ||
66 | def test_output(self): | 68 | def test_output(self): |
diff --git a/meta/lib/oeqa/selftest/cases/runqemu.py b/meta/lib/oeqa/selftest/cases/runqemu.py index 7e676bcb41..f01e1eec66 100644 --- a/meta/lib/oeqa/selftest/cases/runqemu.py +++ b/meta/lib/oeqa/selftest/cases/runqemu.py | |||
@@ -4,14 +4,17 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | import os | ||
7 | import re | 8 | import re |
8 | import tempfile | ||
9 | import time | 9 | import time |
10 | import oe.types | 10 | import oe.types |
11 | from oeqa.core.decorator import OETestTag | 11 | from oeqa.core.decorator import OETestTag |
12 | from oeqa.core.decorator.data import skipIfNotArch, skipIfNotMachine | ||
12 | from oeqa.selftest.case import OESelftestTestCase | 13 | from oeqa.selftest.case import OESelftestTestCase |
13 | from oeqa.utils.commands import bitbake, runqemu, get_bb_var, runCmd | 14 | from oeqa.utils.commands import bitbake, runqemu, get_bb_var |
14 | 15 | ||
16 | |||
17 | @OETestTag("runqemu") | ||
15 | class RunqemuTests(OESelftestTestCase): | 18 | class RunqemuTests(OESelftestTestCase): |
16 | """Runqemu test class""" | 19 | """Runqemu test class""" |
17 | 20 | ||
@@ -21,23 +24,26 @@ class RunqemuTests(OESelftestTestCase): | |||
21 | def setUpLocal(self): | 24 | def setUpLocal(self): |
22 | super(RunqemuTests, self).setUpLocal() | 25 | super(RunqemuTests, self).setUpLocal() |
23 | self.recipe = 'core-image-minimal' | 26 | self.recipe = 'core-image-minimal' |
24 | self.machine = 'qemux86-64' | 27 | self.machine = self.td['MACHINE'] |
25 | self.fstypes = "ext4 iso hddimg wic.vmdk wic.qcow2 wic.vdi" | 28 | self.image_link_name = get_bb_var('IMAGE_LINK_NAME', self.recipe) |
26 | self.cmd_common = "runqemu nographic" | ||
27 | 29 | ||
28 | kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), 'x86_64') | 30 | self.fstypes = "ext4" |
31 | if self.td["HOST_ARCH"] in ('i586', 'i686', 'x86_64'): | ||
32 | self.fstypes += " iso hddimg" | ||
33 | if self.machine == "qemux86-64": | ||
34 | self.fstypes += " wic.vmdk wic.qcow2 wic.vdi" | ||
35 | |||
36 | self.cmd_common = "runqemu nographic" | ||
37 | kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), self.td["TARGET_ARCH"]) | ||
29 | if kvm: | 38 | if kvm: |
30 | self.cmd_common += " kvm" | 39 | self.cmd_common += " kvm" |
31 | 40 | ||
32 | self.write_config( | 41 | self.write_config( |
33 | """ | 42 | """ |
34 | MACHINE = "%s" | ||
35 | IMAGE_FSTYPES = "%s" | 43 | IMAGE_FSTYPES = "%s" |
36 | # 10 means 1 second | 44 | # 10 means 1 second |
37 | SYSLINUX_TIMEOUT = "10" | 45 | SYSLINUX_TIMEOUT = "10" |
38 | """ | 46 | """ % self.fstypes) |
39 | % (self.machine, self.fstypes) | ||
40 | ) | ||
41 | 47 | ||
42 | if not RunqemuTests.image_is_ready: | 48 | if not RunqemuTests.image_is_ready: |
43 | RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | 49 | RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') |
@@ -56,14 +62,17 @@ SYSLINUX_TIMEOUT = "10" | |||
56 | cmd = "%s %s ext4" % (self.cmd_common, self.machine) | 62 | cmd = "%s %s ext4" % (self.cmd_common, self.machine) |
57 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: | 63 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: |
58 | with open(qemu.qemurunnerlog) as f: | 64 | with open(qemu.qemurunnerlog) as f: |
59 | self.assertIn('rootfs.ext4', f.read(), "Failed: %s" % cmd) | 65 | regexp = r'\nROOTFS: .*\.ext4]\n' |
66 | self.assertRegex(f.read(), regexp, "Failed to find '%s' in '%s' after running '%s'" % (regexp, qemu.qemurunnerlog, cmd)) | ||
60 | 67 | ||
68 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
61 | def test_boot_machine_iso(self): | 69 | def test_boot_machine_iso(self): |
62 | """Test runqemu machine iso""" | 70 | """Test runqemu machine iso""" |
63 | cmd = "%s %s iso" % (self.cmd_common, self.machine) | 71 | cmd = "%s %s iso" % (self.cmd_common, self.machine) |
64 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: | 72 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: |
65 | with open(qemu.qemurunnerlog) as f: | 73 | with open(qemu.qemurunnerlog) as f: |
66 | self.assertIn('media=cdrom', f.read(), "Failed: %s" % cmd) | 74 | text_in = 'media=cdrom' |
75 | self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd)) | ||
67 | 76 | ||
68 | def test_boot_recipe_image(self): | 77 | def test_boot_recipe_image(self): |
69 | """Test runqemu recipe-image""" | 78 | """Test runqemu recipe-image""" |
@@ -72,20 +81,24 @@ SYSLINUX_TIMEOUT = "10" | |||
72 | with open(qemu.qemurunnerlog) as f: | 81 | with open(qemu.qemurunnerlog) as f: |
73 | self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) | 82 | self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) |
74 | 83 | ||
75 | 84 | # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963 | |
85 | @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently") | ||
76 | def test_boot_recipe_image_vmdk(self): | 86 | def test_boot_recipe_image_vmdk(self): |
77 | """Test runqemu recipe-image vmdk""" | 87 | """Test runqemu recipe-image vmdk""" |
78 | cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe) | 88 | cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe) |
79 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: | 89 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: |
80 | with open(qemu.qemurunnerlog) as f: | 90 | with open(qemu.qemurunnerlog) as f: |
81 | self.assertIn('format=vmdk', f.read(), "Failed: %s" % cmd) | 91 | text_in = 'format=vmdk' |
92 | self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd)) | ||
82 | 93 | ||
94 | @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently") | ||
83 | def test_boot_recipe_image_vdi(self): | 95 | def test_boot_recipe_image_vdi(self): |
84 | """Test runqemu recipe-image vdi""" | 96 | """Test runqemu recipe-image vdi""" |
85 | cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe) | 97 | cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe) |
86 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: | 98 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: |
87 | with open(qemu.qemurunnerlog) as f: | 99 | with open(qemu.qemurunnerlog) as f: |
88 | self.assertIn('format=vdi', f.read(), "Failed: %s" % cmd) | 100 | text_in = 'format=vdi' |
101 | self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd)) | ||
89 | 102 | ||
90 | def test_boot_deploy(self): | 103 | def test_boot_deploy(self): |
91 | """Test runqemu deploy_dir_image""" | 104 | """Test runqemu deploy_dir_image""" |
@@ -94,7 +107,7 @@ SYSLINUX_TIMEOUT = "10" | |||
94 | with open(qemu.qemurunnerlog) as f: | 107 | with open(qemu.qemurunnerlog) as f: |
95 | self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) | 108 | self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) |
96 | 109 | ||
97 | 110 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | |
98 | def test_boot_deploy_hddimg(self): | 111 | def test_boot_deploy_hddimg(self): |
99 | """Test runqemu deploy_dir_image hddimg""" | 112 | """Test runqemu deploy_dir_image hddimg""" |
100 | cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image) | 113 | cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image) |
@@ -109,6 +122,7 @@ SYSLINUX_TIMEOUT = "10" | |||
109 | with open(qemu.qemurunnerlog) as f: | 122 | with open(qemu.qemurunnerlog) as f: |
110 | self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd) | 123 | self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd) |
111 | 124 | ||
125 | @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently") | ||
112 | def test_boot_machine_slirp_qcow2(self): | 126 | def test_boot_machine_slirp_qcow2(self): |
113 | """Test runqemu machine slirp qcow2""" | 127 | """Test runqemu machine slirp qcow2""" |
114 | cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine) | 128 | cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine) |
@@ -118,7 +132,7 @@ SYSLINUX_TIMEOUT = "10" | |||
118 | 132 | ||
119 | def test_boot_qemu_boot(self): | 133 | def test_boot_qemu_boot(self): |
120 | """Test runqemu /path/to/image.qemuboot.conf""" | 134 | """Test runqemu /path/to/image.qemuboot.conf""" |
121 | qemuboot_conf = "%s-%s.qemuboot.conf" % (self.recipe, self.machine) | 135 | qemuboot_conf = "%s.qemuboot.conf" % (self.image_link_name) |
122 | qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf) | 136 | qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf) |
123 | if not os.path.exists(qemuboot_conf): | 137 | if not os.path.exists(qemuboot_conf): |
124 | self.skipTest("%s not found" % qemuboot_conf) | 138 | self.skipTest("%s not found" % qemuboot_conf) |
@@ -129,7 +143,7 @@ SYSLINUX_TIMEOUT = "10" | |||
129 | 143 | ||
130 | def test_boot_rootfs(self): | 144 | def test_boot_rootfs(self): |
131 | """Test runqemu /path/to/rootfs.ext4""" | 145 | """Test runqemu /path/to/rootfs.ext4""" |
132 | rootfs = "%s-%s.ext4" % (self.recipe, self.machine) | 146 | rootfs = "%s.ext4" % (self.image_link_name) |
133 | rootfs = os.path.join(self.deploy_dir_image, rootfs) | 147 | rootfs = os.path.join(self.deploy_dir_image, rootfs) |
134 | if not os.path.exists(rootfs): | 148 | if not os.path.exists(rootfs): |
135 | self.skipTest("%s not found" % rootfs) | 149 | self.skipTest("%s not found" % rootfs) |
@@ -149,26 +163,27 @@ SYSLINUX_TIMEOUT = "10" | |||
149 | # bootup various filesystem types, including live image(iso and hddimg) | 163 | # bootup various filesystem types, including live image(iso and hddimg) |
150 | # where live image was not supported on all qemu architecture. | 164 | # where live image was not supported on all qemu architecture. |
151 | @OETestTag("machine") | 165 | @OETestTag("machine") |
166 | @OETestTag("runqemu") | ||
152 | class QemuTest(OESelftestTestCase): | 167 | class QemuTest(OESelftestTestCase): |
153 | 168 | ||
154 | @classmethod | 169 | @classmethod |
155 | def setUpClass(cls): | 170 | def setUpClass(cls): |
156 | super(QemuTest, cls).setUpClass() | 171 | super(QemuTest, cls).setUpClass() |
157 | cls.recipe = 'core-image-minimal' | 172 | cls.recipe = 'core-image-minimal' |
158 | cls.machine = get_bb_var('MACHINE') | 173 | cls.machine = get_bb_var('MACHINE') |
159 | cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | 174 | cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') |
175 | cls.image_link_name = get_bb_var('IMAGE_LINK_NAME', cls.recipe) | ||
160 | cls.cmd_common = "runqemu nographic" | 176 | cls.cmd_common = "runqemu nographic" |
161 | cls.qemuboot_conf = "%s-%s.qemuboot.conf" % (cls.recipe, cls.machine) | 177 | cls.qemuboot_conf = "%s.qemuboot.conf" % (cls.image_link_name) |
162 | cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf) | 178 | cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf) |
163 | bitbake(cls.recipe) | 179 | bitbake(cls.recipe) |
164 | 180 | ||
165 | def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout): | 181 | def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout): |
182 | # Allow the runner's LoggingThread instance to exit without errors | ||
183 | # (such as the exception "Console connection closed unexpectedly") | ||
184 | # as qemu will disappear when we shut it down | ||
185 | qemu.runner.allowexit() | ||
166 | qemu.run_serial("shutdown -h now") | 186 | qemu.run_serial("shutdown -h now") |
167 | # Stop thread will stop the LoggingThread instance used for logging | ||
168 | # qemu through serial console, stop thread will prevent this code | ||
169 | # from facing exception (Console connection closed unexpectedly) | ||
170 | # when qemu was shutdown by the above shutdown command | ||
171 | qemu.runner.stop_thread() | ||
172 | time_track = 0 | 187 | time_track = 0 |
173 | try: | 188 | try: |
174 | while True: | 189 | while True: |
@@ -190,22 +205,12 @@ class QemuTest(OESelftestTestCase): | |||
190 | qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) | 205 | qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) |
191 | self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) | 206 | self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) |
192 | 207 | ||
193 | # Need to have portmap/rpcbind running to allow this test to work and | 208 | def test_qemu_can_boot_nfs_and_shutdown(self): |
194 | # current autobuilder setup does not have this. | 209 | rootfs_tar = "%s.tar.bz2" % (self.image_link_name) |
195 | def disabled_test_qemu_can_boot_nfs_and_shutdown(self): | ||
196 | self.assertExists(self.qemuboot_conf) | ||
197 | bitbake('meta-ide-support') | ||
198 | rootfs_tar = "%s-%s.tar.bz2" % (self.recipe, self.machine) | ||
199 | rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar) | 210 | rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar) |
200 | self.assertExists(rootfs_tar) | 211 | self.assertExists(rootfs_tar) |
201 | tmpdir = tempfile.mkdtemp(prefix='qemu_nfs') | 212 | cmd = "%s %s" % (self.cmd_common, rootfs_tar) |
202 | tmpdir_nfs = os.path.join(tmpdir, 'nfs') | ||
203 | cmd_extract_nfs = 'runqemu-extract-sdk %s %s' % (rootfs_tar, tmpdir_nfs) | ||
204 | result = runCmd(cmd_extract_nfs) | ||
205 | self.assertEqual(0, result.status, "runqemu-extract-sdk didn't run as expected. %s" % result.output) | ||
206 | cmd = "%s nfs %s %s" % (self.cmd_common, self.qemuboot_conf, tmpdir_nfs) | ||
207 | shutdown_timeout = 120 | 213 | shutdown_timeout = 120 |
208 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: | 214 | with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: |
209 | qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) | 215 | qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) |
210 | self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) | 216 | self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) |
211 | runCmd('rm -rf %s' % tmpdir) | ||
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py index b20c5b427b..12000aac16 100644 --- a/meta/lib/oeqa/selftest/cases/runtime_test.py +++ b/meta/lib/oeqa/selftest/cases/runtime_test.py | |||
@@ -1,24 +1,20 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | from oeqa.selftest.case import OESelftestTestCase | 7 | from oeqa.selftest.case import OESelftestTestCase |
6 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu | 8 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu |
7 | from oeqa.utils.sshcontrol import SSHControl | 9 | from oeqa.core.decorator import OETestTag |
8 | import os | 10 | import os |
9 | import re | ||
10 | import tempfile | 11 | import tempfile |
11 | import shutil | ||
12 | import oe.lsb | 12 | import oe.lsb |
13 | from oeqa.core.decorator.data import skipIfNotQemu | 13 | from oeqa.core.decorator.data import skipIfNotQemu, skipIfNotMachine |
14 | 14 | ||
15 | class TestExport(OESelftestTestCase): | 15 | class TestExport(OESelftestTestCase): |
16 | 16 | ||
17 | @classmethod | 17 | @OETestTag("runqemu") |
18 | def tearDownClass(cls): | ||
19 | runCmd("rm -rf /tmp/sdk") | ||
20 | super(TestExport, cls).tearDownClass() | ||
21 | |||
22 | def test_testexport_basic(self): | 18 | def test_testexport_basic(self): |
23 | """ | 19 | """ |
24 | Summary: Check basic testexport functionality with only ping test enabled. | 20 | Summary: Check basic testexport functionality with only ping test enabled. |
@@ -29,7 +25,7 @@ class TestExport(OESelftestTestCase): | |||
29 | Author: Mariano Lopez <mariano.lopez@intel.com> | 25 | Author: Mariano Lopez <mariano.lopez@intel.com> |
30 | """ | 26 | """ |
31 | 27 | ||
32 | features = 'INHERIT += "testexport"\n' | 28 | features = 'IMAGE_CLASSES += "testexport"\n' |
33 | # These aren't the actual IP addresses but testexport class needs something defined | 29 | # These aren't the actual IP addresses but testexport class needs something defined |
34 | features += 'TEST_SERVER_IP = "192.168.7.1"\n' | 30 | features += 'TEST_SERVER_IP = "192.168.7.1"\n' |
35 | features += 'TEST_TARGET_IP = "192.168.7.1"\n' | 31 | features += 'TEST_TARGET_IP = "192.168.7.1"\n' |
@@ -70,7 +66,7 @@ class TestExport(OESelftestTestCase): | |||
70 | Author: Mariano Lopez <mariano.lopez@intel.com> | 66 | Author: Mariano Lopez <mariano.lopez@intel.com> |
71 | """ | 67 | """ |
72 | 68 | ||
73 | features = 'INHERIT += "testexport"\n' | 69 | features = 'IMAGE_CLASSES += "testexport"\n' |
74 | # These aren't the actual IP addresses but testexport class needs something defined | 70 | # These aren't the actual IP addresses but testexport class needs something defined |
75 | features += 'TEST_SERVER_IP = "192.168.7.1"\n' | 71 | features += 'TEST_SERVER_IP = "192.168.7.1"\n' |
76 | features += 'TEST_TARGET_IP = "192.168.7.1"\n' | 72 | features += 'TEST_TARGET_IP = "192.168.7.1"\n' |
@@ -95,21 +91,23 @@ class TestExport(OESelftestTestCase): | |||
95 | msg = "Couldn't find SDK tarball: %s" % tarball_path | 91 | msg = "Couldn't find SDK tarball: %s" % tarball_path |
96 | self.assertEqual(os.path.isfile(tarball_path), True, msg) | 92 | self.assertEqual(os.path.isfile(tarball_path), True, msg) |
97 | 93 | ||
98 | # Extract SDK and run tar from SDK | 94 | with tempfile.TemporaryDirectory() as tmpdirname: |
99 | result = runCmd("%s -y -d /tmp/sdk" % tarball_path) | 95 | # Extract SDK and run tar from SDK |
100 | self.assertEqual(0, result.status, "Couldn't extract SDK") | 96 | result = runCmd("%s -y -d %s" % (tarball_path, tmpdirname)) |
97 | self.assertEqual(0, result.status, "Couldn't extract SDK") | ||
101 | 98 | ||
102 | env_script = result.output.split()[-1] | 99 | env_script = result.output.split()[-1] |
103 | result = runCmd(". %s; which tar" % env_script, shell=True) | 100 | result = runCmd(". %s; which tar" % env_script, shell=True) |
104 | self.assertEqual(0, result.status, "Couldn't setup SDK environment") | 101 | self.assertEqual(0, result.status, "Couldn't setup SDK environment") |
105 | is_sdk_tar = True if "/tmp/sdk" in result.output else False | 102 | is_sdk_tar = True if tmpdirname in result.output else False |
106 | self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment") | 103 | self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment") |
107 | 104 | ||
108 | tar_sdk = result.output | 105 | tar_sdk = result.output |
109 | result = runCmd("%s --version" % tar_sdk) | 106 | result = runCmd("%s --version" % tar_sdk) |
110 | self.assertEqual(0, result.status, "Couldn't run tar from SDK") | 107 | self.assertEqual(0, result.status, "Couldn't run tar from SDK") |
111 | 108 | ||
112 | 109 | ||
110 | @OETestTag("runqemu") | ||
113 | class TestImage(OESelftestTestCase): | 111 | class TestImage(OESelftestTestCase): |
114 | 112 | ||
115 | def test_testimage_install(self): | 113 | def test_testimage_install(self): |
@@ -123,15 +121,30 @@ class TestImage(OESelftestTestCase): | |||
123 | if get_bb_var('DISTRO') == 'poky-tiny': | 121 | if get_bb_var('DISTRO') == 'poky-tiny': |
124 | self.skipTest('core-image-full-cmdline not buildable for poky-tiny') | 122 | self.skipTest('core-image-full-cmdline not buildable for poky-tiny') |
125 | 123 | ||
126 | features = 'INHERIT += "testimage"\n' | 124 | features = 'IMAGE_CLASSES += "testimage"\n' |
127 | features += 'IMAGE_INSTALL_append = " libssl"\n' | 125 | features += 'IMAGE_INSTALL:append = " libssl"\n' |
128 | features += 'TEST_SUITES = "ping ssh selftest"\n' | 126 | features += 'TEST_SUITES = "ping ssh selftest"\n' |
129 | self.write_config(features) | 127 | self.write_config(features) |
130 | 128 | ||
131 | # Build core-image-sato and testimage | ||
132 | bitbake('core-image-full-cmdline socat') | 129 | bitbake('core-image-full-cmdline socat') |
133 | bitbake('-c testimage core-image-full-cmdline') | 130 | bitbake('-c testimage core-image-full-cmdline') |
134 | 131 | ||
132 | def test_testimage_slirp(self): | ||
133 | """ | ||
134 | Summary: Check basic testimage functionality with qemu and slirp networking. | ||
135 | """ | ||
136 | |||
137 | features = ''' | ||
138 | IMAGE_CLASSES:append = " testimage" | ||
139 | IMAGE_FEATURES:append = " ssh-server-dropbear" | ||
140 | IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("IMAGE_CLASSES", "testimage", " + 5120", "", d)}" | ||
141 | TEST_RUNQEMUPARAMS += " slirp" | ||
142 | ''' | ||
143 | self.write_config(features) | ||
144 | |||
145 | bitbake('core-image-minimal') | ||
146 | bitbake('-c testimage core-image-minimal') | ||
147 | |||
135 | def test_testimage_dnf(self): | 148 | def test_testimage_dnf(self): |
136 | """ | 149 | """ |
137 | Summary: Check package feeds functionality for dnf | 150 | Summary: Check package feeds functionality for dnf |
@@ -142,7 +155,7 @@ class TestImage(OESelftestTestCase): | |||
142 | if get_bb_var('DISTRO') == 'poky-tiny': | 155 | if get_bb_var('DISTRO') == 'poky-tiny': |
143 | self.skipTest('core-image-full-cmdline not buildable for poky-tiny') | 156 | self.skipTest('core-image-full-cmdline not buildable for poky-tiny') |
144 | 157 | ||
145 | features = 'INHERIT += "testimage"\n' | 158 | features = 'IMAGE_CLASSES += "testimage"\n' |
146 | features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n' | 159 | features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n' |
147 | # We don't yet know what the server ip and port will be - they will be patched | 160 | # We don't yet know what the server ip and port will be - they will be patched |
148 | # in at the start of the on-image test | 161 | # in at the start of the on-image test |
@@ -164,10 +177,49 @@ class TestImage(OESelftestTestCase): | |||
164 | features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home | 177 | features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home |
165 | self.write_config(features) | 178 | self.write_config(features) |
166 | 179 | ||
180 | bitbake('core-image-full-cmdline socat') | ||
181 | bitbake('-c testimage core-image-full-cmdline') | ||
182 | |||
183 | def test_testimage_apt(self): | ||
184 | """ | ||
185 | Summary: Check package feeds functionality for apt | ||
186 | Expected: 1. Check that remote package feeds can be accessed | ||
187 | Product: oe-core | ||
188 | Author: Ferry Toth <fntoth@gmail.com> | ||
189 | """ | ||
190 | if get_bb_var('DISTRO') == 'poky-tiny': | ||
191 | self.skipTest('core-image-full-cmdline not buildable for poky-tiny') | ||
192 | |||
193 | features = 'IMAGE_CLASSES += "testimage"\n' | ||
194 | features += 'TEST_SUITES = "ping ssh apt.AptRepoTest.test_apt_install_from_repo"\n' | ||
195 | # We don't yet know what the server ip and port will be - they will be patched | ||
196 | # in at the start of the on-image test | ||
197 | features += 'PACKAGE_FEED_URIS = "http://bogus_ip:bogus_port"\n' | ||
198 | features += 'EXTRA_IMAGE_FEATURES += "package-management"\n' | ||
199 | features += 'PACKAGE_CLASSES = "package_deb"\n' | ||
200 | # We need gnupg on the target to install keys | ||
201 | features += 'IMAGE_INSTALL:append:pn-core-image-full-cmdline = " gnupg"\n' | ||
202 | |||
203 | bitbake('gnupg-native -c addto_recipe_sysroot') | ||
204 | |||
205 | # Enable package feed signing | ||
206 | self.gpg_home = tempfile.mkdtemp(prefix="oeqa-feed-sign-") | ||
207 | self.track_for_cleanup(self.gpg_home) | ||
208 | signing_key_dir = os.path.join(self.testlayer_path, 'files', 'signing') | ||
209 | runCmd('gpgconf --list-dirs --homedir %s; gpg -v --batch --homedir %s --import %s' % (self.gpg_home, self.gpg_home, os.path.join(signing_key_dir, 'key.secret')), native_sysroot=get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native"), shell=True) | ||
210 | features += 'INHERIT += "sign_package_feed"\n' | ||
211 | features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n' | ||
212 | features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase') | ||
213 | features += 'GPG_PATH = "%s"\n' % self.gpg_home | ||
214 | features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home | ||
215 | self.write_config(features) | ||
216 | |||
167 | # Build core-image-sato and testimage | 217 | # Build core-image-sato and testimage |
168 | bitbake('core-image-full-cmdline socat') | 218 | bitbake('core-image-full-cmdline socat') |
169 | bitbake('-c testimage core-image-full-cmdline') | 219 | bitbake('-c testimage core-image-full-cmdline') |
170 | 220 | ||
221 | # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14966 | ||
222 | @skipIfNotMachine("qemux86-64", "test needs qemux86-64") | ||
171 | def test_testimage_virgl_gtk_sdl(self): | 223 | def test_testimage_virgl_gtk_sdl(self): |
172 | """ | 224 | """ |
173 | Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends | 225 | Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends |
@@ -190,25 +242,26 @@ class TestImage(OESelftestTestCase): | |||
190 | 242 | ||
191 | qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native') | 243 | qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native') |
192 | qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') | 244 | qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') |
193 | features = 'INHERIT += "testimage"\n' | 245 | features = 'IMAGE_CLASSES += "testimage"\n' |
194 | if 'gtk+' not in qemu_packageconfig: | 246 | if 'gtk+' not in qemu_packageconfig: |
195 | features += 'PACKAGECONFIG_append_pn-qemu-system-native = " gtk+"\n' | 247 | features += 'PACKAGECONFIG:append:pn-qemu-system-native = " gtk+"\n' |
196 | if 'sdl' not in qemu_packageconfig: | 248 | if 'sdl' not in qemu_packageconfig: |
197 | features += 'PACKAGECONFIG_append_pn-qemu-system-native = " sdl"\n' | 249 | features += 'PACKAGECONFIG:append:pn-qemu-system-native = " sdl"\n' |
198 | if 'opengl' not in qemu_distrofeatures: | 250 | if 'opengl' not in qemu_distrofeatures: |
199 | features += 'DISTRO_FEATURES_append = " opengl"\n' | 251 | features += 'DISTRO_FEATURES:append = " opengl"\n' |
200 | features += 'TEST_SUITES = "ping ssh virgl"\n' | 252 | features += 'TEST_SUITES = "ping ssh virgl"\n' |
201 | features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n' | 253 | features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n' |
202 | features += 'IMAGE_INSTALL_append = " kmscube"\n' | 254 | features += 'IMAGE_INSTALL:append = " kmscube"\n' |
203 | features_gtk = features + 'TEST_RUNQEMUPARAMS = "gtk gl"\n' | 255 | features_gtk = features + 'TEST_RUNQEMUPARAMS += " gtk gl"\n' |
204 | self.write_config(features_gtk) | 256 | self.write_config(features_gtk) |
205 | bitbake('core-image-minimal') | 257 | bitbake('core-image-minimal') |
206 | bitbake('-c testimage core-image-minimal') | 258 | bitbake('-c testimage core-image-minimal') |
207 | features_sdl = features + 'TEST_RUNQEMUPARAMS = "sdl gl"\n' | 259 | features_sdl = features + 'TEST_RUNQEMUPARAMS += " sdl gl"\n' |
208 | self.write_config(features_sdl) | 260 | self.write_config(features_sdl) |
209 | bitbake('core-image-minimal') | 261 | bitbake('core-image-minimal') |
210 | bitbake('-c testimage core-image-minimal') | 262 | bitbake('-c testimage core-image-minimal') |
211 | 263 | ||
264 | @skipIfNotMachine("qemux86-64", "test needs qemux86-64") | ||
212 | def test_testimage_virgl_headless(self): | 265 | def test_testimage_virgl_headless(self): |
213 | """ | 266 | """ |
214 | Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend | 267 | Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend |
@@ -218,28 +271,25 @@ class TestImage(OESelftestTestCase): | |||
218 | Author: Alexander Kanavin <alex.kanavin@gmail.com> | 271 | Author: Alexander Kanavin <alex.kanavin@gmail.com> |
219 | """ | 272 | """ |
220 | import subprocess, os | 273 | import subprocess, os |
221 | try: | 274 | |
222 | content = os.listdir("/dev/dri") | 275 | distro = oe.lsb.distro_identifier() |
223 | if len([i for i in content if i.startswith('render')]) == 0: | 276 | if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04'] or |
224 | self.skipTest("No render nodes found in /dev/dri: %s" %(content)) | 277 | distro.startswith('almalinux') or distro.startswith('rocky')): |
225 | except FileNotFoundError: | 278 | self.skipTest('virgl headless cannot be tested with %s' %(distro)) |
226 | self.skipTest("/dev/dri directory does not exist; no render nodes available on this machine.") | 279 | |
227 | try: | ||
228 | dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True) | ||
229 | except subprocess.CalledProcessError as e: | ||
230 | self.skipTest("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.") | ||
231 | qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') | 280 | qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') |
232 | features = 'INHERIT += "testimage"\n' | 281 | features = 'IMAGE_CLASSES += "testimage"\n' |
233 | if 'opengl' not in qemu_distrofeatures: | 282 | if 'opengl' not in qemu_distrofeatures: |
234 | features += 'DISTRO_FEATURES_append = " opengl"\n' | 283 | features += 'DISTRO_FEATURES:append = " opengl"\n' |
235 | features += 'TEST_SUITES = "ping ssh virgl"\n' | 284 | features += 'TEST_SUITES = "ping ssh virgl"\n' |
236 | features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n' | 285 | features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n' |
237 | features += 'IMAGE_INSTALL_append = " kmscube"\n' | 286 | features += 'IMAGE_INSTALL:append = " kmscube"\n' |
238 | features += 'TEST_RUNQEMUPARAMS = "egl-headless"\n' | 287 | features += 'TEST_RUNQEMUPARAMS += " egl-headless"\n' |
239 | self.write_config(features) | 288 | self.write_config(features) |
240 | bitbake('core-image-minimal') | 289 | bitbake('core-image-minimal') |
241 | bitbake('-c testimage core-image-minimal') | 290 | bitbake('-c testimage core-image-minimal') |
242 | 291 | ||
292 | @OETestTag("runqemu") | ||
243 | class Postinst(OESelftestTestCase): | 293 | class Postinst(OESelftestTestCase): |
244 | 294 | ||
245 | def init_manager_loop(self, init_manager): | 295 | def init_manager_loop(self, init_manager): |
@@ -260,7 +310,7 @@ class Postinst(OESelftestTestCase): | |||
260 | features += 'IMAGE_FEATURES += "package-management empty-root-password"\n' | 310 | features += 'IMAGE_FEATURES += "package-management empty-root-password"\n' |
261 | features += 'PACKAGE_CLASSES = "%s"\n' % classes | 311 | features += 'PACKAGE_CLASSES = "%s"\n' % classes |
262 | if init_manager == "systemd": | 312 | if init_manager == "systemd": |
263 | features += 'DISTRO_FEATURES_append = " systemd"\n' | 313 | features += 'DISTRO_FEATURES:append = " systemd usrmerge"\n' |
264 | features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n' | 314 | features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n' |
265 | features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n' | 315 | features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n' |
266 | features += 'VIRTUAL-RUNTIME_initscripts = ""\n' | 316 | features += 'VIRTUAL-RUNTIME_initscripts = ""\n' |
@@ -280,7 +330,7 @@ class Postinst(OESelftestTestCase): | |||
280 | 330 | ||
281 | 331 | ||
282 | 332 | ||
283 | @skipIfNotQemu('qemuall', 'Test only runs in qemu') | 333 | @skipIfNotQemu() |
284 | def test_postinst_rootfs_and_boot_sysvinit(self): | 334 | def test_postinst_rootfs_and_boot_sysvinit(self): |
285 | """ | 335 | """ |
286 | Summary: The purpose of this test case is to verify Post-installation | 336 | Summary: The purpose of this test case is to verify Post-installation |
@@ -301,7 +351,7 @@ class Postinst(OESelftestTestCase): | |||
301 | self.init_manager_loop("sysvinit") | 351 | self.init_manager_loop("sysvinit") |
302 | 352 | ||
303 | 353 | ||
304 | @skipIfNotQemu('qemuall', 'Test only runs in qemu') | 354 | @skipIfNotQemu() |
305 | def test_postinst_rootfs_and_boot_systemd(self): | 355 | def test_postinst_rootfs_and_boot_systemd(self): |
306 | """ | 356 | """ |
307 | Summary: The purpose of this test case is to verify Post-installation | 357 | Summary: The purpose of this test case is to verify Post-installation |
@@ -357,6 +407,7 @@ class Postinst(OESelftestTestCase): | |||
357 | self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")), | 407 | self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")), |
358 | "rootfs-after-failure file was created") | 408 | "rootfs-after-failure file was created") |
359 | 409 | ||
410 | @OETestTag("runqemu") | ||
360 | class SystemTap(OESelftestTestCase): | 411 | class SystemTap(OESelftestTestCase): |
361 | """ | 412 | """ |
362 | Summary: The purpose of this test case is to verify native crosstap | 413 | Summary: The purpose of this test case is to verify native crosstap |
@@ -377,14 +428,14 @@ TEST_SERVER_IP = "192.168.7.1" | |||
377 | TEST_TARGET_IP = "192.168.7.2" | 428 | TEST_TARGET_IP = "192.168.7.2" |
378 | 429 | ||
379 | EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs" | 430 | EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs" |
380 | IMAGE_FEATURES_append = " ssh-server-dropbear" | 431 | IMAGE_FEATURES:append = " ssh-server-dropbear" |
381 | 432 | ||
382 | # enables kernel debug symbols | 433 | # enables kernel debug symbols |
383 | KERNEL_EXTRA_FEATURES_append = " features/debug/debug-kernel.scc" | 434 | KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc" |
384 | KERNEL_EXTRA_FEATURES_append = " features/systemtap/systemtap.scc" | 435 | KERNEL_EXTRA_FEATURES:append = " features/systemtap/systemtap.scc" |
385 | 436 | ||
386 | # add systemtap run-time into target image if it is not there yet | 437 | # add systemtap run-time into target image if it is not there yet |
387 | IMAGE_INSTALL_append = " systemtap-runtime" | 438 | IMAGE_INSTALL:append = " systemtap-runtime" |
388 | """ | 439 | """ |
389 | 440 | ||
390 | def test_crosstap_helloworld(self): | 441 | def test_crosstap_helloworld(self): |
@@ -433,4 +484,3 @@ IMAGE_INSTALL_append = " systemtap-runtime" | |||
433 | cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples | 484 | cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples |
434 | result = runCmd(cmd) | 485 | result = runCmd(cmd) |
435 | self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output) | 486 | self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output) |
436 | |||
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py new file mode 100644 index 0000000000..ad14189c6d --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/rust.py | |||
@@ -0,0 +1,231 @@ | |||
1 | # SPDX-License-Identifier: MIT | ||
2 | import os | ||
3 | import subprocess | ||
4 | import time | ||
5 | from oeqa.core.decorator import OETestTag | ||
6 | from oeqa.core.case import OEPTestResultTestCase | ||
7 | from oeqa.selftest.case import OESelftestTestCase | ||
8 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu, Command | ||
9 | from oeqa.utils.sshcontrol import SSHControl | ||
10 | |||
11 | def parse_results(filename): | ||
12 | tests = {} | ||
13 | with open(filename, "r") as f: | ||
14 | lines = f.readlines() | ||
15 | for line in lines: | ||
16 | if "..." in line and "test [" in line: | ||
17 | test = line.split("test ")[1].split(" ... ")[0] | ||
18 | if "] " in test: | ||
19 | test = test.split("] ", 1)[1] | ||
20 | result = line.split(" ... ")[1].strip() | ||
21 | if result == "ok": | ||
22 | result = "PASS" | ||
23 | elif result == "failed": | ||
24 | result = "FAIL" | ||
25 | elif "ignored" in result: | ||
26 | result = "SKIPPED" | ||
27 | if test in tests: | ||
28 | if tests[test] != result: | ||
29 | print("Duplicate and mismatching result %s for %s" % (result, test)) | ||
30 | else: | ||
31 | print("Duplicate result %s for %s" % (result, test)) | ||
32 | else: | ||
33 | tests[test] = result | ||
34 | return tests | ||
35 | |||
36 | # Total time taken for testing is about 2hr 20min, with PARALLEL_MAKE set to 40 jobs. | ||
37 | @OETestTag("toolchain-system") | ||
38 | @OETestTag("toolchain-user") | ||
39 | @OETestTag("runqemu") | ||
40 | class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase): | ||
41 | def test_rust(self, *args, **kwargs): | ||
42 | # Disable Rust Oe-selftest | ||
43 | #self.skipTest("The Rust Oe-selftest is disabled.") | ||
44 | |||
45 | # Skip mips32 target since it is unstable with rust tests | ||
46 | machine = get_bb_var('MACHINE') | ||
47 | if machine == "qemumips": | ||
48 | self.skipTest("The mips32 target is skipped for Rust Oe-selftest.") | ||
49 | |||
50 | # build remote-test-server before image build | ||
51 | recipe = "rust" | ||
52 | start_time = time.time() | ||
53 | bitbake("{} -c test_compile".format(recipe)) | ||
54 | builddir = get_bb_var("RUSTSRC", "rust") | ||
55 | # build core-image-minimal with required packages | ||
56 | default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"] | ||
57 | features = [] | ||
58 | features.append('IMAGE_FEATURES += "ssh-server-dropbear"') | ||
59 | features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages))) | ||
60 | self.write_config("\n".join(features)) | ||
61 | bitbake("core-image-minimal") | ||
62 | |||
63 | # Exclude the test folders that error out while building | ||
64 | # TODO: Fix the errors and include them for testing | ||
65 | # no-fail-fast: Run all tests regardless of failure. | ||
66 | # bless: First runs rustfmt to format the codebase, | ||
67 | # then runs tidy checks. | ||
68 | exclude_list = [ | ||
69 | 'compiler/rustc', | ||
70 | 'compiler/rustc_interface/src/tests.rs', | ||
71 | 'library/panic_abort', | ||
72 | 'library/panic_unwind', | ||
73 | 'library/test/src/stats/tests.rs', | ||
74 | 'src/bootstrap/builder/tests.rs', | ||
75 | 'src/doc/rustc', | ||
76 | 'src/doc/rustdoc', | ||
77 | 'src/doc/unstable-book', | ||
78 | 'src/librustdoc', | ||
79 | 'src/rustdoc-json-types', | ||
80 | 'src/tools/compiletest/src/common.rs', | ||
81 | 'src/tools/lint-docs', | ||
82 | 'src/tools/rust-analyzer', | ||
83 | 'src/tools/rustdoc-themes', | ||
84 | 'src/tools/tidy', | ||
85 | 'tests/assembly/asm/aarch64-outline-atomics.rs', | ||
86 | 'tests/codegen/abi-main-signature-32bit-c-int.rs', | ||
87 | 'tests/codegen/abi-repr-ext.rs', | ||
88 | 'tests/codegen/abi-x86-interrupt.rs', | ||
89 | 'tests/codegen/branch-protection.rs', | ||
90 | 'tests/codegen/catch-unwind.rs', | ||
91 | 'tests/codegen/cf-protection.rs', | ||
92 | 'tests/codegen/enum-bounds-check-derived-idx.rs', | ||
93 | 'tests/codegen/force-unwind-tables.rs', | ||
94 | 'tests/codegen/intrinsic-no-unnamed-attr.rs', | ||
95 | 'tests/codegen/issues/issue-103840.rs', | ||
96 | 'tests/codegen/issues/issue-47278.rs', | ||
97 | 'tests/codegen/issues/issue-73827-bounds-check-index-in-subexpr.rs', | ||
98 | 'tests/codegen/lifetime_start_end.rs', | ||
99 | 'tests/codegen/local-generics-in-exe-internalized.rs', | ||
100 | 'tests/codegen/match-unoptimized.rs', | ||
101 | 'tests/codegen/noalias-rwlockreadguard.rs', | ||
102 | 'tests/codegen/non-terminate/nonempty-infinite-loop.rs', | ||
103 | 'tests/codegen/noreturn-uninhabited.rs', | ||
104 | 'tests/codegen/repr-transparent-aggregates-3.rs', | ||
105 | 'tests/codegen/riscv-abi/call-llvm-intrinsics.rs', | ||
106 | 'tests/codegen/riscv-abi/riscv64-lp64f-lp64d-abi.rs', | ||
107 | 'tests/codegen/riscv-abi/riscv64-lp64d-abi.rs', | ||
108 | 'tests/codegen/sse42-implies-crc32.rs', | ||
109 | 'tests/codegen/thread-local.rs', | ||
110 | 'tests/codegen/uninit-consts.rs', | ||
111 | 'tests/pretty/raw-str-nonexpr.rs', | ||
112 | 'tests/run-make', | ||
113 | 'tests/run-make-fulldeps', | ||
114 | 'tests/rustdoc', | ||
115 | 'tests/rustdoc-json', | ||
116 | 'tests/rustdoc-js-std', | ||
117 | 'tests/rustdoc-ui/cfg-test.rs', | ||
118 | 'tests/rustdoc-ui/check-cfg-test.rs', | ||
119 | 'tests/rustdoc-ui/display-output.rs', | ||
120 | 'tests/rustdoc-ui/doc-comment-multi-line-attr.rs', | ||
121 | 'tests/rustdoc-ui/doc-comment-multi-line-cfg-attr.rs', | ||
122 | 'tests/rustdoc-ui/doc-test-doctest-feature.rs', | ||
123 | 'tests/rustdoc-ui/doctest-multiline-crate-attribute.rs', | ||
124 | 'tests/rustdoc-ui/doctest-output.rs', | ||
125 | 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs', | ||
126 | 'tests/rustdoc-ui/failed-doctest-compile-fail.rs', | ||
127 | 'tests/rustdoc-ui/issue-80992.rs', | ||
128 | 'tests/rustdoc-ui/issue-91134.rs', | ||
129 | 'tests/rustdoc-ui/nocapture-fail.rs', | ||
130 | 'tests/rustdoc-ui/nocapture.rs', | ||
131 | 'tests/rustdoc-ui/no-run-flag.rs', | ||
132 | 'tests/rustdoc-ui/run-directory.rs', | ||
133 | 'tests/rustdoc-ui/test-no_std.rs', | ||
134 | 'tests/rustdoc-ui/test-type.rs', | ||
135 | 'tests/rustdoc/unit-return.rs', | ||
136 | 'tests/ui/abi/stack-probes-lto.rs', | ||
137 | 'tests/ui/abi/stack-probes.rs', | ||
138 | 'tests/ui/array-slice-vec/subslice-patterns-const-eval-match.rs', | ||
139 | 'tests/ui/asm/x86_64/sym.rs', | ||
140 | 'tests/ui/associated-type-bounds/fn-apit.rs', | ||
141 | 'tests/ui/associated-type-bounds/fn-dyn-apit.rs', | ||
142 | 'tests/ui/associated-type-bounds/fn-wrap-apit.rs', | ||
143 | 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs', | ||
144 | 'tests/ui/drop/dynamic-drop.rs', | ||
145 | 'tests/ui/empty_global_asm.rs', | ||
146 | 'tests/ui/functions-closures/fn-help-with-err.rs', | ||
147 | 'tests/ui/linkage-attr/issue-10755.rs', | ||
148 | 'tests/ui/macros/restricted-shadowing-legacy.rs', | ||
149 | 'tests/ui/process/nofile-limit.rs', | ||
150 | 'tests/ui/process/process-panic-after-fork.rs', | ||
151 | 'tests/ui/process/process-sigpipe.rs', | ||
152 | 'tests/ui/simd/target-feature-mixup.rs', | ||
153 | 'tests/ui/structs-enums/multiple-reprs.rs', | ||
154 | 'src/tools/jsondoclint', | ||
155 | 'src/tools/replace-version-placeholder', | ||
156 | 'tests/codegen/abi-efiapi.rs', | ||
157 | 'tests/codegen/abi-sysv64.rs', | ||
158 | 'tests/codegen/align-byval.rs', | ||
159 | 'tests/codegen/align-fn.rs', | ||
160 | 'tests/codegen/asm-powerpc-clobbers.rs', | ||
161 | 'tests/codegen/async-fn-debug-awaitee-field.rs', | ||
162 | 'tests/codegen/binary-search-index-no-bound-check.rs', | ||
163 | 'tests/codegen/call-metadata.rs', | ||
164 | 'tests/codegen/debug-column.rs', | ||
165 | 'tests/codegen/debug-limited.rs', | ||
166 | 'tests/codegen/debuginfo-generic-closure-env-names.rs', | ||
167 | 'tests/codegen/drop.rs', | ||
168 | 'tests/codegen/dst-vtable-align-nonzero.rs', | ||
169 | 'tests/codegen/enable-lto-unit-splitting.rs', | ||
170 | 'tests/codegen/enum/enum-u128.rs', | ||
171 | 'tests/codegen/fn-impl-trait-self.rs', | ||
172 | 'tests/codegen/inherit_overflow.rs', | ||
173 | 'tests/codegen/inline-function-args-debug-info.rs', | ||
174 | 'tests/codegen/intrinsics/mask.rs', | ||
175 | 'tests/codegen/intrinsics/transmute-niched.rs', | ||
176 | 'tests/codegen/issues/issue-73258.rs', | ||
177 | 'tests/codegen/issues/issue-75546.rs', | ||
178 | 'tests/codegen/issues/issue-77812.rs', | ||
179 | 'tests/codegen/issues/issue-98156-const-arg-temp-lifetime.rs', | ||
180 | 'tests/codegen/llvm-ident.rs', | ||
181 | 'tests/codegen/mainsubprogram.rs', | ||
182 | 'tests/codegen/move-operands.rs', | ||
183 | 'tests/codegen/repr/transparent-mips64.rs', | ||
184 | 'tests/mir-opt/', | ||
185 | 'tests/rustdoc-json', | ||
186 | 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs', | ||
187 | 'tests/rustdoc-ui/no-run-flag.rs', | ||
188 | 'tests/ui-fulldeps/', | ||
189 | 'tests/ui/numbers-arithmetic/u128.rs' | ||
190 | ] | ||
191 | |||
192 | exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list]) | ||
193 | # Add exclude_fail_tests with other test arguments | ||
194 | testargs = exclude_fail_tests + " --doc --no-fail-fast --bless" | ||
195 | |||
196 | # wrap the execution with a qemu instance. | ||
197 | # The qemu instance is given 512 MB of memory (-m 512) so the tests execute quickly. | ||
198 | with runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 512") as qemu: | ||
199 | # Copy remote-test-server to image through scp | ||
200 | host_sys = get_bb_var("RUST_BUILD_SYS", "rust") | ||
201 | ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root") | ||
202 | ssh.copy_to(builddir + "/build/" + host_sys + "/stage1-tools-bin/remote-test-server","~/") | ||
203 | # Execute remote-test-server on image through background ssh | ||
204 | command = '~/remote-test-server --bind 0.0.0.0:12345 -v' | ||
205 | sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
206 | # Get the values of variables. | ||
207 | tcpath = get_bb_var("TARGET_SYS", "rust") | ||
208 | targetsys = get_bb_var("RUST_TARGET_SYS", "rust") | ||
209 | rustlibpath = get_bb_var("WORKDIR", "rust") | ||
210 | tmpdir = get_bb_var("TMPDIR", "rust") | ||
211 | |||
212 | # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools. | ||
213 | cmd = " export PATH=%s/recipe-sysroot-native/usr/bin:$PATH;" % rustlibpath | ||
214 | cmd = cmd + " export TARGET_VENDOR=\"-poky\";" | ||
215 | cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, tcpath, tmpdir) | ||
216 | cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath | ||
217 | # Trigger testing. | ||
218 | cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip | ||
219 | cmd = cmd + " cd %s; python3 src/bootstrap/bootstrap.py test %s --target %s" % (builddir, testargs, targetsys) | ||
220 | retval = runCmd(cmd) | ||
221 | end_time = time.time() | ||
222 | |||
223 | resultlog = rustlibpath + "/results-log.txt" | ||
224 | with open(resultlog, "w") as f: | ||
225 | f.write(retval.output) | ||
226 | |||
227 | ptestsuite = "rust" | ||
228 | self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile=resultlog) | ||
229 | test_results = parse_results(resultlog) | ||
230 | for test in test_results: | ||
231 | self.ptest_result(ptestsuite, test, test_results[test]) | ||
diff --git a/meta/lib/oeqa/selftest/cases/selftest.py b/meta/lib/oeqa/selftest/cases/selftest.py index af080dcf03..a80a8651a5 100644 --- a/meta/lib/oeqa/selftest/cases/selftest.py +++ b/meta/lib/oeqa/selftest/cases/selftest.py | |||
@@ -1,9 +1,10 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | import importlib | 7 | import importlib |
6 | from oeqa.utils.commands import runCmd | ||
7 | import oeqa.selftest | 8 | import oeqa.selftest |
8 | from oeqa.selftest.case import OESelftestTestCase | 9 | from oeqa.selftest.case import OESelftestTestCase |
9 | 10 | ||
diff --git a/meta/lib/oeqa/selftest/cases/signing.py b/meta/lib/oeqa/selftest/cases/signing.py index a28c7eb19a..18cce0ba25 100644 --- a/meta/lib/oeqa/selftest/cases/signing.py +++ b/meta/lib/oeqa/selftest/cases/signing.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -145,7 +147,7 @@ class Signing(OESelftestTestCase): | |||
145 | feature += 'GPG_PATH = "%s"\n' % self.gpg_dir | 147 | feature += 'GPG_PATH = "%s"\n' % self.gpg_dir |
146 | feature += 'SSTATE_DIR = "%s"\n' % sstatedir | 148 | feature += 'SSTATE_DIR = "%s"\n' % sstatedir |
147 | # Any mirror might have partial sstate without .sig files, triggering failures | 149 | # Any mirror might have partial sstate without .sig files, triggering failures |
148 | feature += 'SSTATE_MIRRORS_forcevariable = ""\n' | 150 | feature += 'SSTATE_MIRRORS:forcevariable = ""\n' |
149 | 151 | ||
150 | self.write_config(feature) | 152 | self.write_config(feature) |
151 | 153 | ||
@@ -159,13 +161,13 @@ class Signing(OESelftestTestCase): | |||
159 | bitbake('-c clean %s' % test_recipe) | 161 | bitbake('-c clean %s' % test_recipe) |
160 | bitbake('-c populate_lic %s' % test_recipe) | 162 | bitbake('-c populate_lic %s' % test_recipe) |
161 | 163 | ||
162 | recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz.sig') | 164 | recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst.sig') |
163 | recipe_tgz = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz') | 165 | recipe_archive = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst') |
164 | 166 | ||
165 | self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.') | 167 | self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.') |
166 | self.assertEqual(len(recipe_tgz), 1, 'Failed to find .tgz file.') | 168 | self.assertEqual(len(recipe_archive), 1, 'Failed to find .tar.zst file.') |
167 | 169 | ||
168 | ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_tgz[0])) | 170 | ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_archive[0])) |
169 | # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30 | 171 | # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30 |
170 | # gpg: Good signature from "testuser (nocomment) <testuser@email.com>" | 172 | # gpg: Good signature from "testuser (nocomment) <testuser@email.com>" |
171 | self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.') | 173 | self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.') |
@@ -189,7 +191,7 @@ class LockedSignatures(OESelftestTestCase): | |||
189 | 191 | ||
190 | bitbake(test_recipe) | 192 | bitbake(test_recipe) |
191 | # Generate locked sigs include file | 193 | # Generate locked sigs include file |
192 | bitbake('-S none %s' % test_recipe) | 194 | bitbake('-S lockedsigs %s' % test_recipe) |
193 | 195 | ||
194 | feature = 'require %s\n' % locked_sigs_file | 196 | feature = 'require %s\n' % locked_sigs_file |
195 | feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n' | 197 | feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n' |
@@ -206,7 +208,7 @@ class LockedSignatures(OESelftestTestCase): | |||
206 | # Use uuid so hash equivalence server isn't triggered | 208 | # Use uuid so hash equivalence server isn't triggered |
207 | recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend' | 209 | recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend' |
208 | recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file) | 210 | recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file) |
209 | feature = 'SUMMARY_${PN} = "test locked signature%s"\n' % uuid.uuid4() | 211 | feature = 'SUMMARY:${PN} = "test locked signature%s"\n' % uuid.uuid4() |
210 | 212 | ||
211 | os.mkdir(os.path.join(templayerdir, 'recipes-test')) | 213 | os.mkdir(os.path.join(templayerdir, 'recipes-test')) |
212 | os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe)) | 214 | os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe)) |
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py new file mode 100644 index 0000000000..05fc4e390b --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/spdx.py | |||
@@ -0,0 +1,54 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | import json | ||
8 | import os | ||
9 | from oeqa.selftest.case import OESelftestTestCase | ||
10 | from oeqa.utils.commands import bitbake, get_bb_var, runCmd | ||
11 | |||
12 | class SPDXCheck(OESelftestTestCase): | ||
13 | |||
14 | @classmethod | ||
15 | def setUpClass(cls): | ||
16 | super(SPDXCheck, cls).setUpClass() | ||
17 | bitbake("python3-spdx-tools-native") | ||
18 | bitbake("-c addto_recipe_sysroot python3-spdx-tools-native") | ||
19 | |||
20 | def check_recipe_spdx(self, high_level_dir, spdx_file, target_name): | ||
21 | config = """ | ||
22 | INHERIT += "create-spdx" | ||
23 | """ | ||
24 | self.write_config(config) | ||
25 | |||
26 | deploy_dir = get_bb_var("DEPLOY_DIR") | ||
27 | machine_var = get_bb_var("MACHINE") | ||
28 | # qemux86-64 creates the directory qemux86_64 | ||
29 | machine_dir = machine_var.replace("-", "_") | ||
30 | |||
31 | full_file_path = os.path.join(deploy_dir, "spdx", machine_dir, high_level_dir, spdx_file) | ||
32 | |||
33 | try: | ||
34 | os.remove(full_file_path) | ||
35 | except FileNotFoundError: | ||
36 | pass | ||
37 | |||
38 | bitbake("%s -c create_spdx" % target_name) | ||
39 | |||
40 | def check_spdx_json(filename): | ||
41 | with open(filename) as f: | ||
42 | report = json.load(f) | ||
43 | self.assertNotEqual(report, None) | ||
44 | self.assertNotEqual(report["SPDXID"], None) | ||
45 | |||
46 | python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'nativepython3') | ||
47 | validator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'pyspdxtools') | ||
48 | result = runCmd("{} {} -i {}".format(python, validator, filename)) | ||
49 | |||
50 | self.assertExists(full_file_path) | ||
51 | result = check_spdx_json(full_file_path) | ||
52 | |||
53 | def test_spdx_base_files(self): | ||
54 | self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files") | ||
diff --git a/meta/lib/oeqa/selftest/cases/sstate.py b/meta/lib/oeqa/selftest/cases/sstate.py deleted file mode 100644 index 80ce9e353c..0000000000 --- a/meta/lib/oeqa/selftest/cases/sstate.py +++ /dev/null | |||
@@ -1,67 +0,0 @@ | |||
1 | # | ||
2 | # SPDX-License-Identifier: MIT | ||
3 | # | ||
4 | |||
5 | import datetime | ||
6 | import unittest | ||
7 | import os | ||
8 | import re | ||
9 | import shutil | ||
10 | |||
11 | import oeqa.utils.ftools as ftools | ||
12 | from oeqa.selftest.case import OESelftestTestCase | ||
13 | from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_test_layer | ||
14 | |||
15 | |||
16 | class SStateBase(OESelftestTestCase): | ||
17 | |||
18 | def setUpLocal(self): | ||
19 | super(SStateBase, self).setUpLocal() | ||
20 | self.temp_sstate_location = None | ||
21 | needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH', | ||
22 | 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS'] | ||
23 | bb_vars = get_bb_vars(needed_vars) | ||
24 | self.sstate_path = bb_vars['SSTATE_DIR'] | ||
25 | self.hostdistro = bb_vars['NATIVELSBSTRING'] | ||
26 | self.tclibc = bb_vars['TCLIBC'] | ||
27 | self.tune_arch = bb_vars['TUNE_ARCH'] | ||
28 | self.topdir = bb_vars['TOPDIR'] | ||
29 | self.target_vendor = bb_vars['TARGET_VENDOR'] | ||
30 | self.target_os = bb_vars['TARGET_OS'] | ||
31 | self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro) | ||
32 | |||
33 | # Creates a special sstate configuration with the option to add sstate mirrors | ||
34 | def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]): | ||
35 | self.temp_sstate_location = temp_sstate_location | ||
36 | |||
37 | if self.temp_sstate_location: | ||
38 | temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S')) | ||
39 | config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path | ||
40 | self.append_config(config_temp_sstate) | ||
41 | self.track_for_cleanup(temp_sstate_path) | ||
42 | bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING']) | ||
43 | self.sstate_path = bb_vars['SSTATE_DIR'] | ||
44 | self.hostdistro = bb_vars['NATIVELSBSTRING'] | ||
45 | self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro) | ||
46 | |||
47 | if add_local_mirrors: | ||
48 | config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""' | ||
49 | self.append_config(config_set_sstate_if_not_set) | ||
50 | for local_mirror in add_local_mirrors: | ||
51 | self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror') | ||
52 | config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror | ||
53 | self.append_config(config_sstate_mirror) | ||
54 | |||
55 | # Returns a list containing sstate files | ||
56 | def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True): | ||
57 | result = [] | ||
58 | for root, dirs, files in os.walk(self.sstate_path): | ||
59 | if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root): | ||
60 | for f in files: | ||
61 | if re.search(filename_regex, f): | ||
62 | result.append(f) | ||
63 | if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root): | ||
64 | for f in files: | ||
65 | if re.search(filename_regex, f): | ||
66 | result.append(f) | ||
67 | return result | ||
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py index c46e8ba489..86d6cd7464 100644 --- a/meta/lib/oeqa/selftest/cases/sstatetests.py +++ b/meta/lib/oeqa/selftest/cases/sstatetests.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -7,54 +9,77 @@ import shutil | |||
7 | import glob | 9 | import glob |
8 | import subprocess | 10 | import subprocess |
9 | import tempfile | 11 | import tempfile |
12 | import datetime | ||
13 | import re | ||
10 | 14 | ||
15 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer, get_bb_vars | ||
11 | from oeqa.selftest.case import OESelftestTestCase | 16 | from oeqa.selftest.case import OESelftestTestCase |
12 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer, create_temp_layer | 17 | from oeqa.core.decorator import OETestTag |
13 | from oeqa.selftest.cases.sstate import SStateBase | ||
14 | 18 | ||
19 | import oe | ||
15 | import bb.siggen | 20 | import bb.siggen |
16 | 21 | ||
17 | class SStateTests(SStateBase): | 22 | # Set to True to preserve stamp files after test execution for debugging failures |
18 | def test_autorev_sstate_works(self): | 23 | keep_temp_files = False |
19 | # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV} | 24 | |
20 | # when PV does not contain SRCPV | 25 | class SStateBase(OESelftestTestCase): |
21 | 26 | ||
22 | tempdir = tempfile.mkdtemp(prefix='sstate_autorev') | 27 | def setUpLocal(self): |
23 | tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir') | 28 | super(SStateBase, self).setUpLocal() |
24 | self.track_for_cleanup(tempdir) | 29 | self.temp_sstate_location = None |
25 | self.track_for_cleanup(tempdldir) | 30 | needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH', |
26 | create_temp_layer(tempdir, 'selftestrecipetool') | 31 | 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS'] |
27 | self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir) | 32 | bb_vars = get_bb_vars(needed_vars) |
28 | self.append_config("DL_DIR = \"%s\"" % tempdldir) | 33 | self.sstate_path = bb_vars['SSTATE_DIR'] |
29 | runCmd('bitbake-layers add-layer %s' % tempdir) | 34 | self.hostdistro = bb_vars['NATIVELSBSTRING'] |
30 | 35 | self.tclibc = bb_vars['TCLIBC'] | |
31 | # Use dbus-wait as a local git repo we can add a commit between two builds in | 36 | self.tune_arch = bb_vars['TUNE_ARCH'] |
32 | pn = 'dbus-wait' | 37 | self.topdir = bb_vars['TOPDIR'] |
33 | srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517' | 38 | self.target_vendor = bb_vars['TARGET_VENDOR'] |
34 | url = 'git://git.yoctoproject.org/dbus-wait' | 39 | self.target_os = bb_vars['TARGET_OS'] |
35 | result = runCmd('git clone %s noname' % url, cwd=tempdir) | 40 | self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro) |
36 | srcdir = os.path.join(tempdir, 'noname') | 41 | |
37 | result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir) | 42 | def track_for_cleanup(self, path): |
38 | self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory') | 43 | if not keep_temp_files: |
39 | 44 | super().track_for_cleanup(path) | |
40 | recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb') | 45 | |
41 | os.makedirs(os.path.dirname(recipefile)) | 46 | # Creates a special sstate configuration with the option to add sstate mirrors |
42 | srcuri = 'git://' + srcdir + ';protocol=file' | 47 | def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]): |
43 | result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri]) | 48 | self.temp_sstate_location = temp_sstate_location |
44 | self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) | 49 | |
45 | 50 | if self.temp_sstate_location: | |
46 | with open(recipefile, 'a') as f: | 51 | temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S')) |
47 | f.write('SRCREV = "${AUTOREV}"\n') | 52 | config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path |
48 | f.write('PV = "1.0"\n') | 53 | self.append_config(config_temp_sstate) |
49 | 54 | self.track_for_cleanup(temp_sstate_path) | |
50 | bitbake("dbus-wait-test -c fetch") | 55 | bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING']) |
51 | with open(os.path.join(srcdir, "bar.txt"), "w") as f: | 56 | self.sstate_path = bb_vars['SSTATE_DIR'] |
52 | f.write("foo") | 57 | self.hostdistro = bb_vars['NATIVELSBSTRING'] |
53 | result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir) | 58 | self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro) |
54 | bitbake("dbus-wait-test -c unpack") | 59 | |
55 | 60 | if add_local_mirrors: | |
56 | 61 | config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""' | |
57 | # Test sstate files creation and their location | 62 | self.append_config(config_set_sstate_if_not_set) |
63 | for local_mirror in add_local_mirrors: | ||
64 | self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror') | ||
65 | config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror | ||
66 | self.append_config(config_sstate_mirror) | ||
67 | |||
68 | # Returns a list containing sstate files | ||
69 | def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True): | ||
70 | result = [] | ||
71 | for root, dirs, files in os.walk(self.sstate_path): | ||
72 | if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root): | ||
73 | for f in files: | ||
74 | if re.search(filename_regex, f): | ||
75 | result.append(f) | ||
76 | if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root): | ||
77 | for f in files: | ||
78 | if re.search(filename_regex, f): | ||
79 | result.append(f) | ||
80 | return result | ||
81 | |||
82 | # Test sstate files creation and their location and directory perms | ||
58 | def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True): | 83 | def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True): |
59 | self.config_sstate(temp_sstate_location, [self.sstate_path]) | 84 | self.config_sstate(temp_sstate_location, [self.sstate_path]) |
60 | 85 | ||
@@ -63,12 +88,25 @@ class SStateTests(SStateBase): | |||
63 | else: | 88 | else: |
64 | bitbake(['-ccleansstate'] + targets) | 89 | bitbake(['-ccleansstate'] + targets) |
65 | 90 | ||
91 | # We need to test that the env umask does not affect sstate directory creation | ||
92 | # So, first, we'll get the current umask and set it to something we know incorrect | ||
93 | # See: sstate_task_postfunc for correct umask of os.umask(0o002) | ||
94 | import os | ||
95 | def current_umask(): | ||
96 | current_umask = os.umask(0) | ||
97 | os.umask(current_umask) | ||
98 | return current_umask | ||
99 | |||
100 | orig_umask = current_umask() | ||
101 | # Set it to a umask we know will be 'wrong' | ||
102 | os.umask(0o022) | ||
103 | |||
66 | bitbake(targets) | 104 | bitbake(targets) |
67 | file_tracker = [] | 105 | file_tracker = [] |
68 | results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific) | 106 | results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific) |
69 | if distro_nonspecific: | 107 | if distro_nonspecific: |
70 | for r in results: | 108 | for r in results: |
71 | if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo", "_fetch.tgz.siginfo", "_unpack.tgz.siginfo", "_patch.tgz.siginfo")): | 109 | if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo", "_fetch.tar.zst.siginfo", "_unpack.tar.zst.siginfo", "_patch.tar.zst.siginfo")): |
72 | continue | 110 | continue |
73 | file_tracker.append(r) | 111 | file_tracker.append(r) |
74 | else: | 112 | else: |
@@ -79,17 +117,18 @@ class SStateTests(SStateBase): | |||
79 | else: | 117 | else: |
80 | self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker))) | 118 | self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker))) |
81 | 119 | ||
82 | def test_sstate_creation_distro_specific_pass(self): | 120 | # Now we'll walk the tree to check the mode and see if things are incorrect. |
83 | self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) | 121 | badperms = [] |
84 | 122 | for root, dirs, files in os.walk(self.sstate_path): | |
85 | def test_sstate_creation_distro_specific_fail(self): | 123 | for directory in dirs: |
86 | self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False) | 124 | if (os.stat(os.path.join(root, directory)).st_mode & 0o777) != 0o775: |
125 | badperms.append(os.path.join(root, directory)) | ||
87 | 126 | ||
88 | def test_sstate_creation_distro_nonspecific_pass(self): | 127 | # Return to original umask |
89 | self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) | 128 | os.umask(orig_umask) |
90 | 129 | ||
91 | def test_sstate_creation_distro_nonspecific_fail(self): | 130 | if should_pass: |
92 | self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False) | 131 | self.assertTrue(badperms , msg="Found sstate directories with the wrong permissions: %s (found %s)" % (', '.join(map(str, targets)), str(badperms))) |
93 | 132 | ||
94 | # Test the sstate files deletion part of the do_cleansstate task | 133 | # Test the sstate files deletion part of the do_cleansstate task |
95 | def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True): | 134 | def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True): |
@@ -98,29 +137,15 @@ class SStateTests(SStateBase): | |||
98 | bitbake(['-ccleansstate'] + targets) | 137 | bitbake(['-ccleansstate'] + targets) |
99 | 138 | ||
100 | bitbake(targets) | 139 | bitbake(targets) |
101 | tgz_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific) | 140 | archives_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific) |
102 | self.assertTrue(tgz_created, msg="Could not find sstate .tgz files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_created))) | 141 | self.assertTrue(archives_created, msg="Could not find sstate .tar.zst files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_created))) |
103 | 142 | ||
104 | siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific) | 143 | siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific) |
105 | self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created))) | 144 | self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created))) |
106 | 145 | ||
107 | bitbake(['-ccleansstate'] + targets) | 146 | bitbake(['-ccleansstate'] + targets) |
108 | tgz_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific) | 147 | archives_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific) |
109 | self.assertTrue(not tgz_removed, msg="do_cleansstate didn't remove .tgz sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_removed))) | 148 | self.assertTrue(not archives_removed, msg="do_cleansstate didn't remove .tar.zst sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_removed))) |
110 | |||
111 | def test_cleansstate_task_distro_specific_nonspecific(self): | ||
112 | targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native'] | ||
113 | targets.append('linux-libc-headers') | ||
114 | self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True) | ||
115 | |||
116 | def test_cleansstate_task_distro_nonspecific(self): | ||
117 | self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) | ||
118 | |||
119 | def test_cleansstate_task_distro_specific(self): | ||
120 | targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native'] | ||
121 | targets.append('linux-libc-headers') | ||
122 | self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) | ||
123 | |||
124 | 149 | ||
125 | # Test rebuilding of distro-specific sstate files | 150 | # Test rebuilding of distro-specific sstate files |
126 | def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True): | 151 | def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True): |
@@ -129,15 +154,15 @@ class SStateTests(SStateBase): | |||
129 | bitbake(['-ccleansstate'] + targets) | 154 | bitbake(['-ccleansstate'] + targets) |
130 | 155 | ||
131 | bitbake(targets) | 156 | bitbake(targets) |
132 | results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=False, distro_nonspecific=True) | 157 | results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=False, distro_nonspecific=True) |
133 | filtered_results = [] | 158 | filtered_results = [] |
134 | for r in results: | 159 | for r in results: |
135 | if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo")): | 160 | if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo")): |
136 | continue | 161 | continue |
137 | filtered_results.append(r) | 162 | filtered_results.append(r) |
138 | self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results))) | 163 | self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results))) |
139 | file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False) | 164 | file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False) |
140 | self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets))) | 165 | self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets))) |
141 | 166 | ||
142 | self.track_for_cleanup(self.distro_specific_sstate + "_old") | 167 | self.track_for_cleanup(self.distro_specific_sstate + "_old") |
143 | shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old") | 168 | shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old") |
@@ -145,15 +170,114 @@ class SStateTests(SStateBase): | |||
145 | 170 | ||
146 | bitbake(['-cclean'] + targets) | 171 | bitbake(['-cclean'] + targets) |
147 | bitbake(targets) | 172 | bitbake(targets) |
148 | file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False) | 173 | file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False) |
149 | self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets))) | 174 | self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets))) |
150 | 175 | ||
151 | not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2] | 176 | not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2] |
152 | self.assertTrue(not_recreated == [], msg="The following sstate files ware not recreated: %s" % ', '.join(map(str, not_recreated))) | 177 | self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated))) |
153 | 178 | ||
154 | created_once = [x for x in file_tracker_2 if x not in file_tracker_1] | 179 | created_once = [x for x in file_tracker_2 if x not in file_tracker_1] |
155 | self.assertTrue(created_once == [], msg="The following sstate files ware created only in the second run: %s" % ', '.join(map(str, created_once))) | 180 | self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once))) |
181 | |||
182 | def sstate_common_samesigs(self, configA, configB, allarch=False): | ||
183 | |||
184 | self.write_config(configA) | ||
185 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") | ||
186 | bitbake("world meta-toolchain -S none") | ||
187 | self.write_config(configB) | ||
188 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") | ||
189 | bitbake("world meta-toolchain -S none") | ||
190 | |||
191 | def get_files(d, result): | ||
192 | for root, dirs, files in os.walk(d): | ||
193 | for name in files: | ||
194 | if "meta-environment" in root or "cross-canadian" in root: | ||
195 | continue | ||
196 | if "do_build" not in name: | ||
197 | # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79 | ||
198 | (_, task, _, shash) = name.rsplit(".", 3) | ||
199 | result[os.path.join(os.path.basename(root), task)] = shash | ||
200 | |||
201 | files1 = {} | ||
202 | files2 = {} | ||
203 | subdirs = sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")) | ||
204 | if allarch: | ||
205 | subdirs.extend(sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/all-*-linux"))) | ||
206 | |||
207 | for subdir in subdirs: | ||
208 | nativesdkdir = os.path.basename(subdir) | ||
209 | get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir, files1) | ||
210 | get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir, files2) | ||
211 | |||
212 | self.maxDiff = None | ||
213 | self.assertEqual(files1, files2) | ||
214 | |||
215 | class SStateTests(SStateBase): | ||
216 | def test_autorev_sstate_works(self): | ||
217 | # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV} | ||
156 | 218 | ||
219 | tempdir = tempfile.mkdtemp(prefix='sstate_autorev') | ||
220 | tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir') | ||
221 | self.track_for_cleanup(tempdir) | ||
222 | self.track_for_cleanup(tempdldir) | ||
223 | create_temp_layer(tempdir, 'selftestrecipetool') | ||
224 | self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir) | ||
225 | self.append_config("DL_DIR = \"%s\"" % tempdldir) | ||
226 | runCmd('bitbake-layers add-layer %s' % tempdir) | ||
227 | |||
228 | # Use dbus-wait as a local git repo we can add a commit between two builds in | ||
229 | pn = 'dbus-wait' | ||
230 | srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517' | ||
231 | url = 'git://git.yoctoproject.org/dbus-wait' | ||
232 | result = runCmd('git clone %s noname' % url, cwd=tempdir) | ||
233 | srcdir = os.path.join(tempdir, 'noname') | ||
234 | result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir) | ||
235 | self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory') | ||
236 | |||
237 | recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb') | ||
238 | os.makedirs(os.path.dirname(recipefile)) | ||
239 | srcuri = 'git://' + srcdir + ';protocol=file;branch=master' | ||
240 | result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri]) | ||
241 | self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) | ||
242 | |||
243 | with open(recipefile, 'a') as f: | ||
244 | f.write('SRCREV = "${AUTOREV}"\n') | ||
245 | f.write('PV = "1.0"\n') | ||
246 | |||
247 | bitbake("dbus-wait-test -c fetch") | ||
248 | with open(os.path.join(srcdir, "bar.txt"), "w") as f: | ||
249 | f.write("foo") | ||
250 | result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir) | ||
251 | bitbake("dbus-wait-test -c unpack") | ||
252 | |||
253 | class SStateCreation(SStateBase): | ||
254 | def test_sstate_creation_distro_specific_pass(self): | ||
255 | self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) | ||
256 | |||
257 | def test_sstate_creation_distro_specific_fail(self): | ||
258 | self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False) | ||
259 | |||
260 | def test_sstate_creation_distro_nonspecific_pass(self): | ||
261 | self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) | ||
262 | |||
263 | def test_sstate_creation_distro_nonspecific_fail(self): | ||
264 | self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False) | ||
265 | |||
266 | class SStateCleanup(SStateBase): | ||
267 | def test_cleansstate_task_distro_specific_nonspecific(self): | ||
268 | targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native'] | ||
269 | targets.append('linux-libc-headers') | ||
270 | self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True) | ||
271 | |||
272 | def test_cleansstate_task_distro_nonspecific(self): | ||
273 | self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) | ||
274 | |||
275 | def test_cleansstate_task_distro_specific(self): | ||
276 | targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native'] | ||
277 | targets.append('linux-libc-headers') | ||
278 | self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) | ||
279 | |||
280 | class SStateDistroTests(SStateBase): | ||
157 | def test_rebuild_distro_specific_sstate_cross_native_targets(self): | 281 | def test_rebuild_distro_specific_sstate_cross_native_targets(self): |
158 | self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True) | 282 | self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True) |
159 | 283 | ||
@@ -163,48 +287,48 @@ class SStateTests(SStateBase): | |||
163 | def test_rebuild_distro_specific_sstate_native_target(self): | 287 | def test_rebuild_distro_specific_sstate_native_target(self): |
164 | self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True) | 288 | self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True) |
165 | 289 | ||
166 | 290 | class SStateCacheManagement(SStateBase): | |
167 | # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list | 291 | # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list |
168 | # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.sh (such as changing the value of MACHINE) | 292 | # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.py (such as changing the value of MACHINE) |
169 | def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]): | 293 | def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]): |
170 | self.assertTrue(global_config) | 294 | self.assertTrue(global_config) |
171 | self.assertTrue(target_config) | 295 | self.assertTrue(target_config) |
172 | self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements') | 296 | self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements') |
173 | self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path]) | ||
174 | 297 | ||
175 | # If buildhistory is enabled, we need to disable version-going-backwards | 298 | for idx in range(len(target_config)): |
176 | # QA checks for this test. It may report errors otherwise. | 299 | self.append_config(global_config[idx]) |
177 | self.append_config('ERROR_QA_remove = "version-going-backwards"') | 300 | self.append_recipeinc(target, target_config[idx]) |
301 | bitbake(target) | ||
302 | self.remove_config(global_config[idx]) | ||
303 | self.remove_recipeinc(target, target_config[idx]) | ||
304 | |||
305 | self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path]) | ||
178 | 306 | ||
179 | # For not this only checks if random sstate tasks are handled correctly as a group. | 307 | # For now this only checks if random sstate tasks are handled correctly as a group. |
180 | # In the future we should add control over what tasks we check for. | 308 | # In the future we should add control over what tasks we check for. |
181 | 309 | ||
182 | sstate_archs_list = [] | ||
183 | expected_remaining_sstate = [] | 310 | expected_remaining_sstate = [] |
184 | for idx in range(len(target_config)): | 311 | for idx in range(len(target_config)): |
185 | self.append_config(global_config[idx]) | 312 | self.append_config(global_config[idx]) |
186 | self.append_recipeinc(target, target_config[idx]) | 313 | self.append_recipeinc(target, target_config[idx]) |
187 | sstate_arch = get_bb_var('SSTATE_PKGARCH', target) | ||
188 | if not sstate_arch in sstate_archs_list: | ||
189 | sstate_archs_list.append(sstate_arch) | ||
190 | if target_config[idx] == target_config[-1]: | 314 | if target_config[idx] == target_config[-1]: |
191 | target_sstate_before_build = self.search_sstate(target + r'.*?\.tgz$') | 315 | target_sstate_before_build = self.search_sstate(target + r'.*?\.tar.zst$') |
192 | bitbake("-cclean %s" % target) | 316 | bitbake("-cclean %s" % target) |
193 | result = bitbake(target, ignore_status=True) | 317 | result = bitbake(target, ignore_status=True) |
194 | if target_config[idx] == target_config[-1]: | 318 | if target_config[idx] == target_config[-1]: |
195 | target_sstate_after_build = self.search_sstate(target + r'.*?\.tgz$') | 319 | target_sstate_after_build = self.search_sstate(target + r'.*?\.tar.zst$') |
196 | expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)] | 320 | expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)] |
197 | self.remove_config(global_config[idx]) | 321 | self.remove_config(global_config[idx]) |
198 | self.remove_recipeinc(target, target_config[idx]) | 322 | self.remove_recipeinc(target, target_config[idx]) |
199 | self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output)) | 323 | self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output)) |
200 | 324 | ||
201 | runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list)))) | 325 | runCmd("sstate-cache-management.py -y --cache-dir=%s --remove-duplicated" % (self.sstate_path)) |
202 | actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)] | 326 | actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tar.zst$') if not any(pattern in x for pattern in ignore_patterns)] |
203 | 327 | ||
204 | actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate] | 328 | actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate] |
205 | self.assertFalse(actual_not_expected, msg="Files should have been removed but ware not: %s" % ', '.join(map(str, actual_not_expected))) | 329 | self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected))) |
206 | expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate] | 330 | expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate] |
207 | self.assertFalse(expected_not_actual, msg="Extra files ware removed: %s" ', '.join(map(str, expected_not_actual))) | 331 | self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" ', '.join(map(str, expected_not_actual))) |
208 | 332 | ||
209 | def test_sstate_cache_management_script_using_pr_1(self): | 333 | def test_sstate_cache_management_script_using_pr_1(self): |
210 | global_config = [] | 334 | global_config = [] |
@@ -242,6 +366,7 @@ class SStateTests(SStateBase): | |||
242 | target_config.append('') | 366 | target_config.append('') |
243 | self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic']) | 367 | self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic']) |
244 | 368 | ||
369 | class SStateHashSameSigs(SStateBase): | ||
245 | def test_sstate_32_64_same_hash(self): | 370 | def test_sstate_32_64_same_hash(self): |
246 | """ | 371 | """ |
247 | The sstate checksums for both native and target should not vary whether | 372 | The sstate checksums for both native and target should not vary whether |
@@ -261,7 +386,7 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb" | |||
261 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 386 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
262 | """) | 387 | """) |
263 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") | 388 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") |
264 | bitbake("core-image-sato -S none") | 389 | bitbake("core-image-weston -S none") |
265 | self.write_config(""" | 390 | self.write_config(""" |
266 | MACHINE = "qemux86" | 391 | MACHINE = "qemux86" |
267 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" | 392 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" |
@@ -273,12 +398,12 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb" | |||
273 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 398 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
274 | """) | 399 | """) |
275 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") | 400 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") |
276 | bitbake("core-image-sato -S none") | 401 | bitbake("core-image-weston -S none") |
277 | 402 | ||
278 | def get_files(d): | 403 | def get_files(d): |
279 | f = [] | 404 | f = [] |
280 | for root, dirs, files in os.walk(d): | 405 | for root, dirs, files in os.walk(d): |
281 | if "core-image-sato" in root: | 406 | if "core-image-weston" in root: |
282 | # SDKMACHINE changing will change | 407 | # SDKMACHINE changing will change |
283 | # do_rootfs/do_testimage/do_build stamps of images which | 408 | # do_rootfs/do_testimage/do_build stamps of images which |
284 | # is safe to ignore. | 409 | # is safe to ignore. |
@@ -306,7 +431,7 @@ NATIVELSBSTRING = \"DistroA\" | |||
306 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 431 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
307 | """) | 432 | """) |
308 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") | 433 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") |
309 | bitbake("core-image-sato -S none") | 434 | bitbake("core-image-weston -S none") |
310 | self.write_config(""" | 435 | self.write_config(""" |
311 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" | 436 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" |
312 | TCLIBCAPPEND = \"\" | 437 | TCLIBCAPPEND = \"\" |
@@ -314,7 +439,7 @@ NATIVELSBSTRING = \"DistroB\" | |||
314 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 439 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
315 | """) | 440 | """) |
316 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") | 441 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") |
317 | bitbake("core-image-sato -S none") | 442 | bitbake("core-image-weston -S none") |
318 | 443 | ||
319 | def get_files(d): | 444 | def get_files(d): |
320 | f = [] | 445 | f = [] |
@@ -327,6 +452,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
327 | self.maxDiff = None | 452 | self.maxDiff = None |
328 | self.assertCountEqual(files1, files2) | 453 | self.assertCountEqual(files1, files2) |
329 | 454 | ||
455 | class SStateHashSameSigs2(SStateBase): | ||
330 | def test_sstate_allarch_samesigs(self): | 456 | def test_sstate_allarch_samesigs(self): |
331 | """ | 457 | """ |
332 | The sstate checksums of allarch packages should be independent of whichever | 458 | The sstate checksums of allarch packages should be independent of whichever |
@@ -341,13 +467,15 @@ TCLIBCAPPEND = \"\" | |||
341 | MACHINE = \"qemux86-64\" | 467 | MACHINE = \"qemux86-64\" |
342 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 468 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
343 | """ | 469 | """ |
470 | #OLDEST_KERNEL is arch specific so set to a different value here for testing | ||
344 | configB = """ | 471 | configB = """ |
345 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" | 472 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" |
346 | TCLIBCAPPEND = \"\" | 473 | TCLIBCAPPEND = \"\" |
347 | MACHINE = \"qemuarm\" | 474 | MACHINE = \"qemuarm\" |
475 | OLDEST_KERNEL = \"3.3.0\" | ||
348 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 476 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
349 | """ | 477 | """ |
350 | self.sstate_allarch_samesigs(configA, configB) | 478 | self.sstate_common_samesigs(configA, configB, allarch=True) |
351 | 479 | ||
352 | def test_sstate_nativesdk_samesigs_multilib(self): | 480 | def test_sstate_nativesdk_samesigs_multilib(self): |
353 | """ | 481 | """ |
@@ -360,7 +488,7 @@ TCLIBCAPPEND = \"\" | |||
360 | MACHINE = \"qemux86-64\" | 488 | MACHINE = \"qemux86-64\" |
361 | require conf/multilib.conf | 489 | require conf/multilib.conf |
362 | MULTILIBS = \"multilib:lib32\" | 490 | MULTILIBS = \"multilib:lib32\" |
363 | DEFAULTTUNE_virtclass-multilib-lib32 = \"x86\" | 491 | DEFAULTTUNE:virtclass-multilib-lib32 = \"x86\" |
364 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 492 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
365 | """ | 493 | """ |
366 | configB = """ | 494 | configB = """ |
@@ -371,36 +499,9 @@ require conf/multilib.conf | |||
371 | MULTILIBS = \"\" | 499 | MULTILIBS = \"\" |
372 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 500 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
373 | """ | 501 | """ |
374 | self.sstate_allarch_samesigs(configA, configB) | 502 | self.sstate_common_samesigs(configA, configB) |
375 | |||
376 | def sstate_allarch_samesigs(self, configA, configB): | ||
377 | |||
378 | self.write_config(configA) | ||
379 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") | ||
380 | bitbake("world meta-toolchain -S none") | ||
381 | self.write_config(configB) | ||
382 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") | ||
383 | bitbake("world meta-toolchain -S none") | ||
384 | |||
385 | def get_files(d): | ||
386 | f = {} | ||
387 | for root, dirs, files in os.walk(d): | ||
388 | for name in files: | ||
389 | if "meta-environment" in root or "cross-canadian" in root: | ||
390 | continue | ||
391 | if "do_build" not in name: | ||
392 | # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79 | ||
393 | (_, task, _, shash) = name.rsplit(".", 3) | ||
394 | f[os.path.join(os.path.basename(root), task)] = shash | ||
395 | return f | ||
396 | |||
397 | nativesdkdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0]) | ||
398 | |||
399 | files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir) | ||
400 | files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir) | ||
401 | self.maxDiff = None | ||
402 | self.assertEqual(files1, files2) | ||
403 | 503 | ||
504 | class SStateHashSameSigs3(SStateBase): | ||
404 | def test_sstate_sametune_samesigs(self): | 505 | def test_sstate_sametune_samesigs(self): |
405 | """ | 506 | """ |
406 | The sstate checksums of two identical machines (using the same tune) should be the | 507 | The sstate checksums of two identical machines (using the same tune) should be the |
@@ -414,7 +515,7 @@ TCLIBCAPPEND = \"\" | |||
414 | MACHINE = \"qemux86\" | 515 | MACHINE = \"qemux86\" |
415 | require conf/multilib.conf | 516 | require conf/multilib.conf |
416 | MULTILIBS = "multilib:lib32" | 517 | MULTILIBS = "multilib:lib32" |
417 | DEFAULTTUNE_virtclass-multilib-lib32 = "x86" | 518 | DEFAULTTUNE:virtclass-multilib-lib32 = "x86" |
418 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 519 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
419 | """) | 520 | """) |
420 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") | 521 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") |
@@ -425,7 +526,7 @@ TCLIBCAPPEND = \"\" | |||
425 | MACHINE = \"qemux86copy\" | 526 | MACHINE = \"qemux86copy\" |
426 | require conf/multilib.conf | 527 | require conf/multilib.conf |
427 | MULTILIBS = "multilib:lib32" | 528 | MULTILIBS = "multilib:lib32" |
428 | DEFAULTTUNE_virtclass-multilib-lib32 = "x86" | 529 | DEFAULTTUNE:virtclass-multilib-lib32 = "x86" |
429 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 530 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
430 | """) | 531 | """) |
431 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") | 532 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") |
@@ -435,7 +536,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
435 | f = [] | 536 | f = [] |
436 | for root, dirs, files in os.walk(d): | 537 | for root, dirs, files in os.walk(d): |
437 | for name in files: | 538 | for name in files: |
438 | if "meta-environment" in root or "cross-canadian" in root: | 539 | if "meta-environment" in root or "cross-canadian" in root or 'meta-ide-support' in root: |
439 | continue | 540 | continue |
440 | if "qemux86copy-" in root or "qemux86-" in root: | 541 | if "qemux86copy-" in root or "qemux86-" in root: |
441 | continue | 542 | continue |
@@ -462,7 +563,7 @@ TCLIBCAPPEND = \"\" | |||
462 | MACHINE = \"qemux86\" | 563 | MACHINE = \"qemux86\" |
463 | require conf/multilib.conf | 564 | require conf/multilib.conf |
464 | MULTILIBS = "multilib:lib32" | 565 | MULTILIBS = "multilib:lib32" |
465 | DEFAULTTUNE_virtclass-multilib-lib32 = "x86" | 566 | DEFAULTTUNE:virtclass-multilib-lib32 = "x86" |
466 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 567 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
467 | """) | 568 | """) |
468 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") | 569 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") |
@@ -488,7 +589,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
488 | self.maxDiff = None | 589 | self.maxDiff = None |
489 | self.assertCountEqual(files1, files2) | 590 | self.assertCountEqual(files1, files2) |
490 | 591 | ||
491 | 592 | class SStateHashSameSigs4(SStateBase): | |
492 | def test_sstate_noop_samesigs(self): | 593 | def test_sstate_noop_samesigs(self): |
493 | """ | 594 | """ |
494 | The sstate checksums of two builds with these variables changed or | 595 | The sstate checksums of two builds with these variables changed or |
@@ -503,7 +604,7 @@ PARALLEL_MAKE = "-j 1" | |||
503 | DL_DIR = "${TOPDIR}/download1" | 604 | DL_DIR = "${TOPDIR}/download1" |
504 | TIME = "111111" | 605 | TIME = "111111" |
505 | DATE = "20161111" | 606 | DATE = "20161111" |
506 | INHERIT_remove = "buildstats-summary buildhistory uninative" | 607 | INHERIT:remove = "buildstats-summary buildhistory uninative" |
507 | http_proxy = "" | 608 | http_proxy = "" |
508 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 609 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
509 | """) | 610 | """) |
@@ -519,7 +620,7 @@ DL_DIR = "${TOPDIR}/download2" | |||
519 | TIME = "222222" | 620 | TIME = "222222" |
520 | DATE = "20161212" | 621 | DATE = "20161212" |
521 | # Always remove uninative as we're changing proxies | 622 | # Always remove uninative as we're changing proxies |
522 | INHERIT_remove = "uninative" | 623 | INHERIT:remove = "uninative" |
523 | INHERIT += "buildstats-summary buildhistory" | 624 | INHERIT += "buildstats-summary buildhistory" |
524 | http_proxy = "http://example.com/" | 625 | http_proxy = "http://example.com/" |
525 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 626 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
@@ -573,3 +674,335 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
573 | compare_sigfiles(rest, files1, files2, compare=False) | 674 | compare_sigfiles(rest, files1, files2, compare=False) |
574 | 675 | ||
575 | self.fail("sstate hashes not identical.") | 676 | self.fail("sstate hashes not identical.") |
677 | |||
678 | def test_sstate_movelayer_samesigs(self): | ||
679 | """ | ||
680 | The sstate checksums of two builds with the same oe-core layer in two | ||
681 | different locations should be the same. | ||
682 | """ | ||
683 | core_layer = os.path.join( | ||
684 | self.tc.td["COREBASE"], 'meta') | ||
685 | copy_layer_1 = self.topdir + "/meta-copy1/meta" | ||
686 | copy_layer_2 = self.topdir + "/meta-copy2/meta" | ||
687 | |||
688 | oe.path.copytree(core_layer, copy_layer_1) | ||
689 | os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy1/scripts") | ||
690 | self.write_config(""" | ||
691 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash" | ||
692 | """) | ||
693 | bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_1, core_layer) | ||
694 | self.write_bblayers_config(bblayers_conf) | ||
695 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") | ||
696 | bitbake("bash -S none") | ||
697 | |||
698 | oe.path.copytree(core_layer, copy_layer_2) | ||
699 | os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy2/scripts") | ||
700 | self.write_config(""" | ||
701 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" | ||
702 | """) | ||
703 | bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_2, core_layer) | ||
704 | self.write_bblayers_config(bblayers_conf) | ||
705 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") | ||
706 | bitbake("bash -S none") | ||
707 | |||
708 | def get_files(d): | ||
709 | f = [] | ||
710 | for root, dirs, files in os.walk(d): | ||
711 | for name in files: | ||
712 | f.append(os.path.join(root, name)) | ||
713 | return f | ||
714 | files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps") | ||
715 | files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps") | ||
716 | files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2] | ||
717 | self.maxDiff = None | ||
718 | self.assertCountEqual(files1, files2) | ||
719 | |||
720 | class SStateFindSiginfo(SStateBase): | ||
721 | def test_sstate_compare_sigfiles_and_find_siginfo(self): | ||
722 | """ | ||
723 | Test the functionality of the find_siginfo: basic function and callback in compare_sigfiles | ||
724 | """ | ||
725 | self.write_config(""" | ||
726 | TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\" | ||
727 | TCLIBCAPPEND = \"\" | ||
728 | MACHINE = \"qemux86-64\" | ||
729 | require conf/multilib.conf | ||
730 | MULTILIBS = "multilib:lib32" | ||
731 | DEFAULTTUNE:virtclass-multilib-lib32 = "x86" | ||
732 | BB_SIGNATURE_HANDLER = "OEBasicHash" | ||
733 | """) | ||
734 | self.track_for_cleanup(self.topdir + "/tmp-sstates-findsiginfo") | ||
735 | |||
736 | pns = ["binutils", "binutils-native", "lib32-binutils"] | ||
737 | target_configs = [ | ||
738 | """ | ||
739 | TMPVAL1 = "tmpval1" | ||
740 | TMPVAL2 = "tmpval2" | ||
741 | do_tmptask1() { | ||
742 | echo ${TMPVAL1} | ||
743 | } | ||
744 | do_tmptask2() { | ||
745 | echo ${TMPVAL2} | ||
746 | } | ||
747 | addtask do_tmptask1 | ||
748 | addtask tmptask2 before do_tmptask1 | ||
749 | """, | ||
750 | """ | ||
751 | TMPVAL3 = "tmpval3" | ||
752 | TMPVAL4 = "tmpval4" | ||
753 | do_tmptask1() { | ||
754 | echo ${TMPVAL3} | ||
755 | } | ||
756 | do_tmptask2() { | ||
757 | echo ${TMPVAL4} | ||
758 | } | ||
759 | addtask do_tmptask1 | ||
760 | addtask tmptask2 before do_tmptask1 | ||
761 | """ | ||
762 | ] | ||
763 | |||
764 | for target_config in target_configs: | ||
765 | self.write_recipeinc("binutils", target_config) | ||
766 | for pn in pns: | ||
767 | bitbake("%s -c do_tmptask1 -S none" % pn) | ||
768 | self.delete_recipeinc("binutils") | ||
769 | |||
770 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
771 | tinfoil.prepare(config_only=True) | ||
772 | |||
773 | def find_siginfo(pn, taskname, sigs=None): | ||
774 | result = None | ||
775 | command_complete = False | ||
776 | tinfoil.set_event_mask(["bb.event.FindSigInfoResult", | ||
777 | "bb.command.CommandCompleted"]) | ||
778 | ret = tinfoil.run_command("findSigInfo", pn, taskname, sigs) | ||
779 | if ret: | ||
780 | while result is None or not command_complete: | ||
781 | event = tinfoil.wait_event(1) | ||
782 | if event: | ||
783 | if isinstance(event, bb.command.CommandCompleted): | ||
784 | command_complete = True | ||
785 | elif isinstance(event, bb.event.FindSigInfoResult): | ||
786 | result = event.result | ||
787 | return result | ||
788 | |||
789 | def recursecb(key, hash1, hash2): | ||
790 | nonlocal recursecb_count | ||
791 | recursecb_count += 1 | ||
792 | hashes = [hash1, hash2] | ||
793 | hashfiles = find_siginfo(key, None, hashes) | ||
794 | self.assertCountEqual(hashes, hashfiles) | ||
795 | bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb) | ||
796 | |||
797 | for pn in pns: | ||
798 | recursecb_count = 0 | ||
799 | matches = find_siginfo(pn, "do_tmptask1") | ||
800 | self.assertGreaterEqual(len(matches), 2) | ||
801 | latesthashes = sorted(matches.keys(), key=lambda h: matches[h]['time'])[-2:] | ||
802 | bb.siggen.compare_sigfiles(matches[latesthashes[-2]]['path'], matches[latesthashes[-1]]['path'], recursecb) | ||
803 | self.assertEqual(recursecb_count,1) | ||
804 | |||
805 | class SStatePrintdiff(SStateBase): | ||
806 | def run_test_printdiff_changerecipe(self, target, change_recipe, change_bbtask, change_content, expected_sametmp_output, expected_difftmp_output): | ||
807 | import time | ||
808 | self.write_config(""" | ||
809 | TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}" | ||
810 | """.format(time.time())) | ||
811 | # Use runall do_build to ensure any indirect sstate is created, e.g. tzcode-native on both x86 and | ||
812 | # aarch64 hosts since only allarch target recipes depend upon it and it may not be built otherwise. | ||
813 | # A bitbake -c cleansstate tzcode-native would cause some of these tests to error for example. | ||
814 | bitbake("--runall build --runall deploy_source_date_epoch {}".format(target)) | ||
815 | bitbake("-S none {}".format(target)) | ||
816 | bitbake(change_bbtask) | ||
817 | self.write_recipeinc(change_recipe, change_content) | ||
818 | result_sametmp = bitbake("-S printdiff {}".format(target)) | ||
819 | |||
820 | self.write_config(""" | ||
821 | TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}" | ||
822 | """.format(time.time())) | ||
823 | result_difftmp = bitbake("-S printdiff {}".format(target)) | ||
824 | |||
825 | self.delete_recipeinc(change_recipe) | ||
826 | for item in expected_sametmp_output: | ||
827 | self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output)) | ||
828 | for item in expected_difftmp_output: | ||
829 | self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output)) | ||
830 | |||
831 | def run_test_printdiff_changeconfig(self, target, change_bbtasks, change_content, expected_sametmp_output, expected_difftmp_output): | ||
832 | import time | ||
833 | self.write_config(""" | ||
834 | TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}" | ||
835 | """.format(time.time())) | ||
836 | bitbake("--runall build --runall deploy_source_date_epoch {}".format(target)) | ||
837 | bitbake("-S none {}".format(target)) | ||
838 | bitbake(" ".join(change_bbtasks)) | ||
839 | self.append_config(change_content) | ||
840 | result_sametmp = bitbake("-S printdiff {}".format(target)) | ||
841 | |||
842 | self.write_config(""" | ||
843 | TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}" | ||
844 | """.format(time.time())) | ||
845 | self.append_config(change_content) | ||
846 | result_difftmp = bitbake("-S printdiff {}".format(target)) | ||
847 | |||
848 | for item in expected_sametmp_output: | ||
849 | self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output)) | ||
850 | for item in expected_difftmp_output: | ||
851 | self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output)) | ||
852 | |||
853 | |||
854 | # Check if printdiff walks the full dependency chain from the image target to where the change is in a specific recipe | ||
855 | def test_image_minimal_vs_perlcross(self): | ||
856 | expected_output = ("Task perlcross-native:do_install couldn't be used from the cache because:", | ||
857 | "We need hash", | ||
858 | "most recent matching task was") | ||
859 | expected_sametmp_output = expected_output + ( | ||
860 | "Variable do_install value changed", | ||
861 | '+ echo "this changes the task signature"') | ||
862 | expected_difftmp_output = expected_output | ||
863 | |||
864 | self.run_test_printdiff_changerecipe("core-image-minimal", "perlcross", "-c do_install perlcross-native", | ||
865 | """ | ||
866 | do_install:append() { | ||
867 | echo "this changes the task signature" | ||
868 | } | ||
869 | """, | ||
870 | expected_sametmp_output, expected_difftmp_output) | ||
871 | |||
872 | # Check if changes to gcc-source (which uses tmp/work-shared) are correctly discovered | ||
873 | def test_gcc_runtime_vs_gcc_source(self): | ||
874 | gcc_source_pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV'] | ||
875 | |||
876 | expected_output = ("Task {}:do_preconfigure couldn't be used from the cache because:".format(gcc_source_pn), | ||
877 | "We need hash", | ||
878 | "most recent matching task was") | ||
879 | expected_sametmp_output = expected_output + ( | ||
880 | "Variable do_preconfigure value changed", | ||
881 | '+ print("this changes the task signature")') | ||
882 | expected_difftmp_output = expected_output | ||
883 | |||
884 | self.run_test_printdiff_changerecipe("gcc-runtime", "gcc-source", "-c do_preconfigure {}".format(gcc_source_pn), | ||
885 | """ | ||
886 | python do_preconfigure:append() { | ||
887 | print("this changes the task signature") | ||
888 | } | ||
889 | """, | ||
890 | expected_sametmp_output, expected_difftmp_output) | ||
891 | |||
892 | # Check if changing a really base task definiton is reported against multiple core recipes using it | ||
893 | def test_image_minimal_vs_base_do_configure(self): | ||
894 | change_bbtasks = ('zstd-native:do_configure', | ||
895 | 'texinfo-dummy-native:do_configure', | ||
896 | 'ldconfig-native:do_configure', | ||
897 | 'gettext-minimal-native:do_configure', | ||
898 | 'tzcode-native:do_configure', | ||
899 | 'makedevs-native:do_configure', | ||
900 | 'pigz-native:do_configure', | ||
901 | 'update-rc.d-native:do_configure', | ||
902 | 'unzip-native:do_configure', | ||
903 | 'gnu-config-native:do_configure') | ||
904 | |||
905 | expected_output = ["Task {} couldn't be used from the cache because:".format(t) for t in change_bbtasks] + [ | ||
906 | "We need hash", | ||
907 | "most recent matching task was"] | ||
908 | |||
909 | expected_sametmp_output = expected_output + [ | ||
910 | "Variable base_do_configure value changed", | ||
911 | '+ echo "this changes base_do_configure() definiton "'] | ||
912 | expected_difftmp_output = expected_output | ||
913 | |||
914 | self.run_test_printdiff_changeconfig("core-image-minimal",change_bbtasks, | ||
915 | """ | ||
916 | INHERIT += "base-do-configure-modified" | ||
917 | """, | ||
918 | expected_sametmp_output, expected_difftmp_output) | ||
919 | |||
920 | @OETestTag("yocto-mirrors") | ||
921 | class SStateMirrors(SStateBase): | ||
922 | def check_bb_output(self, output, exceptions, check_cdn): | ||
923 | def is_exception(object, exceptions): | ||
924 | for e in exceptions: | ||
925 | if re.search(e, object): | ||
926 | return True | ||
927 | return False | ||
928 | |||
929 | output_l = output.splitlines() | ||
930 | for l in output_l: | ||
931 | if l.startswith("Sstate summary"): | ||
932 | for idx, item in enumerate(l.split()): | ||
933 | if item == 'Missed': | ||
934 | missing_objects = int(l.split()[idx+1]) | ||
935 | break | ||
936 | else: | ||
937 | self.fail("Did not find missing objects amount in sstate summary: {}".format(l)) | ||
938 | break | ||
939 | else: | ||
940 | self.fail("Did not find 'Sstate summary' line in bitbake output") | ||
941 | |||
942 | failed_urls = [] | ||
943 | failed_urls_extrainfo = [] | ||
944 | for l in output_l: | ||
945 | if "SState: Unsuccessful fetch test for" in l and check_cdn: | ||
946 | missing_object = l.split()[6] | ||
947 | elif "SState: Looked for but didn't find file" in l and not check_cdn: | ||
948 | missing_object = l.split()[8] | ||
949 | else: | ||
950 | missing_object = None | ||
951 | if missing_object: | ||
952 | if not is_exception(missing_object, exceptions): | ||
953 | failed_urls.append(missing_object) | ||
954 | else: | ||
955 | missing_objects -= 1 | ||
956 | |||
957 | if "urlopen failed for" in l and not is_exception(l, exceptions): | ||
958 | failed_urls_extrainfo.append(l) | ||
959 | |||
960 | self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) | ||
961 | self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) | ||
962 | |||
963 | def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False): | ||
964 | # sstate is checked for existence of these, but they never get written out to begin with | ||
965 | exceptions += ["{}.*image_qa".format(t) for t in targets.split()] | ||
966 | exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()] | ||
967 | exceptions += ["{}.*image_complete".format(t) for t in targets.split()] | ||
968 | exceptions += ["linux-yocto.*shared_workdir"] | ||
969 | # these get influnced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64) | ||
970 | # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks, | ||
971 | # which makes tracing other changes difficult | ||
972 | exceptions += ["{}.*create_spdx".format(t) for t in targets.split()] | ||
973 | exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()] | ||
974 | |||
975 | if check_cdn: | ||
976 | self.config_sstate(True) | ||
977 | self.append_config(""" | ||
978 | MACHINE = "{}" | ||
979 | BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687" | ||
980 | SSTATE_MIRRORS ?= "file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH" | ||
981 | """.format(machine)) | ||
982 | else: | ||
983 | self.append_config(""" | ||
984 | MACHINE = "{}" | ||
985 | """.format(machine)) | ||
986 | result = bitbake("-DD -n {}".format(targets)) | ||
987 | bitbake("-S none {}".format(targets)) | ||
988 | if ignore_errors: | ||
989 | return | ||
990 | self.check_bb_output(result.output, exceptions, check_cdn) | ||
991 | |||
992 | def test_cdn_mirror_qemux86_64(self): | ||
993 | exceptions = [] | ||
994 | self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True) | ||
995 | self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions) | ||
996 | |||
997 | def test_cdn_mirror_qemuarm64(self): | ||
998 | exceptions = [] | ||
999 | self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True) | ||
1000 | self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions) | ||
1001 | |||
1002 | def test_local_cache_qemux86_64(self): | ||
1003 | exceptions = [] | ||
1004 | self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False) | ||
1005 | |||
1006 | def test_local_cache_qemuarm64(self): | ||
1007 | exceptions = [] | ||
1008 | self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False) | ||
diff --git a/meta/lib/oeqa/selftest/cases/sysroot.py b/meta/lib/oeqa/selftest/cases/sysroot.py index 6e34927c90..ef854f6fee 100644 --- a/meta/lib/oeqa/selftest/cases/sysroot.py +++ b/meta/lib/oeqa/selftest/cases/sysroot.py | |||
@@ -1,11 +1,13 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
5 | import uuid | 7 | import uuid |
6 | 8 | ||
7 | from oeqa.selftest.case import OESelftestTestCase | 9 | from oeqa.selftest.case import OESelftestTestCase |
8 | from oeqa.utils.commands import bitbake | 10 | from oeqa.utils.commands import bitbake |
9 | 11 | ||
10 | class SysrootTests(OESelftestTestCase): | 12 | class SysrootTests(OESelftestTestCase): |
11 | def test_sysroot_cleanup(self): | 13 | def test_sysroot_cleanup(self): |
@@ -24,14 +26,61 @@ class SysrootTests(OESelftestTestCase): | |||
24 | self.write_config(""" | 26 | self.write_config(""" |
25 | PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1" | 27 | PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1" |
26 | MACHINE = "qemux86" | 28 | MACHINE = "qemux86" |
27 | TESTSTRING_pn-sysroot-test-arch1 = "%s" | 29 | TESTSTRING:pn-sysroot-test-arch1 = "%s" |
28 | TESTSTRING_pn-sysroot-test-arch2 = "%s" | 30 | TESTSTRING:pn-sysroot-test-arch2 = "%s" |
29 | """ % (uuid1, uuid2)) | 31 | """ % (uuid1, uuid2)) |
30 | bitbake("sysroot-test") | 32 | bitbake("sysroot-test") |
31 | self.write_config(""" | 33 | self.write_config(""" |
32 | PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2" | 34 | PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2" |
33 | MACHINE = "qemux86copy" | 35 | MACHINE = "qemux86copy" |
34 | TESTSTRING_pn-sysroot-test-arch1 = "%s" | 36 | TESTSTRING:pn-sysroot-test-arch1 = "%s" |
35 | TESTSTRING_pn-sysroot-test-arch2 = "%s" | 37 | TESTSTRING:pn-sysroot-test-arch2 = "%s" |
36 | """ % (uuid1, uuid2)) | 38 | """ % (uuid1, uuid2)) |
37 | bitbake("sysroot-test") | 39 | bitbake("sysroot-test") |
40 | |||
41 | def test_sysroot_max_shebang(self): | ||
42 | """ | ||
43 | Summary: Check max shebang triggers. To confirm [YOCTO #11053] is closed. | ||
44 | Expected: Fail when a shebang bigger than the max shebang-size is reached. | ||
45 | Author: Paulo Neves <ptsneves@gmail.com> | ||
46 | """ | ||
47 | expected = "maximum shebang size exceeded, the maximum size is 128. [shebang-size]" | ||
48 | res = bitbake("sysroot-shebang-test-native -c populate_sysroot", ignore_status=True) | ||
49 | self.assertTrue(expected in res.output, msg=res.output) | ||
50 | self.assertTrue(res.status != 0) | ||
51 | |||
52 | def test_sysroot_la(self): | ||
53 | """ | ||
54 | Summary: Check that workdir paths are not contained in .la files. | ||
55 | Expected: Fail when a workdir path is found in the file content. | ||
56 | Author: Paulo Neves <ptsneves@gmail.com> | ||
57 | """ | ||
58 | expected = "la-test.la failed sanity test (workdir) in path" | ||
59 | |||
60 | res = bitbake("sysroot-la-test -c populate_sysroot", ignore_status=True) | ||
61 | self.assertTrue(expected in res.output, msg=res.output) | ||
62 | self.assertTrue('[la]' in res.output, msg=res.output) | ||
63 | self.assertTrue(res.status != 0) | ||
64 | |||
65 | res = bitbake("sysroot-la-test-native -c populate_sysroot", ignore_status=True) | ||
66 | self.assertTrue(expected in res.output, msg=res.output) | ||
67 | self.assertTrue('[la]' in res.output, msg=res.output) | ||
68 | self.assertTrue(res.status != 0) | ||
69 | |||
70 | def test_sysroot_pkgconfig(self): | ||
71 | """ | ||
72 | Summary: Check that tmpdir paths are not contained in .pc files. | ||
73 | Expected: Fail when a tmpdir path is found in the file content. | ||
74 | Author: Paulo Neves <ptsneves@gmail.com> | ||
75 | """ | ||
76 | expected = "test.pc failed sanity test (tmpdir) in path" | ||
77 | |||
78 | res = bitbake("sysroot-pc-test -c populate_sysroot", ignore_status=True) | ||
79 | self.assertTrue('[pkgconfig]' in res.output, msg=res.output) | ||
80 | self.assertTrue(expected in res.output, msg=res.output) | ||
81 | self.assertTrue(res.status != 0) | ||
82 | |||
83 | res = bitbake("sysroot-pc-test-native -c populate_sysroot", ignore_status=True) | ||
84 | self.assertTrue(expected in res.output, msg=res.output) | ||
85 | self.assertTrue('[pkgconfig]' in res.output, msg=res.output) | ||
86 | self.assertTrue(res.status != 0) | ||
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py index a51c6048d3..21c8686b2a 100644 --- a/meta/lib/oeqa/selftest/cases/tinfoil.py +++ b/meta/lib/oeqa/selftest/cases/tinfoil.py | |||
@@ -1,4 +1,6 @@ | |||
1 | # | 1 | # |
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
2 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
3 | # | 5 | # |
4 | 6 | ||
@@ -9,7 +11,6 @@ import logging | |||
9 | import bb.tinfoil | 11 | import bb.tinfoil |
10 | 12 | ||
11 | from oeqa.selftest.case import OESelftestTestCase | 13 | from oeqa.selftest.case import OESelftestTestCase |
12 | from oeqa.utils.commands import runCmd | ||
13 | 14 | ||
14 | class TinfoilTests(OESelftestTestCase): | 15 | class TinfoilTests(OESelftestTestCase): |
15 | """ Basic tests for the tinfoil API """ | 16 | """ Basic tests for the tinfoil API """ |
@@ -47,6 +48,17 @@ class TinfoilTests(OESelftestTestCase): | |||
47 | rd = tinfoil.parse_recipe_file(best[3]) | 48 | rd = tinfoil.parse_recipe_file(best[3]) |
48 | self.assertEqual(testrecipe, rd.getVar('PN')) | 49 | self.assertEqual(testrecipe, rd.getVar('PN')) |
49 | 50 | ||
51 | def test_parse_virtual_recipe(self): | ||
52 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
53 | tinfoil.prepare(config_only=False, quiet=2) | ||
54 | testrecipe = 'nativesdk-gcc' | ||
55 | best = tinfoil.find_best_provider(testrecipe) | ||
56 | if not best: | ||
57 | self.fail('Unable to find recipe providing %s' % testrecipe) | ||
58 | rd = tinfoil.parse_recipe_file(best[3]) | ||
59 | self.assertEqual(testrecipe, rd.getVar('PN')) | ||
60 | self.assertIsNotNone(rd.getVar('FILE_LAYERNAME')) | ||
61 | |||
50 | def test_parse_recipe_copy_expand(self): | 62 | def test_parse_recipe_copy_expand(self): |
51 | with bb.tinfoil.Tinfoil() as tinfoil: | 63 | with bb.tinfoil.Tinfoil() as tinfoil: |
52 | tinfoil.prepare(config_only=False, quiet=2) | 64 | tinfoil.prepare(config_only=False, quiet=2) |
@@ -65,6 +77,32 @@ class TinfoilTests(OESelftestTestCase): | |||
65 | localdata.setVar('PN', 'hello') | 77 | localdata.setVar('PN', 'hello') |
66 | self.assertEqual('hello', localdata.getVar('BPN')) | 78 | self.assertEqual('hello', localdata.getVar('BPN')) |
67 | 79 | ||
80 | # The config_data API to parse_recipe_file is used by: | ||
81 | # layerindex-web layerindex/update_layer.py | ||
82 | def test_parse_recipe_custom_data(self): | ||
83 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
84 | tinfoil.prepare(config_only=False, quiet=2) | ||
85 | localdata = bb.data.createCopy(tinfoil.config_data) | ||
86 | localdata.setVar("TESTVAR", "testval") | ||
87 | testrecipe = 'mdadm' | ||
88 | best = tinfoil.find_best_provider(testrecipe) | ||
89 | if not best: | ||
90 | self.fail('Unable to find recipe providing %s' % testrecipe) | ||
91 | rd = tinfoil.parse_recipe_file(best[3], config_data=localdata) | ||
92 | self.assertEqual("testval", rd.getVar('TESTVAR')) | ||
93 | |||
94 | def test_parse_virtual_recipe_custom_data(self): | ||
95 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
96 | tinfoil.prepare(config_only=False, quiet=2) | ||
97 | localdata = bb.data.createCopy(tinfoil.config_data) | ||
98 | localdata.setVar("TESTVAR", "testval") | ||
99 | testrecipe = 'nativesdk-gcc' | ||
100 | best = tinfoil.find_best_provider(testrecipe) | ||
101 | if not best: | ||
102 | self.fail('Unable to find recipe providing %s' % testrecipe) | ||
103 | rd = tinfoil.parse_recipe_file(best[3], config_data=localdata) | ||
104 | self.assertEqual("testval", rd.getVar('TESTVAR')) | ||
105 | |||
68 | def test_list_recipes(self): | 106 | def test_list_recipes(self): |
69 | with bb.tinfoil.Tinfoil() as tinfoil: | 107 | with bb.tinfoil.Tinfoil() as tinfoil: |
70 | tinfoil.prepare(config_only=False, quiet=2) | 108 | tinfoil.prepare(config_only=False, quiet=2) |
@@ -87,21 +125,20 @@ class TinfoilTests(OESelftestTestCase): | |||
87 | with bb.tinfoil.Tinfoil() as tinfoil: | 125 | with bb.tinfoil.Tinfoil() as tinfoil: |
88 | tinfoil.prepare(config_only=True) | 126 | tinfoil.prepare(config_only=True) |
89 | 127 | ||
90 | tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted']) | 128 | tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted', 'bb.command.CommandFailed', 'bb.command.CommandExit']) |
91 | 129 | ||
92 | # Need to drain events otherwise events that were masked may still be in the queue | 130 | # Need to drain events otherwise events that were masked may still be in the queue |
93 | while tinfoil.wait_event(): | 131 | while tinfoil.wait_event(): |
94 | pass | 132 | pass |
95 | 133 | ||
96 | pattern = 'conf' | 134 | pattern = 'conf' |
97 | res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine') | 135 | res = tinfoil.run_command('testCookerCommandEvent', pattern, handle_events=False) |
98 | self.assertTrue(res) | 136 | self.assertTrue(res) |
99 | 137 | ||
100 | eventreceived = False | 138 | eventreceived = False |
101 | commandcomplete = False | 139 | commandcomplete = False |
102 | start = time.time() | 140 | start = time.time() |
103 | # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example | 141 | # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example |
104 | # The test is IO load sensitive too | ||
105 | while (not (eventreceived == True and commandcomplete == True) | 142 | while (not (eventreceived == True and commandcomplete == True) |
106 | and (time.time() - start < 60)): | 143 | and (time.time() - start < 60)): |
107 | # if we received both events (on let's say a good day), we are done | 144 | # if we received both events (on let's say a good day), we are done |
@@ -111,14 +148,15 @@ class TinfoilTests(OESelftestTestCase): | |||
111 | commandcomplete = True | 148 | commandcomplete = True |
112 | elif isinstance(event, bb.event.FilesMatchingFound): | 149 | elif isinstance(event, bb.event.FilesMatchingFound): |
113 | self.assertEqual(pattern, event._pattern) | 150 | self.assertEqual(pattern, event._pattern) |
114 | self.assertIn('qemuarm.conf', event._matches) | 151 | self.assertIn('A', event._matches) |
152 | self.assertIn('B', event._matches) | ||
115 | eventreceived = True | 153 | eventreceived = True |
116 | elif isinstance(event, logging.LogRecord): | 154 | elif isinstance(event, logging.LogRecord): |
117 | continue | 155 | continue |
118 | else: | 156 | else: |
119 | self.fail('Unexpected event: %s' % event) | 157 | self.fail('Unexpected event: %s' % event) |
120 | 158 | ||
121 | self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server') | 159 | self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server (Matching event received: %s)' % str(eventreceived)) |
122 | self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server') | 160 | self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server') |
123 | 161 | ||
124 | def test_setvariable_clean(self): | 162 | def test_setvariable_clean(self): |
@@ -173,8 +211,8 @@ class TinfoilTests(OESelftestTestCase): | |||
173 | self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name') | 211 | self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name') |
174 | # Test overrides | 212 | # Test overrides |
175 | tinfoil.config_data.setVar('TESTVAR', 'original') | 213 | tinfoil.config_data.setVar('TESTVAR', 'original') |
176 | tinfoil.config_data.setVar('TESTVAR_overrideone', 'one') | 214 | tinfoil.config_data.setVar('TESTVAR:overrideone', 'one') |
177 | tinfoil.config_data.setVar('TESTVAR_overridetwo', 'two') | 215 | tinfoil.config_data.setVar('TESTVAR:overridetwo', 'two') |
178 | tinfoil.config_data.appendVar('OVERRIDES', ':overrideone') | 216 | tinfoil.config_data.appendVar('OVERRIDES', ':overrideone') |
179 | value = tinfoil.config_data.getVar('TESTVAR') | 217 | value = tinfoil.config_data.getVar('TESTVAR') |
180 | self.assertEqual(value, 'one', 'Variable overrides not functioning correctly') | 218 | self.assertEqual(value, 'one', 'Variable overrides not functioning correctly') |
diff --git a/meta/lib/oeqa/selftest/cases/usergrouptests.py b/meta/lib/oeqa/selftest/cases/usergrouptests.py new file mode 100644 index 0000000000..3c59b0f290 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/usergrouptests.py | |||
@@ -0,0 +1,57 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | import os | ||
8 | import shutil | ||
9 | from oeqa.selftest.case import OESelftestTestCase | ||
10 | from oeqa.utils.commands import bitbake | ||
11 | from oeqa.utils.commands import bitbake, get_bb_var, get_test_layer | ||
12 | |||
13 | class UserGroupTests(OESelftestTestCase): | ||
14 | def test_group_from_dep_package(self): | ||
15 | self.logger.info("Building creategroup2") | ||
16 | bitbake(' creategroup2 creategroup1') | ||
17 | bitbake(' creategroup2 creategroup1 -c clean') | ||
18 | self.logger.info("Packaging creategroup2") | ||
19 | self.assertTrue(bitbake(' creategroup2 -c package')) | ||
20 | |||
21 | def test_add_task_between_p_sysroot_and_package(self): | ||
22 | # Test for YOCTO #14961 | ||
23 | self.assertTrue(bitbake('useraddbadtask -C fetch')) | ||
24 | |||
25 | def test_postinst_order(self): | ||
26 | self.logger.info("Building dcreategroup") | ||
27 | self.assertTrue(bitbake(' dcreategroup')) | ||
28 | |||
29 | def test_static_useradd_from_dynamic(self): | ||
30 | metaselftestpath = get_test_layer() | ||
31 | self.logger.info("Building core-image-minimal to generate passwd/group file") | ||
32 | bitbake(' core-image-minimal') | ||
33 | self.logger.info("Setting up useradd-staticids") | ||
34 | repropassdir = os.path.join(metaselftestpath, "conf/include") | ||
35 | os.makedirs(repropassdir) | ||
36 | etcdir=os.path.join(os.path.join(os.path.join(get_bb_var("TMPDIR"), "work"), \ | ||
37 | os.path.join(get_bb_var("MACHINE").replace("-","_")+"-poky-linux", "core-image-minimal/1.0/rootfs/etc"))) | ||
38 | shutil.copy(os.path.join(etcdir, "passwd"), os.path.join(repropassdir, "reproducible-passwd")) | ||
39 | shutil.copy(os.path.join(etcdir, "group"), os.path.join(repropassdir, "reproducible-group")) | ||
40 | # Copy the original local.conf | ||
41 | shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig')) | ||
42 | |||
43 | self.write_config("USERADDEXTENSION = \"useradd-staticids\"") | ||
44 | self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"") | ||
45 | self.write_config("USERADD_UID_TABLES += \"conf/include/reproducible-passwd\"") | ||
46 | self.write_config("USERADD_GID_TABLES += \"conf/include/reproducible-group\"") | ||
47 | self.logger.info("Rebuild with staticids") | ||
48 | bitbake(' core-image-minimal') | ||
49 | shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf')) | ||
50 | self.logger.info("Rebuild without staticids") | ||
51 | bitbake(' core-image-minimal') | ||
52 | self.write_config("USERADDEXTENSION = \"useradd-staticids\"") | ||
53 | self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"") | ||
54 | self.write_config("USERADD_UID_TABLES += \"files/static-passwd\"") | ||
55 | self.write_config("USERADD_GID_TABLES += \"files/static-group\"") | ||
56 | self.logger.info("Rebuild with other staticids") | ||
57 | self.assertTrue(bitbake(' core-image-minimal')) | ||
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py index 2bf5cb9a86..b616759209 100644 --- a/meta/lib/oeqa/selftest/cases/wic.py +++ b/meta/lib/oeqa/selftest/cases/wic.py | |||
@@ -11,39 +11,19 @@ | |||
11 | import os | 11 | import os |
12 | import sys | 12 | import sys |
13 | import unittest | 13 | import unittest |
14 | import hashlib | ||
14 | 15 | ||
15 | from glob import glob | 16 | from glob import glob |
16 | from shutil import rmtree, copy | 17 | from shutil import rmtree, copy |
17 | from functools import wraps, lru_cache | ||
18 | from tempfile import NamedTemporaryFile | 18 | from tempfile import NamedTemporaryFile |
19 | from tempfile import TemporaryDirectory | ||
19 | 20 | ||
20 | from oeqa.selftest.case import OESelftestTestCase | 21 | from oeqa.selftest.case import OESelftestTestCase |
22 | from oeqa.core.decorator import OETestTag | ||
23 | from oeqa.core.decorator.data import skipIfNotArch | ||
21 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu | 24 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu |
22 | 25 | ||
23 | 26 | ||
24 | @lru_cache(maxsize=32) | ||
25 | def get_host_arch(recipe): | ||
26 | """A cached call to get_bb_var('HOST_ARCH', <recipe>)""" | ||
27 | return get_bb_var('HOST_ARCH', recipe) | ||
28 | |||
29 | |||
30 | def only_for_arch(archs, image='core-image-minimal'): | ||
31 | """Decorator for wrapping test cases that can be run only for specific target | ||
32 | architectures. A list of compatible architectures is passed in `archs`. | ||
33 | Current architecture will be determined by parsing bitbake output for | ||
34 | `image` recipe. | ||
35 | """ | ||
36 | def wrapper(func): | ||
37 | @wraps(func) | ||
38 | def wrapped_f(*args, **kwargs): | ||
39 | arch = get_host_arch(image) | ||
40 | if archs and arch not in archs: | ||
41 | raise unittest.SkipTest("Testcase arch dependency not met: %s" % arch) | ||
42 | return func(*args, **kwargs) | ||
43 | wrapped_f.__name__ = func.__name__ | ||
44 | return wrapped_f | ||
45 | return wrapper | ||
46 | |||
47 | def extract_files(debugfs_output): | 27 | def extract_files(debugfs_output): |
48 | """ | 28 | """ |
49 | extract file names from the output of debugfs -R 'ls -p', | 29 | extract file names from the output of debugfs -R 'ls -p', |
@@ -77,22 +57,18 @@ class WicTestCase(OESelftestTestCase): | |||
77 | 57 | ||
78 | def setUpLocal(self): | 58 | def setUpLocal(self): |
79 | """This code is executed before each test method.""" | 59 | """This code is executed before each test method.""" |
80 | self.resultdir = self.builddir + "/wic-tmp/" | 60 | self.resultdir = os.path.join(self.builddir, "wic-tmp") |
81 | super(WicTestCase, self).setUpLocal() | 61 | super(WicTestCase, self).setUpLocal() |
82 | 62 | ||
83 | # Do this here instead of in setUpClass as the base setUp does some | 63 | # Do this here instead of in setUpClass as the base setUp does some |
84 | # clean up which can result in the native tools built earlier in | 64 | # clean up which can result in the native tools built earlier in |
85 | # setUpClass being unavailable. | 65 | # setUpClass being unavailable. |
86 | if not WicTestCase.image_is_ready: | 66 | if not WicTestCase.image_is_ready: |
87 | if get_bb_var('USE_NLS') == 'yes': | 67 | if self.td['USE_NLS'] != 'yes': |
88 | bitbake('wic-tools') | 68 | self.skipTest('wic-tools needs USE_NLS=yes') |
89 | else: | ||
90 | self.skipTest('wic-tools cannot be built due its (intltool|gettext)-native dependency and NLS disable') | ||
91 | 69 | ||
92 | bitbake('core-image-minimal') | 70 | bitbake('wic-tools core-image-minimal core-image-minimal-mtdutils') |
93 | bitbake('core-image-minimal-mtdutils') | ||
94 | WicTestCase.image_is_ready = True | 71 | WicTestCase.image_is_ready = True |
95 | |||
96 | rmtree(self.resultdir, ignore_errors=True) | 72 | rmtree(self.resultdir, ignore_errors=True) |
97 | 73 | ||
98 | def tearDownLocal(self): | 74 | def tearDownLocal(self): |
@@ -103,15 +79,13 @@ class WicTestCase(OESelftestTestCase): | |||
103 | def _get_image_env_path(self, image): | 79 | def _get_image_env_path(self, image): |
104 | """Generate and obtain the path to <image>.env""" | 80 | """Generate and obtain the path to <image>.env""" |
105 | if image not in WicTestCase.wicenv_cache: | 81 | if image not in WicTestCase.wicenv_cache: |
106 | self.assertEqual(0, bitbake('%s -c do_rootfs_wicenv' % image).status) | 82 | bitbake('%s -c do_rootfs_wicenv' % image) |
107 | bb_vars = get_bb_vars(['STAGING_DIR', 'MACHINE'], image) | 83 | stdir = get_bb_var('STAGING_DIR', image) |
108 | stdir = bb_vars['STAGING_DIR'] | 84 | machine = self.td["MACHINE"] |
109 | machine = bb_vars['MACHINE'] | ||
110 | WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata') | 85 | WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata') |
111 | return WicTestCase.wicenv_cache[image] | 86 | return WicTestCase.wicenv_cache[image] |
112 | 87 | ||
113 | class Wic(WicTestCase): | 88 | class CLITests(OESelftestTestCase): |
114 | |||
115 | def test_version(self): | 89 | def test_version(self): |
116 | """Test wic --version""" | 90 | """Test wic --version""" |
117 | runCmd('wic --version') | 91 | runCmd('wic --version') |
@@ -172,68 +146,136 @@ class Wic(WicTestCase): | |||
172 | """Test wic without command""" | 146 | """Test wic without command""" |
173 | self.assertEqual(1, runCmd('wic', ignore_status=True).status) | 147 | self.assertEqual(1, runCmd('wic', ignore_status=True).status) |
174 | 148 | ||
149 | class Wic(WicTestCase): | ||
150 | def test_skip_kernel_install(self): | ||
151 | """Test the functionality of not installing the kernel in the boot directory using the wic plugin""" | ||
152 | # create a temporary file for the WKS content | ||
153 | with NamedTemporaryFile("w", suffix=".wks") as wks: | ||
154 | wks.write( | ||
155 | 'part --source bootimg-efi ' | ||
156 | '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=false" ' | ||
157 | '--label boot --active\n' | ||
158 | ) | ||
159 | wks.flush() | ||
160 | # create a temporary directory to extract the disk image to | ||
161 | with TemporaryDirectory() as tmpdir: | ||
162 | img = 'core-image-minimal' | ||
163 | # build the image using the WKS file | ||
164 | cmd = "wic create %s -e %s -o %s" % ( | ||
165 | wks.name, img, self.resultdir) | ||
166 | runCmd(cmd) | ||
167 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | ||
168 | out = glob(os.path.join( | ||
169 | self.resultdir, "%s-*.direct" % wksname)) | ||
170 | self.assertEqual(1, len(out)) | ||
171 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
172 | # extract the content of the disk image to the temporary directory | ||
173 | cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot) | ||
174 | runCmd(cmd) | ||
175 | # check if the kernel is installed or not | ||
176 | kimgtype = get_bb_var('KERNEL_IMAGETYPE', img) | ||
177 | for file in os.listdir(tmpdir): | ||
178 | if file == kimgtype: | ||
179 | raise AssertionError( | ||
180 | "The kernel image '{}' was found in the partition".format(kimgtype) | ||
181 | ) | ||
182 | |||
183 | def test_kernel_install(self): | ||
184 | """Test the installation of the kernel to the boot directory in the wic plugin""" | ||
185 | # create a temporary file for the WKS content | ||
186 | with NamedTemporaryFile("w", suffix=".wks") as wks: | ||
187 | wks.write( | ||
188 | 'part --source bootimg-efi ' | ||
189 | '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=true" ' | ||
190 | '--label boot --active\n' | ||
191 | ) | ||
192 | wks.flush() | ||
193 | # create a temporary directory to extract the disk image to | ||
194 | with TemporaryDirectory() as tmpdir: | ||
195 | img = 'core-image-minimal' | ||
196 | # build the image using the WKS file | ||
197 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) | ||
198 | runCmd(cmd) | ||
199 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | ||
200 | out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname)) | ||
201 | self.assertEqual(1, len(out)) | ||
202 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
203 | # extract the content of the disk image to the temporary directory | ||
204 | cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot) | ||
205 | runCmd(cmd) | ||
206 | # check if the kernel is installed or not | ||
207 | kimgtype = get_bb_var('KERNEL_IMAGETYPE', img) | ||
208 | found = False | ||
209 | for file in os.listdir(tmpdir): | ||
210 | if file == kimgtype: | ||
211 | found = True | ||
212 | break | ||
213 | self.assertTrue( | ||
214 | found, "The kernel image '{}' was not found in the boot partition".format(kimgtype) | ||
215 | ) | ||
216 | |||
175 | def test_build_image_name(self): | 217 | def test_build_image_name(self): |
176 | """Test wic create wictestdisk --image-name=core-image-minimal""" | 218 | """Test wic create wictestdisk --image-name=core-image-minimal""" |
177 | cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir | 219 | cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir |
178 | runCmd(cmd) | 220 | runCmd(cmd) |
179 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) | 221 | self.assertEqual(1, len(glob(os.path.join (self.resultdir, "wictestdisk-*.direct")))) |
180 | 222 | ||
181 | @only_for_arch(['i586', 'i686', 'x86_64']) | 223 | @skipIfNotArch(['i586', 'i686', 'x86_64']) |
182 | def test_gpt_image(self): | 224 | def test_gpt_image(self): |
183 | """Test creation of core-image-minimal with gpt table and UUID boot""" | 225 | """Test creation of core-image-minimal with gpt table and UUID boot""" |
184 | cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir | 226 | cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir |
185 | runCmd(cmd) | 227 | runCmd(cmd) |
186 | self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) | 228 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct")))) |
187 | 229 | ||
188 | @only_for_arch(['i586', 'i686', 'x86_64']) | 230 | @skipIfNotArch(['i586', 'i686', 'x86_64']) |
189 | def test_iso_image(self): | 231 | def test_iso_image(self): |
190 | """Test creation of hybrid iso image with legacy and EFI boot""" | 232 | """Test creation of hybrid iso image with legacy and EFI boot""" |
191 | config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\ | 233 | config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\ |
192 | 'MACHINE_FEATURES_append = " efi"\n'\ | 234 | 'MACHINE_FEATURES:append = " efi"\n'\ |
193 | 'DEPENDS_pn-core-image-minimal += "syslinux"\n' | 235 | 'DEPENDS:pn-core-image-minimal += "syslinux"\n' |
194 | self.append_config(config) | 236 | self.append_config(config) |
195 | bitbake('core-image-minimal core-image-minimal-initramfs') | 237 | bitbake('core-image-minimal core-image-minimal-initramfs') |
196 | self.remove_config(config) | 238 | self.remove_config(config) |
197 | cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir | 239 | cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir |
198 | runCmd(cmd) | 240 | runCmd(cmd) |
199 | self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct"))) | 241 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "HYBRID_ISO_IMG-*.direct")))) |
200 | self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso"))) | 242 | self.assertEqual(1, len(glob(os.path.join (self.resultdir, "HYBRID_ISO_IMG-*.iso")))) |
201 | 243 | ||
202 | @only_for_arch(['i586', 'i686', 'x86_64']) | 244 | @skipIfNotArch(['i586', 'i686', 'x86_64']) |
203 | def test_qemux86_directdisk(self): | 245 | def test_qemux86_directdisk(self): |
204 | """Test creation of qemux-86-directdisk image""" | 246 | """Test creation of qemux-86-directdisk image""" |
205 | cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir | 247 | cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir |
206 | runCmd(cmd) | 248 | runCmd(cmd) |
207 | self.assertEqual(1, len(glob(self.resultdir + "qemux86-directdisk-*direct"))) | 249 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "qemux86-directdisk-*direct")))) |
208 | 250 | ||
209 | @only_for_arch(['i586', 'i686', 'x86_64']) | 251 | @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64']) |
210 | def test_mkefidisk(self): | 252 | def test_mkefidisk(self): |
211 | """Test creation of mkefidisk image""" | 253 | """Test creation of mkefidisk image""" |
212 | cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir | 254 | cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir |
213 | runCmd(cmd) | 255 | runCmd(cmd) |
214 | self.assertEqual(1, len(glob(self.resultdir + "mkefidisk-*direct"))) | 256 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "mkefidisk-*direct")))) |
215 | 257 | ||
216 | @only_for_arch(['i586', 'i686', 'x86_64']) | 258 | @skipIfNotArch(['i586', 'i686', 'x86_64']) |
217 | def test_bootloader_config(self): | 259 | def test_bootloader_config(self): |
218 | """Test creation of directdisk-bootloader-config image""" | 260 | """Test creation of directdisk-bootloader-config image""" |
219 | config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n' | 261 | config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n' |
220 | self.append_config(config) | 262 | self.append_config(config) |
221 | bitbake('core-image-minimal') | 263 | bitbake('core-image-minimal') |
222 | self.remove_config(config) | 264 | self.remove_config(config) |
223 | cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir | 265 | cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir |
224 | runCmd(cmd) | 266 | runCmd(cmd) |
225 | self.assertEqual(1, len(glob(self.resultdir + "directdisk-bootloader-config-*direct"))) | 267 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-bootloader-config-*direct")))) |
226 | 268 | ||
227 | @only_for_arch(['i586', 'i686', 'x86_64']) | 269 | @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64']) |
228 | def test_systemd_bootdisk(self): | 270 | def test_systemd_bootdisk(self): |
229 | """Test creation of systemd-bootdisk image""" | 271 | """Test creation of systemd-bootdisk image""" |
230 | config = 'MACHINE_FEATURES_append = " efi"\n' | 272 | config = 'MACHINE_FEATURES:append = " efi"\n' |
231 | self.append_config(config) | 273 | self.append_config(config) |
232 | bitbake('core-image-minimal') | 274 | bitbake('core-image-minimal') |
233 | self.remove_config(config) | 275 | self.remove_config(config) |
234 | cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir | 276 | cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir |
235 | runCmd(cmd) | 277 | runCmd(cmd) |
236 | self.assertEqual(1, len(glob(self.resultdir + "systemd-bootdisk-*direct"))) | 278 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "systemd-bootdisk-*direct")))) |
237 | 279 | ||
238 | def test_efi_bootpart(self): | 280 | def test_efi_bootpart(self): |
239 | """Test creation of efi-bootpart image""" | 281 | """Test creation of efi-bootpart image""" |
@@ -242,7 +284,7 @@ class Wic(WicTestCase): | |||
242 | self.append_config('IMAGE_EFI_BOOT_FILES = "%s;kernel"\n' % kimgtype) | 284 | self.append_config('IMAGE_EFI_BOOT_FILES = "%s;kernel"\n' % kimgtype) |
243 | runCmd(cmd) | 285 | runCmd(cmd) |
244 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | 286 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') |
245 | images = glob(self.resultdir + "mkefidisk-*.direct") | 287 | images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct")) |
246 | result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) | 288 | result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) |
247 | self.assertIn("kernel",result.output) | 289 | self.assertIn("kernel",result.output) |
248 | 290 | ||
@@ -252,14 +294,15 @@ class Wic(WicTestCase): | |||
252 | kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal') | 294 | kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal') |
253 | self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype) | 295 | self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype) |
254 | runCmd(cmd) | 296 | runCmd(cmd) |
255 | self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) | 297 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct")))) |
256 | 298 | ||
257 | @only_for_arch(['i586', 'i686', 'x86_64']) | 299 | # TODO this doesn't have to be x86-specific |
300 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
258 | def test_default_output_dir(self): | 301 | def test_default_output_dir(self): |
259 | """Test default output location""" | 302 | """Test default output location""" |
260 | for fname in glob("directdisk-*.direct"): | 303 | for fname in glob("directdisk-*.direct"): |
261 | os.remove(fname) | 304 | os.remove(fname) |
262 | config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n' | 305 | config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n' |
263 | self.append_config(config) | 306 | self.append_config(config) |
264 | bitbake('core-image-minimal') | 307 | bitbake('core-image-minimal') |
265 | self.remove_config(config) | 308 | self.remove_config(config) |
@@ -267,7 +310,7 @@ class Wic(WicTestCase): | |||
267 | runCmd(cmd) | 310 | runCmd(cmd) |
268 | self.assertEqual(1, len(glob("directdisk-*.direct"))) | 311 | self.assertEqual(1, len(glob("directdisk-*.direct"))) |
269 | 312 | ||
270 | @only_for_arch(['i586', 'i686', 'x86_64']) | 313 | @skipIfNotArch(['i586', 'i686', 'x86_64']) |
271 | def test_build_artifacts(self): | 314 | def test_build_artifacts(self): |
272 | """Test wic create directdisk providing all artifacts.""" | 315 | """Test wic create directdisk providing all artifacts.""" |
273 | bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], | 316 | bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], |
@@ -282,28 +325,28 @@ class Wic(WicTestCase): | |||
282 | "-n %(recipe_sysroot_native)s " | 325 | "-n %(recipe_sysroot_native)s " |
283 | "-r %(image_rootfs)s " | 326 | "-r %(image_rootfs)s " |
284 | "-o %(resultdir)s" % bbvars) | 327 | "-o %(resultdir)s" % bbvars) |
285 | self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) | 328 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct")))) |
286 | 329 | ||
287 | def test_compress_gzip(self): | 330 | def test_compress_gzip(self): |
288 | """Test compressing an image with gzip""" | 331 | """Test compressing an image with gzip""" |
289 | runCmd("wic create wictestdisk " | 332 | runCmd("wic create wictestdisk " |
290 | "--image-name core-image-minimal " | 333 | "--image-name core-image-minimal " |
291 | "-c gzip -o %s" % self.resultdir) | 334 | "-c gzip -o %s" % self.resultdir) |
292 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.gz"))) | 335 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.gz")))) |
293 | 336 | ||
294 | def test_compress_bzip2(self): | 337 | def test_compress_bzip2(self): |
295 | """Test compressing an image with bzip2""" | 338 | """Test compressing an image with bzip2""" |
296 | runCmd("wic create wictestdisk " | 339 | runCmd("wic create wictestdisk " |
297 | "--image-name=core-image-minimal " | 340 | "--image-name=core-image-minimal " |
298 | "-c bzip2 -o %s" % self.resultdir) | 341 | "-c bzip2 -o %s" % self.resultdir) |
299 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.bz2"))) | 342 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.bz2")))) |
300 | 343 | ||
301 | def test_compress_xz(self): | 344 | def test_compress_xz(self): |
302 | """Test compressing an image with xz""" | 345 | """Test compressing an image with xz""" |
303 | runCmd("wic create wictestdisk " | 346 | runCmd("wic create wictestdisk " |
304 | "--image-name=core-image-minimal " | 347 | "--image-name=core-image-minimal " |
305 | "--compress-with=xz -o %s" % self.resultdir) | 348 | "--compress-with=xz -o %s" % self.resultdir) |
306 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.xz"))) | 349 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.xz")))) |
307 | 350 | ||
308 | def test_wrong_compressor(self): | 351 | def test_wrong_compressor(self): |
309 | """Test how wic breaks if wrong compressor is provided""" | 352 | """Test how wic breaks if wrong compressor is provided""" |
@@ -317,23 +360,23 @@ class Wic(WicTestCase): | |||
317 | runCmd("wic create wictestdisk " | 360 | runCmd("wic create wictestdisk " |
318 | "--image-name=core-image-minimal " | 361 | "--image-name=core-image-minimal " |
319 | "-D -o %s" % self.resultdir) | 362 | "-D -o %s" % self.resultdir) |
320 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) | 363 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct")))) |
321 | self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*"))) | 364 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*")))) |
322 | 365 | ||
323 | def test_debug_long(self): | 366 | def test_debug_long(self): |
324 | """Test --debug option""" | 367 | """Test --debug option""" |
325 | runCmd("wic create wictestdisk " | 368 | runCmd("wic create wictestdisk " |
326 | "--image-name=core-image-minimal " | 369 | "--image-name=core-image-minimal " |
327 | "--debug -o %s" % self.resultdir) | 370 | "--debug -o %s" % self.resultdir) |
328 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) | 371 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct")))) |
329 | self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*"))) | 372 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*")))) |
330 | 373 | ||
331 | def test_skip_build_check_short(self): | 374 | def test_skip_build_check_short(self): |
332 | """Test -s option""" | 375 | """Test -s option""" |
333 | runCmd("wic create wictestdisk " | 376 | runCmd("wic create wictestdisk " |
334 | "--image-name=core-image-minimal " | 377 | "--image-name=core-image-minimal " |
335 | "-s -o %s" % self.resultdir) | 378 | "-s -o %s" % self.resultdir) |
336 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) | 379 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct")))) |
337 | 380 | ||
338 | def test_skip_build_check_long(self): | 381 | def test_skip_build_check_long(self): |
339 | """Test --skip-build-check option""" | 382 | """Test --skip-build-check option""" |
@@ -341,14 +384,14 @@ class Wic(WicTestCase): | |||
341 | "--image-name=core-image-minimal " | 384 | "--image-name=core-image-minimal " |
342 | "--skip-build-check " | 385 | "--skip-build-check " |
343 | "--outdir %s" % self.resultdir) | 386 | "--outdir %s" % self.resultdir) |
344 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) | 387 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct")))) |
345 | 388 | ||
346 | def test_build_rootfs_short(self): | 389 | def test_build_rootfs_short(self): |
347 | """Test -f option""" | 390 | """Test -f option""" |
348 | runCmd("wic create wictestdisk " | 391 | runCmd("wic create wictestdisk " |
349 | "--image-name=core-image-minimal " | 392 | "--image-name=core-image-minimal " |
350 | "-f -o %s" % self.resultdir) | 393 | "-f -o %s" % self.resultdir) |
351 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) | 394 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct")))) |
352 | 395 | ||
353 | def test_build_rootfs_long(self): | 396 | def test_build_rootfs_long(self): |
354 | """Test --build-rootfs option""" | 397 | """Test --build-rootfs option""" |
@@ -356,9 +399,10 @@ class Wic(WicTestCase): | |||
356 | "--image-name=core-image-minimal " | 399 | "--image-name=core-image-minimal " |
357 | "--build-rootfs " | 400 | "--build-rootfs " |
358 | "--outdir %s" % self.resultdir) | 401 | "--outdir %s" % self.resultdir) |
359 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) | 402 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct")))) |
360 | 403 | ||
361 | @only_for_arch(['i586', 'i686', 'x86_64']) | 404 | # TODO this doesn't have to be x86-specific |
405 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
362 | def test_rootfs_indirect_recipes(self): | 406 | def test_rootfs_indirect_recipes(self): |
363 | """Test usage of rootfs plugin with rootfs recipes""" | 407 | """Test usage of rootfs plugin with rootfs recipes""" |
364 | runCmd("wic create directdisk-multi-rootfs " | 408 | runCmd("wic create directdisk-multi-rootfs " |
@@ -366,9 +410,10 @@ class Wic(WicTestCase): | |||
366 | "--rootfs rootfs1=core-image-minimal " | 410 | "--rootfs rootfs1=core-image-minimal " |
367 | "--rootfs rootfs2=core-image-minimal " | 411 | "--rootfs rootfs2=core-image-minimal " |
368 | "--outdir %s" % self.resultdir) | 412 | "--outdir %s" % self.resultdir) |
369 | self.assertEqual(1, len(glob(self.resultdir + "directdisk-multi-rootfs*.direct"))) | 413 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-multi-rootfs*.direct")))) |
370 | 414 | ||
371 | @only_for_arch(['i586', 'i686', 'x86_64']) | 415 | # TODO this doesn't have to be x86-specific |
416 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
372 | def test_rootfs_artifacts(self): | 417 | def test_rootfs_artifacts(self): |
373 | """Test usage of rootfs plugin with rootfs paths""" | 418 | """Test usage of rootfs plugin with rootfs paths""" |
374 | bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], | 419 | bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], |
@@ -385,7 +430,7 @@ class Wic(WicTestCase): | |||
385 | "--rootfs-dir rootfs1=%(image_rootfs)s " | 430 | "--rootfs-dir rootfs1=%(image_rootfs)s " |
386 | "--rootfs-dir rootfs2=%(image_rootfs)s " | 431 | "--rootfs-dir rootfs2=%(image_rootfs)s " |
387 | "--outdir %(resultdir)s" % bbvars) | 432 | "--outdir %(resultdir)s" % bbvars) |
388 | self.assertEqual(1, len(glob(self.resultdir + "%(wks)s-*.direct" % bbvars))) | 433 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "%(wks)s-*.direct" % bbvars)))) |
389 | 434 | ||
390 | def test_exclude_path(self): | 435 | def test_exclude_path(self): |
391 | """Test --exclude-path wks option.""" | 436 | """Test --exclude-path wks option.""" |
@@ -406,7 +451,7 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r | |||
406 | % (wks_file, self.resultdir)) | 451 | % (wks_file, self.resultdir)) |
407 | 452 | ||
408 | os.remove(wks_file) | 453 | os.remove(wks_file) |
409 | wicout = glob(self.resultdir + "%s-*direct" % 'temp') | 454 | wicout = glob(os.path.join(self.resultdir, "%s-*direct" % 'temp')) |
410 | self.assertEqual(1, len(wicout)) | 455 | self.assertEqual(1, len(wicout)) |
411 | 456 | ||
412 | wicimg = wicout[0] | 457 | wicimg = wicout[0] |
@@ -686,21 +731,130 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc | |||
686 | % (wks_file, self.resultdir), ignore_status=True).status) | 731 | % (wks_file, self.resultdir), ignore_status=True).status) |
687 | os.remove(wks_file) | 732 | os.remove(wks_file) |
688 | 733 | ||
734 | def test_no_fstab_update(self): | ||
735 | """Test --no-fstab-update wks option.""" | ||
736 | |||
737 | oldpath = os.environ['PATH'] | ||
738 | os.environ['PATH'] = get_bb_var("PATH", "wic-tools") | ||
739 | |||
740 | # Get stock fstab from base-files recipe | ||
741 | bitbake('base-files -c do_install') | ||
742 | bf_fstab = os.path.join(get_bb_var('D', 'base-files'), 'etc', 'fstab') | ||
743 | self.assertEqual(True, os.path.exists(bf_fstab)) | ||
744 | bf_fstab_md5sum = runCmd('md5sum %s 2>/dev/null' % bf_fstab).output.split(" ")[0] | ||
745 | |||
746 | try: | ||
747 | no_fstab_update_path = os.path.join(self.resultdir, 'test-no-fstab-update') | ||
748 | os.makedirs(no_fstab_update_path) | ||
749 | wks_file = os.path.join(no_fstab_update_path, 'temp.wks') | ||
750 | with open(wks_file, 'w') as wks: | ||
751 | wks.writelines(['part / --source rootfs --fstype=ext4 --label rootfs\n', | ||
752 | 'part /mnt/p2 --source rootfs --rootfs-dir=core-image-minimal ', | ||
753 | '--fstype=ext4 --label p2 --no-fstab-update\n']) | ||
754 | runCmd("wic create %s -e core-image-minimal -o %s" \ | ||
755 | % (wks_file, self.resultdir)) | ||
756 | |||
757 | part_fstab_md5sum = [] | ||
758 | for i in range(1, 3): | ||
759 | part = glob(os.path.join(self.resultdir, 'temp-*.direct.p') + str(i))[0] | ||
760 | part_fstab = runCmd("debugfs -R 'cat etc/fstab' %s 2>/dev/null" % (part)) | ||
761 | part_fstab_md5sum.append(hashlib.md5((part_fstab.output + "\n\n").encode('utf-8')).hexdigest()) | ||
762 | |||
763 | # '/etc/fstab' in partition 2 should contain the same stock fstab file | ||
764 | # as the one installed by the base-file recipe. | ||
765 | self.assertEqual(bf_fstab_md5sum, part_fstab_md5sum[1]) | ||
766 | |||
767 | # '/etc/fstab' in partition 1 should contain an updated fstab file. | ||
768 | self.assertNotEqual(bf_fstab_md5sum, part_fstab_md5sum[0]) | ||
769 | |||
770 | finally: | ||
771 | os.environ['PATH'] = oldpath | ||
772 | |||
773 | def test_no_fstab_update_errors(self): | ||
774 | """Test --no-fstab-update wks option error handling.""" | ||
775 | wks_file = 'temp.wks' | ||
776 | |||
777 | # Absolute argument. | ||
778 | with open(wks_file, 'w') as wks: | ||
779 | wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update /etc") | ||
780 | self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \ | ||
781 | % (wks_file, self.resultdir), ignore_status=True).status) | ||
782 | os.remove(wks_file) | ||
783 | |||
784 | # Argument pointing to parent directory. | ||
785 | with open(wks_file, 'w') as wks: | ||
786 | wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update ././..") | ||
787 | self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \ | ||
788 | % (wks_file, self.resultdir), ignore_status=True).status) | ||
789 | os.remove(wks_file) | ||
790 | |||
791 | def test_extra_space(self): | ||
792 | """Test --extra-space wks option.""" | ||
793 | extraspace = 1024**3 | ||
794 | runCmd("wic create wictestdisk " | ||
795 | "--image-name core-image-minimal " | ||
796 | "--extra-space %i -o %s" % (extraspace ,self.resultdir)) | ||
797 | wicout = glob(os.path.join(self.resultdir, "wictestdisk-*.direct")) | ||
798 | self.assertEqual(1, len(wicout)) | ||
799 | size = os.path.getsize(wicout[0]) | ||
800 | self.assertTrue(size > extraspace, msg="Extra space not present (%s vs %s)" % (size, extraspace)) | ||
801 | |||
802 | def test_no_table(self): | ||
803 | """Test --no-table wks option.""" | ||
804 | wks_file = 'temp.wks' | ||
805 | |||
806 | # Absolute argument. | ||
807 | with open(wks_file, 'w') as wks: | ||
808 | wks.write("part testspace --no-table --fixed-size 16k --offset 4080k") | ||
809 | runCmd("wic create %s --image-name core-image-minimal -o %s" % (wks_file, self.resultdir)) | ||
810 | |||
811 | wicout = glob(os.path.join(self.resultdir, "*.*")) | ||
812 | |||
813 | self.assertEqual(1, len(wicout)) | ||
814 | size = os.path.getsize(wicout[0]) | ||
815 | self.assertEqual(size, 4 * 1024 * 1024) | ||
816 | |||
817 | os.remove(wks_file) | ||
818 | |||
819 | def test_partition_hidden_attributes(self): | ||
820 | """Test --hidden wks option.""" | ||
821 | wks_file = 'temp.wks' | ||
822 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
823 | try: | ||
824 | with open(wks_file, 'w') as wks: | ||
825 | wks.write(""" | ||
826 | part / --source rootfs --fstype=ext4 | ||
827 | part / --source rootfs --fstype=ext4 --hidden | ||
828 | bootloader --ptable gpt""") | ||
829 | |||
830 | runCmd("wic create %s -e core-image-minimal -o %s" \ | ||
831 | % (wks_file, self.resultdir)) | ||
832 | wicout = os.path.join(self.resultdir, "*.direct") | ||
833 | |||
834 | result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 1" % (sysroot, wicout)) | ||
835 | self.assertEqual('', result.output) | ||
836 | result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 2" % (sysroot, wicout)) | ||
837 | self.assertEqual('RequiredPartition', result.output) | ||
838 | |||
839 | finally: | ||
840 | os.remove(wks_file) | ||
841 | |||
842 | |||
689 | class Wic2(WicTestCase): | 843 | class Wic2(WicTestCase): |
690 | 844 | ||
691 | def test_bmap_short(self): | 845 | def test_bmap_short(self): |
692 | """Test generation of .bmap file -m option""" | 846 | """Test generation of .bmap file -m option""" |
693 | cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir | 847 | cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir |
694 | runCmd(cmd) | 848 | runCmd(cmd) |
695 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) | 849 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct")))) |
696 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) | 850 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap")))) |
697 | 851 | ||
698 | def test_bmap_long(self): | 852 | def test_bmap_long(self): |
699 | """Test generation of .bmap file --bmap option""" | 853 | """Test generation of .bmap file --bmap option""" |
700 | cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir | 854 | cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir |
701 | runCmd(cmd) | 855 | runCmd(cmd) |
702 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) | 856 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct")))) |
703 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) | 857 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap")))) |
704 | 858 | ||
705 | def test_image_env(self): | 859 | def test_image_env(self): |
706 | """Test generation of <image>.env files.""" | 860 | """Test generation of <image>.env files.""" |
@@ -711,7 +865,7 @@ class Wic2(WicTestCase): | |||
711 | basename = bb_vars['IMAGE_BASENAME'] | 865 | basename = bb_vars['IMAGE_BASENAME'] |
712 | self.assertEqual(basename, image) | 866 | self.assertEqual(basename, image) |
713 | path = os.path.join(imgdatadir, basename) + '.env' | 867 | path = os.path.join(imgdatadir, basename) + '.env' |
714 | self.assertTrue(os.path.isfile(path)) | 868 | self.assertTrue(os.path.isfile(path), msg="File %s wasn't generated as expected" % path) |
715 | 869 | ||
716 | wicvars = set(bb_vars['WICVARS'].split()) | 870 | wicvars = set(bb_vars['WICVARS'].split()) |
717 | # filter out optional variables | 871 | # filter out optional variables |
@@ -724,7 +878,7 @@ class Wic2(WicTestCase): | |||
724 | # test if variables used by wic present in the .env file | 878 | # test if variables used by wic present in the .env file |
725 | for var in wicvars: | 879 | for var in wicvars: |
726 | self.assertTrue(var in content, "%s is not in .env file" % var) | 880 | self.assertTrue(var in content, "%s is not in .env file" % var) |
727 | self.assertTrue(content[var]) | 881 | self.assertTrue(content[var], "%s doesn't have a value (%s)" % (var, content[var])) |
728 | 882 | ||
729 | def test_image_vars_dir_short(self): | 883 | def test_image_vars_dir_short(self): |
730 | """Test image vars directory selection -v option""" | 884 | """Test image vars directory selection -v option""" |
@@ -736,7 +890,7 @@ class Wic2(WicTestCase): | |||
736 | "--image-name=%s -v %s -n %s -o %s" | 890 | "--image-name=%s -v %s -n %s -o %s" |
737 | % (image, imgenvdir, native_sysroot, | 891 | % (image, imgenvdir, native_sysroot, |
738 | self.resultdir)) | 892 | self.resultdir)) |
739 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) | 893 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct")))) |
740 | 894 | ||
741 | def test_image_vars_dir_long(self): | 895 | def test_image_vars_dir_long(self): |
742 | """Test image vars directory selection --vars option""" | 896 | """Test image vars directory selection --vars option""" |
@@ -751,58 +905,62 @@ class Wic2(WicTestCase): | |||
751 | "--outdir %s" | 905 | "--outdir %s" |
752 | % (image, imgenvdir, native_sysroot, | 906 | % (image, imgenvdir, native_sysroot, |
753 | self.resultdir)) | 907 | self.resultdir)) |
754 | self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) | 908 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct")))) |
755 | 909 | ||
756 | @only_for_arch(['i586', 'i686', 'x86_64']) | 910 | # TODO this test could also work on aarch64 |
911 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
757 | def test_wic_image_type(self): | 912 | def test_wic_image_type(self): |
758 | """Test building wic images by bitbake""" | 913 | """Test building wic images by bitbake""" |
759 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ | 914 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ |
760 | 'MACHINE_FEATURES_append = " efi"\n' | 915 | 'MACHINE_FEATURES:append = " efi"\n' |
761 | self.append_config(config) | 916 | self.append_config(config) |
762 | self.assertEqual(0, bitbake('wic-image-minimal').status) | 917 | image = 'wic-image-minimal' |
918 | bitbake(image) | ||
763 | self.remove_config(config) | 919 | self.remove_config(config) |
764 | 920 | ||
765 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) | 921 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) |
766 | deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] | 922 | prefix = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.' % bb_vars['IMAGE_LINK_NAME']) |
767 | machine = bb_vars['MACHINE'] | 923 | |
768 | prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' % machine) | ||
769 | # check if we have result image and manifests symlinks | 924 | # check if we have result image and manifests symlinks |
770 | # pointing to existing files | 925 | # pointing to existing files |
771 | for suffix in ('wic', 'manifest'): | 926 | for suffix in ('wic', 'manifest'): |
772 | path = prefix + suffix | 927 | path = prefix + suffix |
773 | self.assertTrue(os.path.islink(path)) | 928 | self.assertTrue(os.path.islink(path), msg="Link %s wasn't generated as expected" % path) |
774 | self.assertTrue(os.path.isfile(os.path.realpath(path))) | 929 | self.assertTrue(os.path.isfile(os.path.realpath(path)), msg="File linked to by %s wasn't generated as expected" % path) |
775 | 930 | ||
776 | @only_for_arch(['i586', 'i686', 'x86_64']) | 931 | # TODO this should work on aarch64 |
932 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
933 | @OETestTag("runqemu") | ||
777 | def test_qemu(self): | 934 | def test_qemu(self): |
778 | """Test wic-image-minimal under qemu""" | 935 | """Test wic-image-minimal under qemu""" |
779 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ | 936 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ |
780 | 'MACHINE_FEATURES_append = " efi"\n' | 937 | 'MACHINE_FEATURES:append = " efi"\n' |
781 | self.append_config(config) | 938 | self.append_config(config) |
782 | self.assertEqual(0, bitbake('wic-image-minimal').status) | 939 | bitbake('wic-image-minimal') |
783 | self.remove_config(config) | 940 | self.remove_config(config) |
784 | 941 | ||
785 | with runqemu('wic-image-minimal', ssh=False) as qemu: | 942 | with runqemu('wic-image-minimal', ssh=False, runqemuparams='nographic') as qemu: |
786 | cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \ | 943 | cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \ |
787 | "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'" | 944 | "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'" |
788 | status, output = qemu.run_serial(cmd) | 945 | status, output = qemu.run_serial(cmd) |
789 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 946 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
790 | self.assertEqual(output, '4') | 947 | self.assertEqual(output, '4') |
791 | cmd = "grep UUID= /etc/fstab" | 948 | cmd = "grep UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba /etc/fstab" |
792 | status, output = qemu.run_serial(cmd) | 949 | status, output = qemu.run_serial(cmd) |
793 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 950 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
794 | self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0') | 951 | self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0') |
795 | 952 | ||
796 | @only_for_arch(['i586', 'i686', 'x86_64']) | 953 | @skipIfNotArch(['i586', 'i686', 'x86_64']) |
954 | @OETestTag("runqemu") | ||
797 | def test_qemu_efi(self): | 955 | def test_qemu_efi(self): |
798 | """Test core-image-minimal efi image under qemu""" | 956 | """Test core-image-minimal efi image under qemu""" |
799 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n' | 957 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n' |
800 | self.append_config(config) | 958 | self.append_config(config) |
801 | self.assertEqual(0, bitbake('core-image-minimal ovmf').status) | 959 | bitbake('core-image-minimal ovmf') |
802 | self.remove_config(config) | 960 | self.remove_config(config) |
803 | 961 | ||
804 | with runqemu('core-image-minimal', ssh=False, | 962 | with runqemu('core-image-minimal', ssh=False, |
805 | runqemuparams='ovmf', image_fstype='wic') as qemu: | 963 | runqemuparams='nographic ovmf', image_fstype='wic') as qemu: |
806 | cmd = "grep sda. /proc/partitions |wc -l" | 964 | cmd = "grep sda. /proc/partitions |wc -l" |
807 | status, output = qemu.run_serial(cmd) | 965 | status, output = qemu.run_serial(cmd) |
808 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 966 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
@@ -831,7 +989,7 @@ class Wic2(WicTestCase): | |||
831 | 989 | ||
832 | wksname = os.path.splitext(os.path.basename(wkspath))[0] | 990 | wksname = os.path.splitext(os.path.basename(wkspath))[0] |
833 | 991 | ||
834 | wicout = glob(self.resultdir + "%s-*direct" % wksname) | 992 | wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname)) |
835 | 993 | ||
836 | if not wicout: | 994 | if not wicout: |
837 | return (p, None) | 995 | return (p, None) |
@@ -976,50 +1134,69 @@ class Wic2(WicTestCase): | |||
976 | size = int(size[:-3]) | 1134 | size = int(size[:-3]) |
977 | self.assertGreaterEqual(size, 204800) | 1135 | self.assertGreaterEqual(size, 204800) |
978 | 1136 | ||
979 | @only_for_arch(['i586', 'i686', 'x86_64']) | 1137 | # TODO this test could also work on aarch64 |
1138 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
1139 | @OETestTag("runqemu") | ||
980 | def test_rawcopy_plugin_qemu(self): | 1140 | def test_rawcopy_plugin_qemu(self): |
981 | """Test rawcopy plugin in qemu""" | 1141 | """Test rawcopy plugin in qemu""" |
982 | # build ext4 and wic images | 1142 | # build ext4 and then use it for a wic image |
983 | for fstype in ("ext4", "wic"): | 1143 | config = 'IMAGE_FSTYPES = "ext4"\n' |
984 | config = 'IMAGE_FSTYPES = "%s"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n' % fstype | 1144 | self.append_config(config) |
985 | self.append_config(config) | 1145 | bitbake('core-image-minimal') |
986 | self.assertEqual(0, bitbake('core-image-minimal').status) | 1146 | image_link_name = get_bb_var('IMAGE_LINK_NAME', 'core-image-minimal') |
987 | self.remove_config(config) | 1147 | self.remove_config(config) |
988 | 1148 | ||
989 | with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: | 1149 | config = 'IMAGE_FSTYPES = "wic"\n' \ |
1150 | 'IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL = "%s"\n'\ | ||
1151 | 'WKS_FILE = "test_rawcopy_plugin.wks.in"\n'\ | ||
1152 | % image_link_name | ||
1153 | self.append_config(config) | ||
1154 | bitbake('core-image-minimal-mtdutils') | ||
1155 | self.remove_config(config) | ||
1156 | |||
1157 | with runqemu('core-image-minimal-mtdutils', ssh=False, | ||
1158 | runqemuparams='nographic', image_fstype='wic') as qemu: | ||
990 | cmd = "grep sda. /proc/partitions |wc -l" | 1159 | cmd = "grep sda. /proc/partitions |wc -l" |
991 | status, output = qemu.run_serial(cmd) | 1160 | status, output = qemu.run_serial(cmd) |
992 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 1161 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
993 | self.assertEqual(output, '2') | 1162 | self.assertEqual(output, '2') |
994 | 1163 | ||
995 | def test_rawcopy_plugin(self): | 1164 | def _rawcopy_plugin(self, fstype): |
996 | """Test rawcopy plugin""" | 1165 | """Test rawcopy plugin""" |
997 | img = 'core-image-minimal' | 1166 | image = 'core-image-minimal' |
998 | machine = get_bb_var('MACHINE', img) | 1167 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) |
1168 | params = ',unpack' if fstype.endswith('.gz') else '' | ||
999 | with NamedTemporaryFile("w", suffix=".wks") as wks: | 1169 | with NamedTemporaryFile("w", suffix=".wks") as wks: |
1000 | wks.writelines(['part /boot --active --source bootimg-pcbios\n', | 1170 | wks.write('part / --source rawcopy --sourceparams="file=%s.%s%s"\n'\ |
1001 | 'part / --source rawcopy --sourceparams="file=%s-%s.ext4" --use-uuid\n'\ | 1171 | % (bb_vars['IMAGE_LINK_NAME'], fstype, params)) |
1002 | % (img, machine), | ||
1003 | 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n']) | ||
1004 | wks.flush() | 1172 | wks.flush() |
1005 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) | 1173 | cmd = "wic create %s -e %s -o %s" % (wks.name, image, self.resultdir) |
1006 | runCmd(cmd) | 1174 | runCmd(cmd) |
1007 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | 1175 | wksname = os.path.splitext(os.path.basename(wks.name))[0] |
1008 | out = glob(self.resultdir + "%s-*direct" % wksname) | 1176 | out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname)) |
1009 | self.assertEqual(1, len(out)) | 1177 | self.assertEqual(1, len(out)) |
1010 | 1178 | ||
1179 | def test_rawcopy_plugin(self): | ||
1180 | self._rawcopy_plugin('ext4') | ||
1181 | |||
1182 | def test_rawcopy_plugin_unpack(self): | ||
1183 | fstype = 'ext4.gz' | ||
1184 | config = 'IMAGE_FSTYPES = "%s"\n' % fstype | ||
1185 | self.append_config(config) | ||
1186 | self.assertEqual(0, bitbake('core-image-minimal').status) | ||
1187 | self.remove_config(config) | ||
1188 | self._rawcopy_plugin(fstype) | ||
1189 | |||
1011 | def test_empty_plugin(self): | 1190 | def test_empty_plugin(self): |
1012 | """Test empty plugin""" | 1191 | """Test empty plugin""" |
1013 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n' | 1192 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n' |
1014 | self.append_config(config) | 1193 | self.append_config(config) |
1015 | self.assertEqual(0, bitbake('core-image-minimal').status) | 1194 | image = 'core-image-minimal' |
1195 | bitbake(image) | ||
1016 | self.remove_config(config) | 1196 | self.remove_config(config) |
1017 | 1197 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) | |
1018 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) | 1198 | image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME']) |
1019 | deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] | 1199 | self.assertTrue(os.path.exists(image_path), msg="Image file %s wasn't generated as expected" % image_path) |
1020 | machine = bb_vars['MACHINE'] | ||
1021 | image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine) | ||
1022 | self.assertEqual(True, os.path.exists(image_path)) | ||
1023 | 1200 | ||
1024 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | 1201 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') |
1025 | 1202 | ||
@@ -1028,15 +1205,17 @@ class Wic2(WicTestCase): | |||
1028 | result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot)) | 1205 | result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot)) |
1029 | self.assertEqual('1', result.output) | 1206 | self.assertEqual('1', result.output) |
1030 | 1207 | ||
1031 | @only_for_arch(['i586', 'i686', 'x86_64']) | 1208 | @skipIfNotArch(['i586', 'i686', 'x86_64']) |
1209 | @OETestTag("runqemu") | ||
1032 | def test_biosplusefi_plugin_qemu(self): | 1210 | def test_biosplusefi_plugin_qemu(self): |
1033 | """Test biosplusefi plugin in qemu""" | 1211 | """Test biosplusefi plugin in qemu""" |
1034 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n' | 1212 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n' |
1035 | self.append_config(config) | 1213 | self.append_config(config) |
1036 | self.assertEqual(0, bitbake('core-image-minimal').status) | 1214 | bitbake('core-image-minimal') |
1037 | self.remove_config(config) | 1215 | self.remove_config(config) |
1038 | 1216 | ||
1039 | with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: | 1217 | with runqemu('core-image-minimal', ssh=False, |
1218 | runqemuparams='nographic', image_fstype='wic') as qemu: | ||
1040 | # Check that we have ONLY two /dev/sda* partitions (/boot and /) | 1219 | # Check that we have ONLY two /dev/sda* partitions (/boot and /) |
1041 | cmd = "grep sda. /proc/partitions | wc -l" | 1220 | cmd = "grep sda. /proc/partitions | wc -l" |
1042 | status, output = qemu.run_serial(cmd) | 1221 | status, output = qemu.run_serial(cmd) |
@@ -1059,7 +1238,7 @@ class Wic2(WicTestCase): | |||
1059 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 1238 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
1060 | self.assertEqual(output, '*') | 1239 | self.assertEqual(output, '*') |
1061 | 1240 | ||
1062 | @only_for_arch(['i586', 'i686', 'x86_64']) | 1241 | @skipIfNotArch(['i586', 'i686', 'x86_64']) |
1063 | def test_biosplusefi_plugin(self): | 1242 | def test_biosplusefi_plugin(self): |
1064 | """Test biosplusefi plugin""" | 1243 | """Test biosplusefi plugin""" |
1065 | # Wic generation below may fail depending on the order of the unittests | 1244 | # Wic generation below may fail depending on the order of the unittests |
@@ -1068,9 +1247,9 @@ class Wic2(WicTestCase): | |||
1068 | # If an image hasn't been built yet, directory ${STAGING_DATADIR}/syslinux won't exists and _get_bootimg_dir() | 1247 | # If an image hasn't been built yet, directory ${STAGING_DATADIR}/syslinux won't exists and _get_bootimg_dir() |
1069 | # will raise with "Couldn't find correct bootimg_dir" | 1248 | # will raise with "Couldn't find correct bootimg_dir" |
1070 | # The easiest way to work-around this issue is to make sure we already built an image here, hence the bitbake call | 1249 | # The easiest way to work-around this issue is to make sure we already built an image here, hence the bitbake call |
1071 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n' | 1250 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n' |
1072 | self.append_config(config) | 1251 | self.append_config(config) |
1073 | self.assertEqual(0, bitbake('core-image-minimal').status) | 1252 | bitbake('core-image-minimal') |
1074 | self.remove_config(config) | 1253 | self.remove_config(config) |
1075 | 1254 | ||
1076 | img = 'core-image-minimal' | 1255 | img = 'core-image-minimal' |
@@ -1082,9 +1261,60 @@ class Wic2(WicTestCase): | |||
1082 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) | 1261 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) |
1083 | runCmd(cmd) | 1262 | runCmd(cmd) |
1084 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | 1263 | wksname = os.path.splitext(os.path.basename(wks.name))[0] |
1085 | out = glob(self.resultdir + "%s-*.direct" % wksname) | 1264 | out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname)) |
1265 | self.assertEqual(1, len(out)) | ||
1266 | |||
1267 | @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64']) | ||
1268 | def test_uefi_kernel(self): | ||
1269 | """ Test uefi-kernel in wic """ | ||
1270 | config = 'IMAGE_EFI_BOOT_FILES="/etc/fstab;testfile"\nIMAGE_FSTYPES = "wic"\nWKS_FILE = "test_uefikernel.wks"\nMACHINE_FEATURES:append = " efi"\n' | ||
1271 | self.append_config(config) | ||
1272 | bitbake('core-image-minimal') | ||
1273 | self.remove_config(config) | ||
1274 | |||
1275 | img = 'core-image-minimal' | ||
1276 | with NamedTemporaryFile("w", suffix=".wks") as wks: | ||
1277 | wks.writelines(['part /boot --source bootimg-efi --sourceparams="loader=uefi-kernel"\n' | ||
1278 | 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\ | ||
1279 | 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n']) | ||
1280 | wks.flush() | ||
1281 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) | ||
1282 | runCmd(cmd) | ||
1283 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | ||
1284 | out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname)) | ||
1086 | self.assertEqual(1, len(out)) | 1285 | self.assertEqual(1, len(out)) |
1087 | 1286 | ||
1287 | # TODO this test could also work on aarch64 | ||
1288 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
1289 | @OETestTag("runqemu") | ||
1290 | def test_efi_plugin_unified_kernel_image_qemu(self): | ||
1291 | """Test efi plugin's Unified Kernel Image feature in qemu""" | ||
1292 | config = 'IMAGE_FSTYPES = "wic"\n'\ | ||
1293 | 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\ | ||
1294 | 'WKS_FILE = "test_efi_plugin.wks"\n'\ | ||
1295 | 'MACHINE_FEATURES:append = " efi"\n' | ||
1296 | self.append_config(config) | ||
1297 | bitbake('core-image-minimal core-image-minimal-initramfs ovmf') | ||
1298 | self.remove_config(config) | ||
1299 | |||
1300 | with runqemu('core-image-minimal', ssh=False, | ||
1301 | runqemuparams='nographic ovmf', image_fstype='wic') as qemu: | ||
1302 | # Check that /boot has EFI bootx64.efi (required for EFI) | ||
1303 | cmd = "ls /boot/EFI/BOOT/bootx64.efi | wc -l" | ||
1304 | status, output = qemu.run_serial(cmd) | ||
1305 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
1306 | self.assertEqual(output, '1') | ||
1307 | # Check that /boot has EFI/Linux/linux.efi (required for Unified Kernel Images auto detection) | ||
1308 | cmd = "ls /boot/EFI/Linux/linux.efi | wc -l" | ||
1309 | status, output = qemu.run_serial(cmd) | ||
1310 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
1311 | self.assertEqual(output, '1') | ||
1312 | # Check that /boot doesn't have loader/entries/boot.conf (Unified Kernel Images are auto detected by the bootloader) | ||
1313 | cmd = "ls /boot/loader/entries/boot.conf 2&>/dev/null | wc -l" | ||
1314 | status, output = qemu.run_serial(cmd) | ||
1315 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
1316 | self.assertEqual(output, '0') | ||
1317 | |||
1088 | def test_fs_types(self): | 1318 | def test_fs_types(self): |
1089 | """Test filesystem types for empty and not empty partitions""" | 1319 | """Test filesystem types for empty and not empty partitions""" |
1090 | img = 'core-image-minimal' | 1320 | img = 'core-image-minimal' |
@@ -1101,7 +1331,7 @@ class Wic2(WicTestCase): | |||
1101 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) | 1331 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) |
1102 | runCmd(cmd) | 1332 | runCmd(cmd) |
1103 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | 1333 | wksname = os.path.splitext(os.path.basename(wks.name))[0] |
1104 | out = glob(self.resultdir + "%s-*direct" % wksname) | 1334 | out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname)) |
1105 | self.assertEqual(1, len(out)) | 1335 | self.assertEqual(1, len(out)) |
1106 | 1336 | ||
1107 | def test_kickstart_parser(self): | 1337 | def test_kickstart_parser(self): |
@@ -1113,7 +1343,7 @@ class Wic2(WicTestCase): | |||
1113 | cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir) | 1343 | cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir) |
1114 | runCmd(cmd) | 1344 | runCmd(cmd) |
1115 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | 1345 | wksname = os.path.splitext(os.path.basename(wks.name))[0] |
1116 | out = glob(self.resultdir + "%s-*direct" % wksname) | 1346 | out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname)) |
1117 | self.assertEqual(1, len(out)) | 1347 | self.assertEqual(1, len(out)) |
1118 | 1348 | ||
1119 | def test_image_bootpart_globbed(self): | 1349 | def test_image_bootpart_globbed(self): |
@@ -1124,11 +1354,11 @@ class Wic2(WicTestCase): | |||
1124 | self.append_config(config) | 1354 | self.append_config(config) |
1125 | runCmd(cmd) | 1355 | runCmd(cmd) |
1126 | self.remove_config(config) | 1356 | self.remove_config(config) |
1127 | self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) | 1357 | self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct")))) |
1128 | 1358 | ||
1129 | def test_sparse_copy(self): | 1359 | def test_sparse_copy(self): |
1130 | """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs""" | 1360 | """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs""" |
1131 | libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'wic') | 1361 | libpath = os.path.join(self.td['COREBASE'], 'scripts', 'lib', 'wic') |
1132 | sys.path.insert(0, libpath) | 1362 | sys.path.insert(0, libpath) |
1133 | from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp | 1363 | from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp |
1134 | with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse: | 1364 | with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse: |
@@ -1154,12 +1384,148 @@ class Wic2(WicTestCase): | |||
1154 | self.assertEqual(dest_stat.st_blocks, 8) | 1384 | self.assertEqual(dest_stat.st_blocks, 8) |
1155 | os.unlink(dest) | 1385 | os.unlink(dest) |
1156 | 1386 | ||
1387 | def test_mkfs_extraopts(self): | ||
1388 | """Test wks option --mkfs-extraopts for empty and not empty partitions""" | ||
1389 | img = 'core-image-minimal' | ||
1390 | with NamedTemporaryFile("w", suffix=".wks") as wks: | ||
1391 | wks.writelines( | ||
1392 | ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n', | ||
1393 | "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n", | ||
1394 | 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n', | ||
1395 | 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n', | ||
1396 | 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n', | ||
1397 | 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n', | ||
1398 | 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n']) | ||
1399 | wks.flush() | ||
1400 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) | ||
1401 | runCmd(cmd) | ||
1402 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | ||
1403 | out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname)) | ||
1404 | self.assertEqual(1, len(out)) | ||
1405 | |||
1406 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
1407 | @OETestTag("runqemu") | ||
1408 | def test_expand_mbr_image(self): | ||
1409 | """Test wic write --expand command for mbr image""" | ||
1410 | # build an image | ||
1411 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n' | ||
1412 | self.append_config(config) | ||
1413 | image = 'core-image-minimal' | ||
1414 | bitbake(image) | ||
1415 | |||
1416 | # get path to the image | ||
1417 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) | ||
1418 | image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME']) | ||
1419 | |||
1420 | self.remove_config(config) | ||
1421 | |||
1422 | try: | ||
1423 | # expand image to 1G | ||
1424 | new_image_path = None | ||
1425 | with NamedTemporaryFile(mode='wb', suffix='.wic.exp', | ||
1426 | dir=bb_vars['DEPLOY_DIR_IMAGE'], delete=False) as sparse: | ||
1427 | sparse.truncate(1024 ** 3) | ||
1428 | new_image_path = sparse.name | ||
1429 | |||
1430 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
1431 | cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path) | ||
1432 | runCmd(cmd) | ||
1433 | |||
1434 | # check if partitions are expanded | ||
1435 | orig = runCmd("wic ls %s -n %s" % (image_path, sysroot)) | ||
1436 | exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot)) | ||
1437 | orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]] | ||
1438 | exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]] | ||
1439 | self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized | ||
1440 | self.assertTrue(orig_sizes[1] < exp_sizes[1], msg="Partition size wasn't enlarged (%s vs %s)" % (orig_sizes[1], exp_sizes[1])) | ||
1441 | |||
1442 | # Check if all free space is partitioned | ||
1443 | result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path)) | ||
1444 | self.assertIn("0 B, 0 bytes, 0 sectors", result.output) | ||
1445 | |||
1446 | os.rename(image_path, image_path + '.bak') | ||
1447 | os.rename(new_image_path, image_path) | ||
1448 | |||
1449 | # Check if it boots in qemu | ||
1450 | with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu: | ||
1451 | cmd = "ls /etc/" | ||
1452 | status, output = qemu.run_serial('true') | ||
1453 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
1454 | finally: | ||
1455 | if os.path.exists(new_image_path): | ||
1456 | os.unlink(new_image_path) | ||
1457 | if os.path.exists(image_path + '.bak'): | ||
1458 | os.rename(image_path + '.bak', image_path) | ||
1459 | |||
1460 | def test_gpt_partition_name(self): | ||
1461 | """Test --part-name argument to set partition name in GPT table""" | ||
1462 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "test_gpt_partition_name.wks"\n' | ||
1463 | self.append_config(config) | ||
1464 | image = 'core-image-minimal' | ||
1465 | bitbake(image) | ||
1466 | self.remove_config(config) | ||
1467 | deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE') | ||
1468 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) | ||
1469 | image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME']) | ||
1470 | |||
1471 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
1472 | |||
1473 | # Image is created | ||
1474 | self.assertTrue(os.path.exists(image_path), "image file %s doesn't exist" % image_path) | ||
1475 | |||
1476 | # Check the names of the three partitions | ||
1477 | # as listed in test_gpt_partition_name.wks | ||
1478 | result = runCmd("%s/usr/sbin/sfdisk --part-label %s 1" % (sysroot, image_path)) | ||
1479 | self.assertEqual('boot-A', result.output) | ||
1480 | result = runCmd("%s/usr/sbin/sfdisk --part-label %s 2" % (sysroot, image_path)) | ||
1481 | self.assertEqual('root-A', result.output) | ||
1482 | # When the --part-name is not defined, the partition name is equal to the --label | ||
1483 | result = runCmd("%s/usr/sbin/sfdisk --part-label %s 3" % (sysroot, image_path)) | ||
1484 | self.assertEqual('ext-space', result.output) | ||
1485 | |||
1486 | def test_empty_zeroize_plugin(self): | ||
1487 | img = 'core-image-minimal' | ||
1488 | expected_size = [ 1024*1024, # 1M | ||
1489 | 512*1024, # 512K | ||
1490 | 2*1024*1024] # 2M | ||
1491 | # Check combination of sourceparams | ||
1492 | with NamedTemporaryFile("w", suffix=".wks") as wks: | ||
1493 | wks.writelines( | ||
1494 | ['part empty --source empty --sourceparams="fill" --ondisk sda --fixed-size 1M\n', | ||
1495 | 'part empty --source empty --sourceparams="size=512K" --ondisk sda --size 1M --align 1024\n', | ||
1496 | 'part empty --source empty --sourceparams="size=2048k,bs=512K" --ondisk sda --size 4M --align 1024\n' | ||
1497 | ]) | ||
1498 | wks.flush() | ||
1499 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) | ||
1500 | runCmd(cmd) | ||
1501 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | ||
1502 | wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname)) | ||
1503 | # Skip the complete image and just look at the single partitions | ||
1504 | for idx, value in enumerate(wicout[1:]): | ||
1505 | self.logger.info(wicout[idx]) | ||
1506 | # Check if partitions are actually zeroized | ||
1507 | with open(wicout[idx], mode="rb") as fd: | ||
1508 | ba = bytearray(fd.read()) | ||
1509 | for b in ba: | ||
1510 | self.assertEqual(b, 0) | ||
1511 | self.assertEqual(expected_size[idx], os.path.getsize(wicout[idx])) | ||
1512 | |||
1513 | # Check inconsistency check between "fill" and "--size" parameter | ||
1514 | with NamedTemporaryFile("w", suffix=".wks") as wks: | ||
1515 | wks.writelines(['part empty --source empty --sourceparams="fill" --ondisk sda --size 1M\n']) | ||
1516 | wks.flush() | ||
1517 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) | ||
1518 | result = runCmd(cmd, ignore_status=True) | ||
1519 | self.assertIn("Source parameter 'fill' only works with the '--fixed-size' option, exiting.", result.output) | ||
1520 | self.assertNotEqual(0, result.status) | ||
1521 | |||
1522 | class ModifyTests(WicTestCase): | ||
1157 | def test_wic_ls(self): | 1523 | def test_wic_ls(self): |
1158 | """Test listing image content using 'wic ls'""" | 1524 | """Test listing image content using 'wic ls'""" |
1159 | runCmd("wic create wictestdisk " | 1525 | runCmd("wic create wictestdisk " |
1160 | "--image-name=core-image-minimal " | 1526 | "--image-name=core-image-minimal " |
1161 | "-D -o %s" % self.resultdir) | 1527 | "-D -o %s" % self.resultdir) |
1162 | images = glob(self.resultdir + "wictestdisk-*.direct") | 1528 | images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct")) |
1163 | self.assertEqual(1, len(images)) | 1529 | self.assertEqual(1, len(images)) |
1164 | 1530 | ||
1165 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | 1531 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') |
@@ -1177,7 +1543,7 @@ class Wic2(WicTestCase): | |||
1177 | runCmd("wic create wictestdisk " | 1543 | runCmd("wic create wictestdisk " |
1178 | "--image-name=core-image-minimal " | 1544 | "--image-name=core-image-minimal " |
1179 | "-D -o %s" % self.resultdir) | 1545 | "-D -o %s" % self.resultdir) |
1180 | images = glob(self.resultdir + "wictestdisk-*.direct") | 1546 | images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct")) |
1181 | self.assertEqual(1, len(images)) | 1547 | self.assertEqual(1, len(images)) |
1182 | 1548 | ||
1183 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | 1549 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') |
@@ -1195,7 +1561,7 @@ class Wic2(WicTestCase): | |||
1195 | # check if file is there | 1561 | # check if file is there |
1196 | result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) | 1562 | result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) |
1197 | self.assertEqual(7, len(result.output.split('\n'))) | 1563 | self.assertEqual(7, len(result.output.split('\n'))) |
1198 | self.assertTrue(os.path.basename(testfile.name) in result.output) | 1564 | self.assertIn(os.path.basename(testfile.name), result.output) |
1199 | 1565 | ||
1200 | # prepare directory | 1566 | # prepare directory |
1201 | testdir = os.path.join(self.resultdir, 'wic-test-cp-dir') | 1567 | testdir = os.path.join(self.resultdir, 'wic-test-cp-dir') |
@@ -1209,13 +1575,13 @@ class Wic2(WicTestCase): | |||
1209 | # check if directory is there | 1575 | # check if directory is there |
1210 | result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) | 1576 | result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) |
1211 | self.assertEqual(8, len(result.output.split('\n'))) | 1577 | self.assertEqual(8, len(result.output.split('\n'))) |
1212 | self.assertTrue(os.path.basename(testdir) in result.output) | 1578 | self.assertIn(os.path.basename(testdir), result.output) |
1213 | 1579 | ||
1214 | # copy the file from the partition and check if it succeeds | 1580 | # copy the file from the partition and check if it succeeds |
1215 | dest = '%s-cp' % testfile.name | 1581 | dest = '%s-cp' % testfile.name |
1216 | runCmd("wic cp %s:1/%s %s -n %s" % (images[0], | 1582 | runCmd("wic cp %s:1/%s %s -n %s" % (images[0], |
1217 | os.path.basename(testfile.name), dest, sysroot)) | 1583 | os.path.basename(testfile.name), dest, sysroot)) |
1218 | self.assertTrue(os.path.exists(dest)) | 1584 | self.assertTrue(os.path.exists(dest), msg="File %s wasn't generated as expected" % dest) |
1219 | 1585 | ||
1220 | 1586 | ||
1221 | def test_wic_rm(self): | 1587 | def test_wic_rm(self): |
@@ -1223,105 +1589,35 @@ class Wic2(WicTestCase): | |||
1223 | runCmd("wic create mkefidisk " | 1589 | runCmd("wic create mkefidisk " |
1224 | "--image-name=core-image-minimal " | 1590 | "--image-name=core-image-minimal " |
1225 | "-D -o %s" % self.resultdir) | 1591 | "-D -o %s" % self.resultdir) |
1226 | images = glob(self.resultdir + "mkefidisk-*.direct") | 1592 | images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct")) |
1227 | self.assertEqual(1, len(images)) | 1593 | self.assertEqual(1, len(images)) |
1228 | 1594 | ||
1229 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | 1595 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') |
1596 | # Not bulletproof but hopefully sufficient | ||
1597 | kerneltype = get_bb_var('KERNEL_IMAGETYPE', 'virtual/kernel') | ||
1230 | 1598 | ||
1231 | # list directory content of the first partition | 1599 | # list directory content of the first partition |
1232 | result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) | 1600 | result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) |
1233 | self.assertIn('\nBZIMAGE ', result.output) | 1601 | self.assertIn('\n%s ' % kerneltype.upper(), result.output) |
1234 | self.assertIn('\nEFI <DIR> ', result.output) | 1602 | self.assertIn('\nEFI <DIR> ', result.output) |
1235 | 1603 | ||
1236 | # remove file | 1604 | # remove file. EFI partitions are case-insensitive so exercise that too |
1237 | runCmd("wic rm %s:1/bzimage -n %s" % (images[0], sysroot)) | 1605 | runCmd("wic rm %s:1/%s -n %s" % (images[0], kerneltype.lower(), sysroot)) |
1238 | 1606 | ||
1239 | # remove directory | 1607 | # remove directory |
1240 | runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot)) | 1608 | runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot)) |
1241 | 1609 | ||
1242 | # check if they're removed | 1610 | # check if they're removed |
1243 | result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) | 1611 | result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) |
1244 | self.assertNotIn('\nBZIMAGE ', result.output) | 1612 | self.assertNotIn('\n%s ' % kerneltype.upper(), result.output) |
1245 | self.assertNotIn('\nEFI <DIR> ', result.output) | 1613 | self.assertNotIn('\nEFI <DIR> ', result.output) |
1246 | 1614 | ||
1247 | def test_mkfs_extraopts(self): | ||
1248 | """Test wks option --mkfs-extraopts for empty and not empty partitions""" | ||
1249 | img = 'core-image-minimal' | ||
1250 | with NamedTemporaryFile("w", suffix=".wks") as wks: | ||
1251 | wks.writelines( | ||
1252 | ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n', | ||
1253 | "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n", | ||
1254 | 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n', | ||
1255 | 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n', | ||
1256 | 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n', | ||
1257 | 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n', | ||
1258 | 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n']) | ||
1259 | wks.flush() | ||
1260 | cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) | ||
1261 | runCmd(cmd) | ||
1262 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | ||
1263 | out = glob(self.resultdir + "%s-*direct" % wksname) | ||
1264 | self.assertEqual(1, len(out)) | ||
1265 | |||
1266 | def test_expand_mbr_image(self): | ||
1267 | """Test wic write --expand command for mbr image""" | ||
1268 | # build an image | ||
1269 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n' | ||
1270 | self.append_config(config) | ||
1271 | self.assertEqual(0, bitbake('core-image-minimal').status) | ||
1272 | |||
1273 | # get path to the image | ||
1274 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) | ||
1275 | deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] | ||
1276 | machine = bb_vars['MACHINE'] | ||
1277 | image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine) | ||
1278 | |||
1279 | self.remove_config(config) | ||
1280 | |||
1281 | try: | ||
1282 | # expand image to 1G | ||
1283 | new_image_path = None | ||
1284 | with NamedTemporaryFile(mode='wb', suffix='.wic.exp', | ||
1285 | dir=deploy_dir, delete=False) as sparse: | ||
1286 | sparse.truncate(1024 ** 3) | ||
1287 | new_image_path = sparse.name | ||
1288 | |||
1289 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
1290 | cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path) | ||
1291 | runCmd(cmd) | ||
1292 | |||
1293 | # check if partitions are expanded | ||
1294 | orig = runCmd("wic ls %s -n %s" % (image_path, sysroot)) | ||
1295 | exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot)) | ||
1296 | orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]] | ||
1297 | exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]] | ||
1298 | self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized | ||
1299 | self.assertTrue(orig_sizes[1] < exp_sizes[1]) | ||
1300 | |||
1301 | # Check if all free space is partitioned | ||
1302 | result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path)) | ||
1303 | self.assertTrue("0 B, 0 bytes, 0 sectors" in result.output) | ||
1304 | |||
1305 | os.rename(image_path, image_path + '.bak') | ||
1306 | os.rename(new_image_path, image_path) | ||
1307 | |||
1308 | # Check if it boots in qemu | ||
1309 | with runqemu('core-image-minimal', ssh=False) as qemu: | ||
1310 | cmd = "ls /etc/" | ||
1311 | status, output = qemu.run_serial('true') | ||
1312 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
1313 | finally: | ||
1314 | if os.path.exists(new_image_path): | ||
1315 | os.unlink(new_image_path) | ||
1316 | if os.path.exists(image_path + '.bak'): | ||
1317 | os.rename(image_path + '.bak', image_path) | ||
1318 | |||
1319 | def test_wic_ls_ext(self): | 1615 | def test_wic_ls_ext(self): |
1320 | """Test listing content of the ext partition using 'wic ls'""" | 1616 | """Test listing content of the ext partition using 'wic ls'""" |
1321 | runCmd("wic create wictestdisk " | 1617 | runCmd("wic create wictestdisk " |
1322 | "--image-name=core-image-minimal " | 1618 | "--image-name=core-image-minimal " |
1323 | "-D -o %s" % self.resultdir) | 1619 | "-D -o %s" % self.resultdir) |
1324 | images = glob(self.resultdir + "wictestdisk-*.direct") | 1620 | images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct")) |
1325 | self.assertEqual(1, len(images)) | 1621 | self.assertEqual(1, len(images)) |
1326 | 1622 | ||
1327 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | 1623 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') |
@@ -1329,14 +1625,14 @@ class Wic2(WicTestCase): | |||
1329 | # list directory content of the second ext4 partition | 1625 | # list directory content of the second ext4 partition |
1330 | result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) | 1626 | result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) |
1331 | self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset( | 1627 | self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset( |
1332 | set(line.split()[-1] for line in result.output.split('\n') if line))) | 1628 | set(line.split()[-1] for line in result.output.split('\n') if line)), msg="Expected directories not present %s" % result.output) |
1333 | 1629 | ||
1334 | def test_wic_cp_ext(self): | 1630 | def test_wic_cp_ext(self): |
1335 | """Test copy files and directories to the ext partition.""" | 1631 | """Test copy files and directories to the ext partition.""" |
1336 | runCmd("wic create wictestdisk " | 1632 | runCmd("wic create wictestdisk " |
1337 | "--image-name=core-image-minimal " | 1633 | "--image-name=core-image-minimal " |
1338 | "-D -o %s" % self.resultdir) | 1634 | "-D -o %s" % self.resultdir) |
1339 | images = glob(self.resultdir + "wictestdisk-*.direct") | 1635 | images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct")) |
1340 | self.assertEqual(1, len(images)) | 1636 | self.assertEqual(1, len(images)) |
1341 | 1637 | ||
1342 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | 1638 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') |
@@ -1344,7 +1640,7 @@ class Wic2(WicTestCase): | |||
1344 | # list directory content of the ext4 partition | 1640 | # list directory content of the ext4 partition |
1345 | result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) | 1641 | result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) |
1346 | dirs = set(line.split()[-1] for line in result.output.split('\n') if line) | 1642 | dirs = set(line.split()[-1] for line in result.output.split('\n') if line) |
1347 | self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs)) | 1643 | self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs), msg="Expected directories not present %s" % dirs) |
1348 | 1644 | ||
1349 | with NamedTemporaryFile("w", suffix=".wic-cp") as testfile: | 1645 | with NamedTemporaryFile("w", suffix=".wic-cp") as testfile: |
1350 | testfile.write("test") | 1646 | testfile.write("test") |
@@ -1359,12 +1655,12 @@ class Wic2(WicTestCase): | |||
1359 | 1655 | ||
1360 | # check if the file to copy is in the partition | 1656 | # check if the file to copy is in the partition |
1361 | result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) | 1657 | result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) |
1362 | self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line]) | 1658 | self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line]) |
1363 | 1659 | ||
1364 | # copy file from the partition, replace the temporary file content with it and | 1660 | # copy file from the partition, replace the temporary file content with it and |
1365 | # check for the file size to validate the copy | 1661 | # check for the file size to validate the copy |
1366 | runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot)) | 1662 | runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot)) |
1367 | self.assertTrue(os.stat(testfile.name).st_size > 0) | 1663 | self.assertTrue(os.stat(testfile.name).st_size > 0, msg="Filesize not as expected %s" % os.stat(testfile.name).st_size) |
1368 | 1664 | ||
1369 | 1665 | ||
1370 | def test_wic_rm_ext(self): | 1666 | def test_wic_rm_ext(self): |
@@ -1372,25 +1668,25 @@ class Wic2(WicTestCase): | |||
1372 | runCmd("wic create mkefidisk " | 1668 | runCmd("wic create mkefidisk " |
1373 | "--image-name=core-image-minimal " | 1669 | "--image-name=core-image-minimal " |
1374 | "-D -o %s" % self.resultdir) | 1670 | "-D -o %s" % self.resultdir) |
1375 | images = glob(self.resultdir + "mkefidisk-*.direct") | 1671 | images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct")) |
1376 | self.assertEqual(1, len(images)) | 1672 | self.assertEqual(1, len(images)) |
1377 | 1673 | ||
1378 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | 1674 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') |
1379 | 1675 | ||
1380 | # list directory content of the /etc directory on ext4 partition | 1676 | # list directory content of the /etc directory on ext4 partition |
1381 | result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) | 1677 | result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) |
1382 | self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line]) | 1678 | self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line]) |
1383 | 1679 | ||
1384 | # remove file | 1680 | # remove file |
1385 | runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot)) | 1681 | runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot)) |
1386 | 1682 | ||
1387 | # check if it's removed | 1683 | # check if it's removed |
1388 | result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) | 1684 | result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) |
1389 | self.assertTrue('fstab' not in [line.split()[-1] for line in result.output.split('\n') if line]) | 1685 | self.assertNotIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line]) |
1390 | 1686 | ||
1391 | # remove non-empty directory | 1687 | # remove non-empty directory |
1392 | runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot)) | 1688 | runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot)) |
1393 | 1689 | ||
1394 | # check if it's removed | 1690 | # check if it's removed |
1395 | result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) | 1691 | result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) |
1396 | self.assertTrue('etc' not in [line.split()[-1] for line in result.output.split('\n') if line]) | 1692 | self.assertNotIn('etc', [line.split()[-1] for line in result.output.split('\n') if line]) |
diff --git a/meta/lib/oeqa/selftest/cases/wrapper.py b/meta/lib/oeqa/selftest/cases/wrapper.py new file mode 100644 index 0000000000..f2be44262c --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/wrapper.py | |||
@@ -0,0 +1,16 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | from oeqa.selftest.case import OESelftestTestCase | ||
7 | from oeqa.utils.commands import bitbake | ||
8 | |||
9 | class WrapperTests(OESelftestTestCase): | ||
10 | def test_shebang_wrapper(self): | ||
11 | """ | ||
12 | Summary: Build a recipe which will fail if the cmdline_shebang_wrapper function is defective. | ||
13 | Expected: Exit status to be 0. | ||
14 | Author: Paulo Neves <ptsneves@gmail.com> | ||
15 | """ | ||
16 | res = bitbake("cmdline-shebang-wrapper-test -c install", ignore_status=False) | ||
diff --git a/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py new file mode 100644 index 0000000000..312edb6431 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py | |||
@@ -0,0 +1,39 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | import os | ||
8 | import sys | ||
9 | import subprocess | ||
10 | import shutil | ||
11 | from oeqa.selftest.case import OESelftestTestCase | ||
12 | from yocto_testresults_query import get_sha1, create_workdir | ||
13 | basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../') | ||
14 | lib_path = basepath + '/scripts/lib' | ||
15 | sys.path = sys.path + [lib_path] | ||
16 | |||
17 | |||
18 | class TestResultsQueryTests(OESelftestTestCase): | ||
19 | def test_get_sha1(self): | ||
20 | test_data_get_sha1 = [ | ||
21 | {"input": "yocto-4.0", "expected": "00cfdde791a0176c134f31e5a09eff725e75b905"}, | ||
22 | {"input": "4.1_M1", "expected": "95066dde6861ee08fdb505ab3e0422156cc24fae"}, | ||
23 | ] | ||
24 | for data in test_data_get_sha1: | ||
25 | test_name = data["input"] | ||
26 | with self.subTest(f"Test SHA1 from {test_name}"): | ||
27 | self.assertEqual( | ||
28 | get_sha1(basepath, data["input"]), data["expected"]) | ||
29 | |||
30 | def test_create_workdir(self): | ||
31 | workdir = create_workdir() | ||
32 | try: | ||
33 | url = subprocess.check_output( | ||
34 | ["git", "-C", workdir, "remote", "get-url", "origin"]).strip().decode("utf-8") | ||
35 | except: | ||
36 | shutil.rmtree(workdir, ignore_errors=True) | ||
37 | self.fail(f"Can not execute git commands in {workdir}") | ||
38 | shutil.rmtree(workdir) | ||
39 | self.assertEqual(url, "git://git.yoctoproject.org/yocto-testresults") | ||