diff options
Diffstat (limited to 'meta/lib/patchtest/tests')
-rw-r--r-- | meta/lib/patchtest/tests/__init__.py | 0 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/base.py | 252 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/test_mbox.py | 179 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/test_metadata.py | 212 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/test_patch.py | 131 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/test_python_pylint.py | 65 |
6 files changed, 839 insertions, 0 deletions
diff --git a/meta/lib/patchtest/tests/__init__.py b/meta/lib/patchtest/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/meta/lib/patchtest/tests/__init__.py | |||
diff --git a/meta/lib/patchtest/tests/base.py b/meta/lib/patchtest/tests/base.py new file mode 100644 index 0000000000..919ca136bb --- /dev/null +++ b/meta/lib/patchtest/tests/base.py | |||
@@ -0,0 +1,252 @@ | |||
1 | # Base class to be used by all test cases defined in the suite | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import unittest | ||
8 | import logging | ||
9 | import json | ||
10 | import unidiff | ||
11 | from patchtest_parser import PatchtestParser | ||
12 | import mailbox | ||
13 | import patchtest_patterns | ||
14 | import collections | ||
15 | import sys | ||
16 | import os | ||
17 | import re | ||
18 | |||
logger = logging.getLogger("patchtest")
debug = logger.debug
info = logger.info
# Logger.warn is a deprecated alias; bind the supported method instead.
warn = logger.warning
error = logger.error

# A parsed patch/commit taken from the mbox: raw author/subject headers plus
# the derived shortlog (subject without bracketed prefixes), the commit
# message (payload minus the diff) and the full payload.
# NOTE: this namedtuple was previously defined twice with identical fields;
# the redundant second definition has been removed.
Commit = collections.namedtuple(
    "Commit", ["author", "subject", "commit_message", "shortlog", "payload"]
)
30 | |||
class PatchtestOEError(Exception):
    """Exception for handling patchtest-oe errors.

    Besides the standard message, an ``exitcode`` is stored so callers can
    propagate a process exit status (defaults to 1).
    """

    def __init__(self, message, exitcode=1):
        # Delegate message handling to Exception, then remember the status.
        super().__init__(message)
        self.exitcode = exitcode
36 | |||
class Base(unittest.TestCase):
    # if unit test fails, fail message will throw at least the following JSON: {"id": <testid>}

    @staticmethod
    def msg_to_commit(msg):
        """Build a Commit namedtuple from a mailbox message."""
        payload = msg.get_payload()
        # flatten the subject onto one line before storing it
        return Commit(subject=msg['subject'].replace('\n', ' ').replace(' ', ' '),
                      author=msg.get('From'),
                      shortlog=Base.shortlog(msg['subject']),
                      commit_message=Base.commit_message(payload),
                      payload=payload)

    @staticmethod
    def commit_message(payload):
        """Return the commit message part of the payload, excluding the diff."""
        commit_message = payload.__str__()
        match = patchtest_patterns.endcommit_messages_regex.search(payload)
        if match:
            # keep only the text before the start of the diff/patch section
            commit_message = payload[:match.start()]
        return commit_message

    @staticmethod
    def shortlog(shlog):
        """Return the subject with any leading bracketed prefix stripped."""
        # remove possible prefix (between brackets) before colon
        start = shlog.find(']', 0, shlog.find(':'))
        # remove also newlines and spaces at both sides
        return shlog[start + 1:].replace('\n', '').strip()

    @classmethod
    def setUpClass(cls):
        """Parse the series once, exposing mbox/patchset/commits to all tests."""

        # General objects: mailbox.mbox and patchset
        cls.mbox = mailbox.mbox(PatchtestParser.repo.patch.path)

        # Patch may be malformed, so try parsing it
        cls.unidiff_parse_error = ''
        cls.patchset = None
        try:
            cls.patchset = unidiff.PatchSet.from_filename(
                PatchtestParser.repo.patch.path, encoding="UTF-8"
            )
        except unidiff.UnidiffParseError as upe:
            # tests check unidiff_parse_error to skip/fail gracefully
            cls.patchset = []
            cls.unidiff_parse_error = str(upe)

        # Easy to iterate list of commits
        cls.commits = []
        for msg in cls.mbox:
            if msg['subject'] and msg.get_payload():
                cls.commits.append(Base.msg_to_commit(msg))

        cls.setUpClassLocal()

    @classmethod
    def tearDownClass(cls):
        cls.tearDownClassLocal()

    @classmethod
    def setUpClassLocal(cls):
        # hook for subclasses that need extra per-class setup
        pass

    @classmethod
    def tearDownClassLocal(cls):
        # hook for subclasses that need extra per-class teardown
        pass

    def fail(self, issue, fix=None, commit=None, data=None):
        """ Convert to a JSON string failure data"""
        # NOTE: overrides unittest.TestCase.fail with a different signature;
        # the JSON payload is consumed by the patchtest result reporting.
        value = {'id': self.id(),
                 'issue': issue}

        if fix:
            value['fix'] = fix
        if commit:
            value['commit'] = {'subject': commit.subject,
                               'shortlog': commit.shortlog}

        # extend return value with other useful info
        if data:
            value['data'] = data

        return super(Base, self).fail(json.dumps(value))

    def skip(self, issue, data=None):
        """ Convert the skip string to JSON"""
        value = {'id': self.id(),
                 'issue': issue}

        # extend return value with other useful info
        if data:
            value['data'] = data

        return super(Base, self).skipTest(json.dumps(value))

    def shortid(self):
        # last component of the test id, i.e. the bare test method name
        return self.id().split('.')[-1]

    def __str__(self):
        return json.dumps({'id': self.id()})
134 | |||
class Metadata(Base):
    """Base class for tests that need bitbake/tinfoil recipe metadata."""

    @classmethod
    def setUpClassLocal(cls):
        cls.tinfoil = cls.setup_tinfoil()

        # get info about added/modified/remove recipes
        cls.added, cls.modified, cls.removed = cls.get_metadata_stats(cls.patchset)

    @classmethod
    def tearDownClassLocal(cls):
        cls.tinfoil.shutdown()

    @classmethod
    def setup_tinfoil(cls, config_only=False):
        """Initialize tinfoil api from bitbake"""

        # import relevant libraries
        try:
            scripts_path = os.path.join(PatchtestParser.repodir, "scripts", "lib")
            if scripts_path not in sys.path:
                sys.path.insert(0, scripts_path)
            import scriptpath
            scriptpath.add_bitbake_lib_path()
            import bb.tinfoil
        except ImportError:
            raise PatchtestOEError('Could not import tinfoil module')

        orig_cwd = os.path.abspath(os.curdir)

        # Load tinfoil
        tinfoil = None
        try:
            builddir = os.environ.get('BUILDDIR')
            if not builddir:
                # returns None in this case; callers must cope with it
                logger.warn('Bitbake environment not loaded?')
                return tinfoil
            os.chdir(builddir)
            tinfoil = bb.tinfoil.Tinfoil()
            tinfoil.prepare(config_only=config_only)
        except bb.tinfoil.TinfoilUIException as te:
            if tinfoil:
                tinfoil.shutdown()
            raise PatchtestOEError('Could not prepare properly tinfoil (TinfoilUIException)')
        except Exception as e:
            if tinfoil:
                tinfoil.shutdown()
            raise e
        finally:
            # always restore the caller's working directory
            os.chdir(orig_cwd)

        return tinfoil

    @classmethod
    def get_metadata_stats(cls, patchset):
        """Get lists of added, modified and removed metadata files"""

        def find_pn(data, path):
            """Find the PN from data"""
            pn = None
            pn_native = None
            for _path, _pn in data:
                if path in _path:
                    if 'native' in _pn:
                        # store the native PN but look for the non-native one first
                        pn_native = _pn
                    else:
                        pn = _pn
                        break
            else:
                # for/else: only reached when no non-native match broke the loop
                # sent the native PN if found previously
                if pn_native:
                    return pn_native

                # on renames (usually upgrades), we need to check (FILE) base names
                # because the unidiff library does not provided the new filename, just the modified one
                # and tinfoil datastore, once the patch is merged, will contain the new filename
                path_basename = path.split('_')[0]
                for _path, _pn in data:
                    _path_basename = _path.split('_')[0]
                    if path_basename == _path_basename:
                        pn = _pn
            return pn

        if not cls.tinfoil:
            cls.tinfoil = cls.setup_tinfoil()

        added_paths, modified_paths, removed_paths = [], [], []
        added, modified, removed = [], [], []

        # get metadata filename additions, modification and removals
        for patch in patchset:
            if patch.path.endswith('.bb') or patch.path.endswith('.bbappend') or patch.path.endswith('.inc'):
                if patch.is_added_file:
                    added_paths.append(
                        os.path.join(
                            os.path.abspath(PatchtestParser.repodir), patch.path
                        )
                    )
                elif patch.is_modified_file:
                    modified_paths.append(
                        os.path.join(
                            os.path.abspath(PatchtestParser.repodir), patch.path
                        )
                    )
                elif patch.is_removed_file:
                    removed_paths.append(
                        os.path.join(
                            os.path.abspath(PatchtestParser.repodir), patch.path
                        )
                    )

        # pkg_fn maps recipe filename -> PN in the cooker cache
        data = cls.tinfoil.cooker.recipecaches[''].pkg_fn.items()

        added = [find_pn(data,path) for path in added_paths]
        modified = [find_pn(data,path) for path in modified_paths]
        removed = [find_pn(data,path) for path in removed_paths]

        # drop entries for which no PN could be resolved
        return [a for a in added if a], [m for m in modified if m], [r for r in removed if r]
diff --git a/meta/lib/patchtest/tests/test_mbox.py b/meta/lib/patchtest/tests/test_mbox.py new file mode 100644 index 0000000000..dab733ea77 --- /dev/null +++ b/meta/lib/patchtest/tests/test_mbox.py | |||
@@ -0,0 +1,179 @@ | |||
1 | # Checks related to the patch's author | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import base | ||
8 | import collections | ||
9 | import patchtest_patterns | ||
10 | import pyparsing | ||
11 | import re | ||
12 | import subprocess | ||
13 | from patchtest_parser import PatchtestParser | ||
14 | |||
def headlog():
    """Return [hash, author, committer-date-with-colon, subject] of HEAD.

    The fields come from git's pretty format '%h#%aN#%cD:#%s' and are split
    on '#'.
    """
    # Use an argument list with cwd= instead of interpolating the repository
    # path into a shell command line: same output, but robust against spaces
    # or shell metacharacters in the path and no shell round-trip.
    output = subprocess.check_output(
        ["git", "log", "--pretty=%h#%aN#%cD:#%s", "-1"],
        universal_newlines=True,
        cwd=PatchtestParser.repodir,
    )
    return output.split('#')
22 | |||
class TestMbox(base.Base):
    """Checks on the series/mbox itself: tags, shortlog, target mailing list."""

    # base paths of main yocto project sub-projects
    paths = {
        'oe-core': ['meta-selftest', 'meta-skeleton', 'meta', 'scripts'],
        'bitbake': ['bitbake'],
        'documentation': ['documentation'],
        'poky': ['meta-poky','meta-yocto-bsp'],
        'oe': ['meta-gpe', 'meta-gnome', 'meta-efl', 'meta-networking', 'meta-multimedia','meta-initramfs', 'meta-ruby', 'contrib', 'meta-xfce', 'meta-filesystems', 'meta-perl', 'meta-webserver', 'meta-systemd', 'meta-oe', 'meta-python']
    }

    # scripts folder is a mix of oe-core and poky, most is oe-core code except:
    poky_scripts = ['scripts/yocto-bsp', 'scripts/yocto-kernel', 'scripts/yocto-layer', 'scripts/lib/bsp']

    Project = collections.namedtuple('Project', ['name', 'listemail', 'gitrepo', 'paths'])

    bitbake = Project(name='Bitbake', listemail='bitbake-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/bitbake/', paths=paths['bitbake'])
    # NOTE(review): 'Documentantion' is a typo in runtime data; left untouched
    # here since it may surface in user-visible output -- TODO confirm/fix
    doc = Project(name='Documentantion', listemail='yocto@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/', paths=paths['documentation'])
    poky = Project(name='Poky', listemail='poky@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/poky/', paths=paths['poky'])
    oe = Project(name='oe', listemail='openembedded-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/meta-openembedded/', paths=paths['oe'])


    def test_signed_off_by_presence(self):
        """Every non-revert commit must carry a Signed-off-by tag."""
        for commit in self.commits:
            # skip those patches that revert older commits, these do not required the tag presence
            if patchtest_patterns.mbox_revert_shortlog_regex.search_string(commit.shortlog):
                continue
            if not patchtest_patterns.signed_off_by.search_string(commit.payload):
                self.fail(
                    'Mbox is missing Signed-off-by. Add it manually or with "git commit --amend -s"',
                    commit=commit,
                )

    def test_shortlog_format(self):
        """Shortlog must follow the "<target>: <summary>" format."""
        for commit in self.commits:
            shortlog = commit.shortlog
            if not shortlog.strip():
                self.skip('Empty shortlog, no reason to execute shortlog format test')
            else:
                # no reason to re-check on revert shortlogs
                if shortlog.startswith('Revert "'):
                    continue
                try:
                    patchtest_patterns.shortlog.parseString(shortlog)
                except pyparsing.ParseException as pe:
                    self.fail('Commit shortlog (first line of commit message) should follow the format "<target>: <summary>"',
                              commit=commit)

    def test_shortlog_length(self):
        """Shortlog must not exceed the configured maximum length."""
        for commit in self.commits:
            # no reason to re-check on revert shortlogs
            # NOTE(review): non-raw pattern containing '\[' -- triggers a
            # DeprecationWarning on newer Pythons; consider r'^(\[.*?\])+ '
            shortlog = re.sub('^(\[.*?\])+ ', '', commit.shortlog)
            if shortlog.startswith('Revert "'):
                continue
            l = len(shortlog)
            if l > patchtest_patterns.mbox_shortlog_maxlength:
                self.fail(
                    "Edit shortlog so that it is %d characters or less (currently %d characters)"
                    % (patchtest_patterns.mbox_shortlog_maxlength, l),
                    commit=commit,
                )

    def test_series_merge_on_head(self):
        """Series must apply on top of the target branch (currently disabled)."""
        self.skip("Merge test is disabled for now")
        # NOTE: everything below is unreachable while the unconditional skip
        # above remains in place
        if PatchtestParser.repo.patch.branch != "master":
            self.skip(
                "Skipping merge test since patch is not intended"
                " for master branch. Target detected is %s"
                % PatchtestParser.repo.patch.branch
            )
        if not PatchtestParser.repo.canbemerged:
            commithash, author, date, shortlog = headlog()
            self.fail(
                "Series does not apply on top of target branch %s"
                % PatchtestParser.repo.patch.branch,
                data=[
                    (
                        "Targeted branch",
                        "%s (currently at %s)"
                        % (PatchtestParser.repo.patch.branch, commithash),
                    )
                ],
            )

    def test_target_mailing_list(self):
        """Check for other targeted projects"""

        # a meta project may be indicted in the message subject, if this is the case, just fail
        # TODO: there may be other project with no-meta prefix, we also need to detect these
        # NOTE(review): non-raw pattern string; consider r"\[(?P<project>meta-.+)\]"
        project_regex = pyparsing.Regex("\[(?P<project>meta-.+)\]")
        for commit in self.commits:
            match = project_regex.search_string(commit.subject)
            if match:
                self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
                          commit=commit)

        for patch in self.patchset:
            folders = patch.path.split('/')
            base_path = folders[0]
            for project in [self.bitbake, self.doc, self.oe, self.poky]:
                if base_path in project.paths:
                    self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
                              data=[('Suggested ML', '%s [%s]' % (project.listemail, project.gitrepo)),
                                    ('Patch\'s path:', patch.path)])

            # check for poky's scripts code
            if base_path.startswith('scripts'):
                for poky_file in self.poky_scripts:
                    if patch.path.startswith(poky_file):
                        self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
                                  data=[('Suggested ML', '%s [%s]' % (self.poky.listemail, self.poky.gitrepo)),('Patch\'s path:', patch.path)])

    def test_mbox_format(self):
        """The series must parse cleanly with unidiff."""
        if self.unidiff_parse_error:
            self.fail('Series has malformed diff lines. Create the series again using git-format-patch and ensure it applies using git am',
                      data=[('Diff line',self.unidiff_parse_error)])

    def test_commit_message_presence(self):
        """Every commit must carry a non-empty commit message."""
        for commit in self.commits:
            if not commit.commit_message.strip():
                self.fail('Please include a commit message on your patch explaining the change', commit=commit)

    # This may incorrectly report a failure if something such as a
    # Python decorator is included in the commit message, but this
    # scenario is much less common than the username case it is written
    # to protect against
    def test_commit_message_user_tags(self):
        """Commit messages must not contain GitHub-style @username tags."""
        for commit in self.commits:
            if patchtest_patterns.mbox_github_username.search_string(commit.commit_message):
                self.fail('Mbox includes one or more GitHub-style username tags. Ensure that any "@" symbols are stripped out of usernames', commit=commit)

    def test_bugzilla_entry_format(self):
        """Bug references must use the "[YOCTO #<bugzilla ID>]" format."""
        for commit in self.commits:
            if not patchtest_patterns.mbox_bugzilla.search_string(commit.commit_message):
                self.skip("No bug ID found")
            elif not patchtest_patterns.mbox_bugzilla_validation.search_string(
                commit.commit_message
            ):
                self.fail(
                    'Bugzilla issue ID is not correctly formatted - specify it with format: "[YOCTO #<bugzilla ID>]"',
                    commit=commit,
                )

    def test_author_valid(self):
        """The commit author must not match any invalid-submitter pattern."""
        for commit in self.commits:
            for invalid in patchtest_patterns.invalid_submitters:
                if invalid.search_string(commit.author):
                    self.fail('Invalid author %s. Resend the series with a valid patch author' % commit.author, commit=commit)

    def test_non_auh_upgrade(self):
        """Series must not come from the Auto Upgrade Helper e-mail."""
        for commit in self.commits:
            if patchtest_patterns.auh_email in commit.commit_message:
                self.fail(
                    "Invalid author %s. Resend the series with a valid patch author"
                    % patchtest_patterns.auh_email,
                    commit=commit,
                )
diff --git a/meta/lib/patchtest/tests/test_metadata.py b/meta/lib/patchtest/tests/test_metadata.py new file mode 100644 index 0000000000..2dee80b002 --- /dev/null +++ b/meta/lib/patchtest/tests/test_metadata.py | |||
@@ -0,0 +1,212 @@ | |||
1 | # Checks related to the patch's LIC_FILES_CHKSUM metadata variable | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import base | ||
8 | import collections | ||
9 | import os | ||
10 | import patchtest_patterns | ||
11 | import pyparsing | ||
12 | from patchtest_parser import PatchtestParser | ||
13 | |||
# Data store commonly used to share values between pre and post-merge tests
# (keys look like "<shortid>-<variable>-<pn>"; pretest methods prefix their
# shortid so the matching "pre..." key is read back by the test method;
# missing keys default to the empty string)
PatchTestDataStore = collections.defaultdict(str)
16 | |||
class TestMetadata(base.Metadata):
    """Recipe metadata checks: LICENSE, LIC_FILES_CHKSUM, SUMMARY, SRC_URI."""

    def test_license_presence(self):
        """Newly added recipes must set the LICENSE variable."""
        if not self.added:
            self.skip('No added recipes, skipping test')

        # TODO: this is a workaround so we can parse the recipe not
        # containing the LICENSE var: add some default license instead
        # of INVALID into auto.conf, then remove this line at the end
        auto_conf = os.path.join(os.environ.get('BUILDDIR'), 'conf', 'auto.conf')
        open_flag = 'w'
        if os.path.exists(auto_conf):
            open_flag = 'a'
        with open(auto_conf, open_flag) as fd:
            for pn in self.added:
                fd.write('LICENSE ??= "%s"\n' % patchtest_patterns.invalid_license)

        no_license = False
        for pn in self.added:
            rd = self.tinfoil.parse_recipe(pn)
            license = rd.getVar(patchtest_patterns.metadata_lic)
            if license == patchtest_patterns.invalid_license:
                no_license = True
                break

        # remove auto.conf line or the file itself
        if open_flag == 'w':
            os.remove(auto_conf)
        else:
            # NOTE(review): only the last line is dropped, but one line per
            # added recipe was appended above -- TODO confirm this is intended
            fd = open(auto_conf, 'r')
            lines = fd.readlines()
            fd.close()
            with open(auto_conf, 'w') as fd:
                fd.write(''.join(lines[:-1]))

        if no_license:
            self.fail('Recipe does not have the LICENSE field set.')

    def test_lic_files_chksum_presence(self):
        """Newly added non-image, non-CLOSED recipes must set LIC_FILES_CHKSUM."""
        if not self.added:
            self.skip('No added recipes, skipping test')

        for pn in self.added:
            rd = self.tinfoil.parse_recipe(pn)
            pathname = rd.getVar('FILE')
            # we are not interested in images
            if '/images/' in pathname:
                continue
            lic_files_chksum = rd.getVar(patchtest_patterns.metadata_chksum)
            if rd.getVar(patchtest_patterns.license_var) == patchtest_patterns.closed:
                continue
            if not lic_files_chksum:
                self.fail(
                    "%s is missing in newly added recipe" % patchtest_patterns.metadata_chksum
                )

    def test_lic_files_chksum_modified_not_mentioned(self):
        """Changes to LIC_FILES_CHKSUM require a "License-Update:" tag."""
        if not self.modified:
            self.skip('No modified recipes, skipping test')

        for patch in self.patchset:
            # for the moment, we are just interested in metadata
            if patch.path.endswith('.patch'):
                continue
            payload = str(patch)
            if patchtest_patterns.lic_chksum_added.search_string(
                payload
            ) or patchtest_patterns.lic_chksum_removed.search_string(payload):
                # if any patch on the series contain reference on the metadata, fail
                for commit in self.commits:
                    if patchtest_patterns.lictag_re.search_string(commit.commit_message):
                        break
                else:
                    # for/else: no commit carried the License-Update tag
                    self.fail('LIC_FILES_CHKSUM changed without "License-Update:" tag and description in commit message')

    def test_max_line_length(self):
        """Added metadata lines must respect the maximum line length."""
        for patch in self.patchset:
            # for the moment, we are just interested in metadata
            if patch.path.endswith('.patch'):
                continue
            payload = str(patch)
            for line in payload.splitlines():
                if patchtest_patterns.add_mark.search_string(line):
                    # drop the leading '+' diff marker before measuring
                    current_line_length = len(line[1:])
                    if current_line_length > patchtest_patterns.patch_max_line_length:
                        self.fail(
                            "Patch line too long (current length %s, maximum is %s)"
                            % (current_line_length, patchtest_patterns.patch_max_line_length),
                            data=[
                                ("Patch", patch.path),
                                ("Line", "%s ..." % line[0:80]),
                            ],
                        )

    def pretest_src_uri_left_files(self):
        """Record each modified recipe's SRC_URI before the series is merged."""
        # these tests just make sense on patches that can be merged
        if not PatchtestParser.repo.canbemerged:
            self.skip("Patch cannot be merged")
        if not self.modified:
            self.skip('No modified recipes, skipping pretest')

        # get the proper metadata values
        for pn in self.modified:
            # we are not interested in images
            if 'core-image' in pn:
                continue
            rd = self.tinfoil.parse_recipe(pn)
            PatchTestDataStore[
                "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
            ] = rd.getVar(patchtest_patterns.metadata_src_uri)

    def test_src_uri_left_files(self):
        """Files dropped from SRC_URI must also be removed from the tree."""
        # these tests just make sense on patches that can be merged
        if not PatchtestParser.repo.canbemerged:
            self.skip("Patch cannot be merged")
        if not self.modified:
            self.skip('No modified recipes, skipping pretest')

        # get the proper metadata values
        for pn in self.modified:
            # we are not interested in images
            if 'core-image' in pn:
                continue
            rd = self.tinfoil.parse_recipe(pn)
            PatchTestDataStore[
                "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
            ] = rd.getVar(patchtest_patterns.metadata_src_uri)

        for pn in self.modified:
            # "pre<shortid>..." was stored by pretest_src_uri_left_files
            pretest_src_uri = PatchTestDataStore[
                "pre%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
            ].split()
            test_src_uri = PatchTestDataStore[
                "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
            ].split()

            pretest_files = set([os.path.basename(patch) for patch in pretest_src_uri if patch.startswith('file://')])
            test_files = set([os.path.basename(patch) for patch in test_src_uri if patch.startswith('file://')])

            # check if files were removed
            if len(test_files) < len(pretest_files):

                # get removals from patchset
                filesremoved_from_patchset = set()
                for patch in self.patchset:
                    if patch.is_removed_file:
                        filesremoved_from_patchset.add(os.path.basename(patch.path))

                # get the deleted files from the SRC_URI
                filesremoved_from_usr_uri = pretest_files - test_files

                # finally, get those patches removed at SRC_URI and not removed from the patchset
                # TODO: we are not taking into account renames, so test may raise false positives
                not_removed = filesremoved_from_usr_uri - filesremoved_from_patchset
                if not_removed:
                    self.fail('Patches not removed from tree. Remove them and amend the submitted mbox',
                              data=[('Patch', f) for f in not_removed])

    def test_summary_presence(self):
        """Newly added recipes must set a non-default SUMMARY."""
        if not self.added:
            self.skip('No added recipes, skipping test')

        for pn in self.added:
            # we are not interested in images
            if 'core-image' in pn:
                continue
            rd = self.tinfoil.parse_recipe(pn)
            summary = rd.getVar(patchtest_patterns.metadata_summary)

            # "${PN} version ${PN}-${PR}" is the default, so fail if default
            if summary.startswith("%s version" % pn):
                self.fail(
                    "%s is missing in newly added recipe" % patchtest_patterns.metadata_summary
                )

    def test_cve_check_ignore(self):
        """CVE_CHECK_IGNORE is deprecated; CVE_STATUS must be used instead."""
        # Skip if we neither modified a recipe or target branches are not
        # Nanbield and newer. CVE_CHECK_IGNORE was first deprecated in Nanbield.
        if (
            not self.modified
            or PatchtestParser.repo.patch.branch == "kirkstone"
            or PatchtestParser.repo.patch.branch == "dunfell"
        ):
            self.skip("No modified recipes or older target branch, skipping test")
        for pn in self.modified:
            # we are not interested in images
            if 'core-image' in pn:
                continue
            rd = self.tinfoil.parse_recipe(pn)
            cve_check_ignore = rd.getVar(patchtest_patterns.cve_check_ignore_var)

            if cve_check_ignore is not None:
                self.fail(
                    "%s is deprecated and should be replaced by %s"
                    % (patchtest_patterns.cve_check_ignore_var, patchtest_patterns.cve_status_var)
                )
diff --git a/meta/lib/patchtest/tests/test_patch.py b/meta/lib/patchtest/tests/test_patch.py new file mode 100644 index 0000000000..d08b8a5019 --- /dev/null +++ b/meta/lib/patchtest/tests/test_patch.py | |||
@@ -0,0 +1,131 @@ | |||
1 | # Checks related to the patch's CVE lines | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | import base | ||
9 | import os | ||
10 | import patchtest_patterns | ||
11 | import pyparsing | ||
12 | |||
class TestPatch(base.Base):
    """Checks on newly added .patch files: Upstream-Status, SOB and CVE tags."""

    @classmethod
    def setUpClassLocal(cls):
        cls.newpatches = []
        # get just those relevant patches: new software patches
        for patch in cls.patchset:
            if patch.path.endswith('.patch') and patch.is_added_file:
                cls.newpatches.append(patch)

        # human-readable Signed-off-by marker used in messages
        cls.mark = str(patchtest_patterns.signed_off_by_prefix).strip('"')

        # match TestPatch.mark with '+' preceding it
        cls.prog = patchtest_patterns.patch_signed_off_by

    def setUp(self):
        if self.unidiff_parse_error:
            self.skip('Parse error %s' % self.unidiff_parse_error)

        self.valid_status = ", ".join(patchtest_patterns.upstream_status_nonliteral_valid_status)
        self.standard_format = "Upstream-Status: <Valid status>"

        # we are just interested in series that introduce CVE patches, thus discard other
        # possibilities: modification to current CVEs, patch directly introduced into the
        # recipe, upgrades already including the CVE, etc.
        new_cves = [p for p in self.patchset if p.path.endswith('.patch') and p.is_added_file]
        if not new_cves:
            self.skip('No new CVE patches introduced')

    def test_upstream_status_presence_format(self):
        """Each new patch file must carry a well-formed Upstream-Status tag."""
        if not TestPatch.newpatches:
            self.skip("There are no new software patches, no reason to test Upstream-Status presence/format")

        for newpatch in TestPatch.newpatches:
            payload = newpatch.__str__()
            if not patchtest_patterns.upstream_status_regex.search_string(payload):
                self.fail(
                    "Added patch file is missing Upstream-Status: <Valid status> in the commit message",
                    data=[
                        ("Standard format", self.standard_format),
                        ("Valid status", self.valid_status),
                    ],
                )
            for line in payload.splitlines():
                # skip diff metadata lines (---, +++, @@ hunks, ...)
                if patchtest_patterns.patchmetadata_regex.match(line):
                    continue
                if patchtest_patterns.upstream_status_regex.search_string(line):
                    if patchtest_patterns.inappropriate.searchString(line):
                        try:
                            patchtest_patterns.upstream_status_inappropriate_info.parseString(
                                line.lstrip("+")
                            )
                        except pyparsing.ParseException as pe:
                            self.fail(
                                "Upstream-Status is Inappropriate, but no reason was provided",
                                data=[
                                    ("Current", pe.pstr),
                                    (
                                        "Standard format",
                                        "Upstream-Status: Inappropriate [reason]",
                                    ),
                                ],
                            )
                    elif patchtest_patterns.submitted.searchString(line):
                        try:
                            patchtest_patterns.upstream_status_submitted_info.parseString(
                                line.lstrip("+")
                            )
                        except pyparsing.ParseException as pe:
                            self.fail(
                                "Upstream-Status is Submitted, but it is not mentioned where",
                                data=[
                                    ("Current", pe.pstr),
                                    (
                                        "Standard format",
                                        "Upstream-Status: Submitted [where]",
                                    ),
                                ],
                            )
                    else:
                        try:
                            patchtest_patterns.upstream_status.parseString(line.lstrip("+"))
                        except pyparsing.ParseException as pe:
                            self.fail(
                                "Upstream-Status is in incorrect format",
                                data=[
                                    ("Current", pe.pstr),
                                    ("Standard format", self.standard_format),
                                    ("Valid status", self.valid_status),
                                ],
                            )

    def test_signed_off_by_presence(self):
        """Each new patch file must contain a Signed-off-by line."""
        if not TestPatch.newpatches:
            # Fix: this message previously interpolated PatchSignedOffBy.mark,
            # an undefined name that raised NameError; use TestPatch.mark.
            self.skip("There are no new software patches, no reason to test %s presence" % TestPatch.mark)

        for newpatch in TestPatch.newpatches:
            payload = newpatch.__str__()
            for line in payload.splitlines():
                if patchtest_patterns.patchmetadata_regex.match(line):
                    continue
                # Fix: search the current line rather than the whole payload,
                # otherwise the metadata-line skip above had no effect and the
                # loop was equivalent to a single whole-payload search.
                if TestPatch.prog.search_string(line):
                    break
            else:
                # for/else: no line matched the Signed-off-by pattern
                self.fail('A patch file has been added without a Signed-off-by tag: \'%s\'' % os.path.basename(newpatch.path))

    def test_cve_tag_format(self):
        """Commits referencing a CVE must carry a "CVE: CVE-YYYY-XXXX" tag."""
        for commit in TestPatch.commits:
            if patchtest_patterns.cve.search_string(
                commit.shortlog
            ) or patchtest_patterns.cve.search_string(commit.commit_message):
                tag_found = False
                for line in commit.payload.splitlines():
                    if patchtest_patterns.cve_payload_tag.search_string(line):
                        tag_found = True
                        break
                if not tag_found:
                    self.fail('Missing or incorrectly formatted CVE tag in patch file. Correct or include the CVE tag in the patch with format: "CVE: CVE-YYYY-XXXX"',
                              commit=commit)
diff --git a/meta/lib/patchtest/tests/test_python_pylint.py b/meta/lib/patchtest/tests/test_python_pylint.py new file mode 100644 index 0000000000..ec9129bc79 --- /dev/null +++ b/meta/lib/patchtest/tests/test_python_pylint.py | |||
@@ -0,0 +1,65 @@ | |||
1 | # Checks related to the python code done with pylint | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import base | ||
8 | from io import StringIO | ||
9 | from patchtest_parser import PatchtestParser | ||
10 | from pylint.reporters.text import TextReporter | ||
11 | import pylint.lint as lint | ||
12 | |||
13 | |||
class PyLint(base.Base):
    """Run pylint before and after applying the series and fail only on
    issues that the series newly introduces."""

    # Patches in the series touching .py files; filled in setUpClassLocal().
    pythonpatches = []
    # issue-location key ("L:<line> F:<module>") -> message, collected on the
    # pre-patch tree (pretest) and the patched tree (test).
    pylint_pretest = {}
    pylint_test = {}
    pylint_options = " -E --disable='E0611, E1101, F0401, E0602' --msg-template='L:{line} F:{module} I:{msg}'"

    @classmethod
    def setUpClassLocal(cls):
        # get just those patches touching python files
        cls.pythonpatches = []
        for patch in cls.patchset:
            if patch.path.endswith('.py') and not patch.is_removed_file:
                cls.pythonpatches.append(patch)

    def setUp(self):
        if self.unidiff_parse_error:
            self.skip('Python-unidiff parse error')
        if not PyLint.pythonpatches:
            self.skip('No python related patches, skipping test')

    def _collect_issues(self, path, issues):
        """Run pylint on *path* and record each reported issue into the
        *issues* dict as location-key -> message."""
        pylint_output = StringIO()
        reporter = TextReporter(pylint_output)
        lint.Run([self.pylint_options, path], reporter=reporter, exit=False)
        # Bug fix: after pylint writes to the StringIO its position sits at
        # EOF, so readlines() always returned []; read the full buffer.
        for line in pylint_output.getvalue().splitlines():
            line = line.strip()
            if '*' in line or not line:
                continue
            # "L:<line> F:<module> I:<msg>" -> key is the first token
            key, _, message = line.partition(' ')
            issues[key] = message

    def pretest_pylint(self):
        for pythonpatch in self.pythonpatches:
            if pythonpatch.is_modified_file:
                self._collect_issues(pythonpatch.path, self.pylint_pretest)

    def test_pylint(self):
        for pythonpatch in self.pythonpatches:
            # a condition checking whether a file is renamed or not
            # unidiff doesn't support this yet; target_file keeps the "b/"
            # prefix, so strip it. Bug fix: compare with != (string equality),
            # not "is not" (object identity).
            if pythonpatch.target_file != pythonpatch.path:
                path = pythonpatch.target_file[2:]
            else:
                path = pythonpatch.path
            # Bug fix: lint must run on the resolved 'path' (the rename
            # target), not on pythonpatch.path which ignores renames.
            self._collect_issues(path, self.pylint_test)

        for issue, message in self.pylint_test.items():
            if message not in self.pylint_pretest.values():
                self.fail('Errors in your Python code were encountered. Please check your code with a linter and resubmit',
                          data=[('Output', 'Please, fix the listed issues:'), ('', issue + ' ' + message)])