diff options
Diffstat (limited to 'meta/lib/patchtest/tests')
-rw-r--r-- | meta/lib/patchtest/tests/__init__.py | 0 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/base.py | 239 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/pyparsing/common.py | 26 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py | 18 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/pyparsing/parse_shortlog.py | 14 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py | 22 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py | 24 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/test_mbox.py | 159 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/test_metadata.py | 197 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/test_patch.py | 103 | ||||
-rw-r--r-- | meta/lib/patchtest/tests/test_python_pylint.py | 65 |
11 files changed, 867 insertions, 0 deletions
diff --git a/meta/lib/patchtest/tests/__init__.py b/meta/lib/patchtest/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/meta/lib/patchtest/tests/__init__.py | |||
diff --git a/meta/lib/patchtest/tests/base.py b/meta/lib/patchtest/tests/base.py new file mode 100644 index 0000000000..424e61b5be --- /dev/null +++ b/meta/lib/patchtest/tests/base.py | |||
@@ -0,0 +1,239 @@ | |||
1 | # Base class to be used by all test cases defined in the suite | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import unittest | ||
8 | import logging | ||
9 | import json | ||
10 | import unidiff | ||
11 | from data import PatchTestInput | ||
12 | import mailbox | ||
13 | import collections | ||
14 | import sys | ||
15 | import os | ||
16 | import re | ||
17 | |||
# Make the bundled pyparsing-based parsers importable: they live in a
# 'pyparsing' directory next to this file.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'pyparsing'))

logger = logging.getLogger('patchtest')
debug = logger.debug
info = logger.info
# Logger.warn is a deprecated alias of Logger.warning(); bind the
# supported method while keeping the historical module-level name.
warn = logger.warning
error = logger.error

# Lightweight view of one mbox message used by all test cases:
#   author         - the 'From' mail header
#   subject        - subject header with newlines/double spaces collapsed
#   commit_message - message body up to the first end-of-message marker
#   shortlog       - subject stripped of any leading "[...]" prefix
#   payload        - the raw message body
Commit = collections.namedtuple('Commit', ['author', 'subject', 'commit_message', 'shortlog', 'payload'])
27 | |||
class PatchtestOEError(Exception):
    """Raised when a patchtest-oe operation fails.

    Carries an ``exitcode`` (default 1) so callers can propagate a
    process exit status.
    """

    def __init__(self, message, exitcode=1):
        self.exitcode = exitcode
        super().__init__(message)
33 | |||
class Base(unittest.TestCase):
    """Common scaffolding for every patchtest test case.

    If a unit test fails or is skipped, the message is a JSON object
    containing at least {"id": <testid>} so results can be machine-parsed.
    """

    # Markers that terminate the free-form commit message: an
    # "(From xxx-yyy rev:" line, a Signed-off-by tag, or the "---"
    # separator preceding the diffstat.
    endcommit_messages_regex = re.compile(r'\(From \w+-\w+ rev:|(?<!\S)Signed-off-by|(?<!\S)---\n')
    # Unified-diff metadata lines: ---/+++ file headers and @@ hunk headers.
    patchmetadata_regex = re.compile(r'-{3} \S+|\+{3} \S+|@{2} -\d+,\d+ \+\d+,\d+ @{2} \S+')

    @staticmethod
    def msg_to_commit(msg):
        """Convert a mailbox message into a Commit namedtuple."""
        payload = msg.get_payload()
        # Collapse newlines, then the double spaces they may leave behind.
        # (The original chained a no-op .replace(' ', ' ') here.)
        return Commit(subject=msg['subject'].replace('\n', ' ').replace('  ', ' '),
                      author=msg.get('From'),
                      shortlog=Base.shortlog(msg['subject']),
                      commit_message=Base.commit_message(payload),
                      payload=payload)

    @staticmethod
    def commit_message(payload):
        """Return the commit-message portion of *payload*.

        Everything up to the first end-of-message marker (see
        endcommit_messages_regex); the whole payload if no marker is found.
        """
        match = Base.endcommit_messages_regex.search(payload)
        if match:
            return payload[:match.start()]
        return str(payload)

    @staticmethod
    def shortlog(shlog):
        """Return the subject with any "[...]" prefix (before the first
        colon) removed, newlines dropped and surrounding spaces stripped."""
        # find the closing bracket of a possible prefix, but only if it
        # appears before the first colon
        start = shlog.find(']', 0, shlog.find(':'))
        return shlog[start + 1:].replace('\n', '').strip()

    @classmethod
    def setUpClass(cls):
        """Load the series once per test class: mbox, patchset and commits."""

        # General objects: mailbox.mbox and patchset
        cls.mbox = mailbox.mbox(PatchTestInput.repo.patch)

        # Patch may be malformed, so try parsing it; on failure keep the
        # error string so tests can report it (see test_mbox_format)
        cls.unidiff_parse_error = ''
        cls.patchset = None
        try:
            cls.patchset = unidiff.PatchSet.from_filename(PatchTestInput.repo.patch, encoding=u'UTF-8')
        except unidiff.UnidiffParseError as upe:
            cls.patchset = []
            cls.unidiff_parse_error = str(upe)

        # Easy-to-iterate list of commits (skip messages without a
        # subject or without a body)
        cls.commits = []
        for msg in cls.mbox:
            if msg['subject'] and msg.get_payload():
                cls.commits.append(Base.msg_to_commit(msg))

        cls.setUpClassLocal()

    @classmethod
    def tearDownClass(cls):
        cls.tearDownClassLocal()

    @classmethod
    def setUpClassLocal(cls):
        """Hook for subclasses needing extra per-class setup."""
        pass

    @classmethod
    def tearDownClassLocal(cls):
        """Hook for subclasses needing extra per-class teardown."""
        pass

    def fail(self, issue, fix=None, commit=None, data=None):
        """Fail the test with a JSON-encoded message.

        issue:  short description of the problem
        fix:    optional suggested remedy
        commit: optional Commit the issue belongs to
        data:   optional list of (key, value) pairs with extra context
        """
        value = {'id': self.id(),
                 'issue': issue}

        if fix:
            value['fix'] = fix
        if commit:
            value['commit'] = {'subject': commit.subject,
                               'shortlog': commit.shortlog}

        # extend return value with other useful info
        if data:
            value['data'] = data

        return super().fail(json.dumps(value))

    def skip(self, issue, data=None):
        """Skip the test with a JSON-encoded message (same shape as fail)."""
        value = {'id': self.id(),
                 'issue': issue}

        # extend return value with other useful info
        if data:
            value['data'] = data

        return super().skipTest(json.dumps(value))

    def shortid(self):
        """Return just the test-method name from the dotted test id."""
        return self.id().split('.')[-1]

    def __str__(self):
        return json.dumps({'id': self.id()})
133 | |||
class Metadata(Base):
    """Base class for tests that need bitbake metadata, accessed via tinfoil."""

    @classmethod
    def setUpClassLocal(cls):
        cls.tinfoil = cls.setup_tinfoil()

        # get info about added/modified/removed recipes
        cls.added, cls.modified, cls.removed = cls.get_metadata_stats(cls.patchset)

    @classmethod
    def tearDownClassLocal(cls):
        cls.tinfoil.shutdown()

    @classmethod
    def setup_tinfoil(cls, config_only=False):
        """Initialize the tinfoil API from bitbake.

        Returns a prepared Tinfoil instance, or None when the bitbake
        environment (BUILDDIR) is not loaded.
        Raises PatchtestOEError when tinfoil cannot be imported or prepared.
        """

        # import relevant libraries (scriptpath locates bitbake's lib dir)
        try:
            scripts_path = os.path.join(PatchTestInput.repodir, 'scripts', 'lib')
            if scripts_path not in sys.path:
                sys.path.insert(0, scripts_path)
            import scriptpath
            scriptpath.add_bitbake_lib_path()
            import bb.tinfoil
        except ImportError:
            raise PatchtestOEError('Could not import tinfoil module')

        orig_cwd = os.path.abspath(os.curdir)

        # Load tinfoil; prepare() must run from the build directory
        tinfoil = None
        try:
            builddir = os.environ.get('BUILDDIR')
            if not builddir:
                # Logger.warn is deprecated, use warning()
                logger.warning('Bitbake environment not loaded?')
                return tinfoil
            os.chdir(builddir)
            tinfoil = bb.tinfoil.Tinfoil()
            tinfoil.prepare(config_only=config_only)
        except bb.tinfoil.TinfoilUIException:
            if tinfoil:
                tinfoil.shutdown()
            raise PatchtestOEError('Could not prepare properly tinfoil (TinfoilUIException)')
        except Exception:
            if tinfoil:
                tinfoil.shutdown()
            # bare raise preserves the original traceback
            raise
        finally:
            # always restore the caller's working directory
            os.chdir(orig_cwd)

        return tinfoil

    @classmethod
    def get_metadata_stats(cls, patchset):
        """Get lists of added, modified and removed metadata files.

        Returns three lists of PNs (package names); entries that could not
        be resolved to a PN are filtered out.
        """

        def find_pn(data, path):
            """Find the PN for *path* in tinfoil's (filename, PN) pairs."""
            pn = None
            pn_native = None
            for _path, _pn in data:
                if path in _path:
                    if 'native' in _pn:
                        # store the native PN but look for the non-native one first
                        pn_native = _pn
                    else:
                        pn = _pn
                        break
            else:
                # return the native PN if found previously
                if pn_native:
                    return pn_native

                # on renames (usually upgrades), we need to check (FILE) base names
                # because the unidiff library does not provide the new filename,
                # just the modified one, and the tinfoil datastore, once the patch
                # is merged, will contain the new filename
                path_basename = path.split('_')[0]
                for _path, _pn in data:
                    _path_basename = _path.split('_')[0]
                    if path_basename == _path_basename:
                        pn = _pn
            return pn

        if not cls.tinfoil:
            cls.tinfoil = cls.setup_tinfoil()

        added_paths, modified_paths, removed_paths = [], [], []

        # classify metadata filename additions, modifications and removals
        repo_abspath = os.path.abspath(PatchTestInput.repodir)
        for patch in patchset:
            if patch.path.endswith(('.bb', '.bbappend', '.inc')):
                if patch.is_added_file:
                    added_paths.append(os.path.join(repo_abspath, patch.path))
                elif patch.is_modified_file:
                    modified_paths.append(os.path.join(repo_abspath, patch.path))
                elif patch.is_removed_file:
                    removed_paths.append(os.path.join(repo_abspath, patch.path))

        data = cls.tinfoil.cooker.recipecaches[''].pkg_fn.items()

        added = [find_pn(data, path) for path in added_paths]
        modified = [find_pn(data, path) for path in modified_paths]
        removed = [find_pn(data, path) for path in removed_paths]

        return [a for a in added if a], [m for m in modified if m], [r for r in removed if r]
diff --git a/meta/lib/patchtest/tests/pyparsing/common.py b/meta/lib/patchtest/tests/pyparsing/common.py new file mode 100644 index 0000000000..cbce4c38bc --- /dev/null +++ b/meta/lib/patchtest/tests/pyparsing/common.py | |||
@@ -0,0 +1,26 @@ | |||
1 | # common pyparsing variables | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
import pyparsing

# general: single-token literals shared by all patchtest parsers
colon = pyparsing.Literal(":")
start = pyparsing.LineStart()
end = pyparsing.LineEnd()
at = pyparsing.Literal("@")
lessthan = pyparsing.Literal("<")
greaterthan = pyparsing.Literal(">")
opensquare = pyparsing.Literal("[")
closesquare = pyparsing.Literal("]")
inappropriate = pyparsing.CaselessLiteral("Inappropriate")
submitted = pyparsing.CaselessLiteral("Submitted")

# word related
# bracketed expression, e.g. "[some reason]" as used by Upstream-Status tags
nestexpr = pyparsing.nestedExpr(opener='[', closer=']')
# "Inappropriate [reason]" / "Submitted [where]" forms
inappropriateinfo = pyparsing.Literal("Inappropriate") + nestexpr
submittedinfo = pyparsing.Literal("Submitted") + nestexpr
word = pyparsing.Word(pyparsing.alphas)
# word that may contain dots, e.g. user names or domain components
worddot = pyparsing.Word(pyparsing.alphas+".")
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py b/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py new file mode 100644 index 0000000000..f7fb82ec2b --- /dev/null +++ b/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py | |||
@@ -0,0 +1,18 @@ | |||
1 | # CVE tag pyparsing definition | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | |||
import pyparsing
import common

# Raw strings for the regexes: '\S' and '\d' in plain string literals are
# invalid escape sequences (SyntaxWarning on modern Python).
name = pyparsing.Regex(r'\S+.*(?= <)')
username = pyparsing.OneOrMore(common.worddot)
domain = pyparsing.OneOrMore(common.worddot)
cve = pyparsing.Regex(r'CVE\-\d{4}\-\d+')
cve_mark = pyparsing.Literal("CVE:")

# "CVE: CVE-YYYY-NNNN" at the start of a line (commit message form)
cve_tag = pyparsing.AtLineStart(cve_mark + cve)
# same tag on an added ('+') line inside a patch hunk
patch_cve_tag = pyparsing.AtLineStart("+" + cve_mark + cve)
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py b/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py new file mode 100644 index 0000000000..30d3ab35b3 --- /dev/null +++ b/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py | |||
@@ -0,0 +1,14 @@ | |||
1 | # subject pyparsing definition | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | # NOTE: This is an oversimplified syntax of the mbox's summary | ||
8 | |||
import pyparsing
import common

# the "<target>" part: one or more printable words not containing a colon
target = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables.replace(':','')))
# the "<summary>" part: free-form printable words
summary = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables))
# full shortlog line: "<target>: <summary>"
shortlog = common.start + target + common.colon + summary + common.end
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py b/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py new file mode 100644 index 0000000000..692ebec3ff --- /dev/null +++ b/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py | |||
@@ -0,0 +1,22 @@ | |||
1 | # signed-off-by pyparsing definition | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | |||
import pyparsing
import common

# Raw string: '\S' in a plain string literal is an invalid escape
# sequence (SyntaxWarning on modern Python).
name = pyparsing.Regex(r'\S+.*(?= <)')
username = pyparsing.OneOrMore(common.worddot)
domain = pyparsing.OneOrMore(common.worddot)

# email address with named user/hostname/domain groups
email = pyparsing.Regex(r"(?P<user>[A-Za-z0-9._%+-]+)@(?P<hostname>[A-Za-z0-9.-]+)\.(?P<domain>[A-Za-z]{2,})")

email_enclosed = common.lessthan + email + common.greaterthan

signed_off_by_mark = pyparsing.Literal("Signed-off-by:")
# "Signed-off-by: Name <email>" at line start (commit message form)
signed_off_by = pyparsing.AtLineStart(signed_off_by_mark + name + email_enclosed)
# same tag on an added ('+') line inside a patch hunk
patch_signed_off_by = pyparsing.AtLineStart("+" + signed_off_by_mark + name + email_enclosed)
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py b/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py new file mode 100644 index 0000000000..bc6c427c4c --- /dev/null +++ b/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py | |||
@@ -0,0 +1,24 @@ | |||
1 | # upstream-status pyparsing definition | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | |||
import common
import pyparsing

# status values accepted verbatim
upstream_status_literal_valid_status = ["Pending", "Backport", "Denied", "Inappropriate", "Submitted"]
# human-readable variants (for error messages) showing the required extra info
upstream_status_nonliteral_valid_status = ["Pending", "Backport", "Denied", "Inappropriate [reason]", "Submitted [where]"]

# match any one of the valid status literals
upstream_status_valid_status = pyparsing.Or(
    [pyparsing.Literal(status) for status in upstream_status_literal_valid_status]
)

upstream_status_mark = pyparsing.Literal("Upstream-Status")
inappropriate_status_mark = common.inappropriate
submitted_status_mark = common.submitted

# "Upstream-Status: <status>" at line start
upstream_status = common.start + upstream_status_mark + common.colon + upstream_status_valid_status
# "Upstream-Status: Inappropriate [reason]"
upstream_status_inappropriate_info = common.start + upstream_status_mark + common.colon + common.inappropriateinfo
# "Upstream-Status: Submitted [where]"
upstream_status_submitted_info = common.start + upstream_status_mark + common.colon + common.submittedinfo
diff --git a/meta/lib/patchtest/tests/test_mbox.py b/meta/lib/patchtest/tests/test_mbox.py new file mode 100644 index 0000000000..0b623b7d17 --- /dev/null +++ b/meta/lib/patchtest/tests/test_mbox.py | |||
@@ -0,0 +1,159 @@ | |||
1 | # Checks related to the patch's author | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import base | ||
8 | import collections | ||
9 | import parse_shortlog | ||
10 | import parse_signed_off_by | ||
11 | import pyparsing | ||
12 | import subprocess | ||
13 | from data import PatchTestInput | ||
14 | |||
def headlog():
    """Return [commithash, author, date+':', shortlog] for the HEAD commit
    of the target repository.

    '#' is used as the field separator, so fields must not contain '#'
    (hashes and dates never do).
    """
    # Argument list + cwd= instead of an interpolated "cd ...; git ..."
    # shell string: avoids shell-quoting problems (spaces, metacharacters)
    # in repodir and removes the shell dependency entirely.
    output = subprocess.check_output(
        ['git', 'log', '--pretty=%h#%aN#%cD:#%s', '-1'],
        universal_newlines=True,
        cwd=PatchTestInput.repodir
    )
    return output.split('#')
22 | |||
class TestMbox(base.Base):
    """Checks on the mbox/series itself: author, shortlog, sign-off,
    commit message and target mailing list."""

    auh_email = 'auh@auh.yoctoproject.org'

    # author addresses that must never appear on a submitted patch
    # (raw strings: '\.' / '\S' in plain literals are invalid escapes)
    invalids = [pyparsing.Regex("^Upgrade Helper.+"),
                pyparsing.Regex(auh_email),
                pyparsing.Regex(r"uh@not\.set"),
                pyparsing.Regex(r"\S+@example\.com")]

    # loose detection of any "[YOCTO ...]" bugzilla reference
    rexp_detect = pyparsing.Regex(r'\[\s?YOCTO.*\]')
    # strict validation of "[YOCTO #<id>]" (one or more ids)
    rexp_validation = pyparsing.Regex(r'\[(\s?YOCTO\s?#\s?(\d+)\s?,?)+\]')
    # shortlogs of the form: Revert "..."
    # (the original defined this attribute twice; once is enough)
    revert_shortlog_regex = pyparsing.Regex(r'Revert\s+".*"')
    signoff_prog = parse_signed_off_by.signed_off_by
    maxlength = 90

    # base paths of main yocto project sub-projects
    paths = {
        'oe-core': ['meta-selftest', 'meta-skeleton', 'meta', 'scripts'],
        'bitbake': ['bitbake'],
        'documentation': ['documentation'],
        'poky': ['meta-poky','meta-yocto-bsp'],
        'oe': ['meta-gpe', 'meta-gnome', 'meta-efl', 'meta-networking', 'meta-multimedia','meta-initramfs', 'meta-ruby', 'contrib', 'meta-xfce', 'meta-filesystems', 'meta-perl', 'meta-webserver', 'meta-systemd', 'meta-oe', 'meta-python']
    }

    # scripts folder is a mix of oe-core and poky, most is oe-core code except:
    poky_scripts = ['scripts/yocto-bsp', 'scripts/yocto-kernel', 'scripts/yocto-layer', 'scripts/lib/bsp']

    Project = collections.namedtuple('Project', ['name', 'listemail', 'gitrepo', 'paths'])

    bitbake = Project(name='Bitbake', listemail='bitbake-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/bitbake/', paths=paths['bitbake'])
    # typo fix: was 'Documentantion'
    doc = Project(name='Documentation', listemail='yocto@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/', paths=paths['documentation'])
    poky = Project(name='Poky', listemail='poky@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/poky/', paths=paths['poky'])
    oe = Project(name='oe', listemail='openembedded-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/meta-openembedded/', paths=paths['oe'])


    def test_signed_off_by_presence(self):
        """Every commit must carry a Signed-off-by tag (reverts exempt)."""
        for commit in TestMbox.commits:
            # skip those patches that revert older commits; these do not require the tag
            if self.revert_shortlog_regex.search_string(commit.shortlog):
                continue
            if not self.signoff_prog.search_string(commit.payload):
                self.fail('Mbox is missing Signed-off-by. Add it manually or with "git commit --amend -s"',
                          commit=commit)

    def test_shortlog_format(self):
        """The shortlog must follow the "<target>: <summary>" format."""
        for commit in TestMbox.commits:
            shortlog = commit.shortlog
            if not shortlog.strip():
                self.skip('Empty shortlog, no reason to execute shortlog format test')
            else:
                # no reason to re-check on revert shortlogs
                if shortlog.startswith('Revert "'):
                    continue
                try:
                    parse_shortlog.shortlog.parseString(shortlog)
                except pyparsing.ParseException as pe:
                    self.fail('Commit shortlog (first line of commit message) should follow the format "<target>: <summary>"',
                              commit=commit)

    def test_shortlog_length(self):
        """The shortlog must not exceed maxlength characters."""
        for commit in TestMbox.commits:
            # no reason to re-check on revert shortlogs
            shortlog = commit.shortlog
            if shortlog.startswith('Revert "'):
                continue
            length = len(shortlog)
            if length > self.maxlength:
                self.fail('Edit shortlog so that it is %d characters or less (currently %d characters)' % (self.maxlength, length),
                          commit=commit)

    def test_series_merge_on_head(self):
        """The series must apply cleanly on top of the target branch.
        NOTE: currently disabled (skipped unconditionally)."""
        self.skip("Merge test is disabled for now")
        # unreachable while the skip above is in place
        if PatchTestInput.repo.branch != "master":
            self.skip("Skipping merge test since patch is not intended for master branch. Target detected is %s" % PatchTestInput.repo.branch)
        if not PatchTestInput.repo.ismerged:
            commithash, author, date, shortlog = headlog()
            self.fail('Series does not apply on top of target branch %s' % PatchTestInput.repo.branch,
                      data=[('Targeted branch', '%s (currently at %s)' % (PatchTestInput.repo.branch, commithash))])

    def test_target_mailing_list(self):
        """In case of merge failure, check for other targeted projects"""
        if PatchTestInput.repo.ismerged:
            self.skip('Series merged, no reason to check other mailing lists')

        # a meta project may be indicated in the message subject; if so, just fail
        # TODO: there may be other projects with no meta- prefix, we also need to detect these
        project_regex = pyparsing.Regex(r"\[(?P<project>meta-.+)\]")
        for commit in TestMbox.commits:
            match = project_regex.search_string(commit.subject)
            if match:
                self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
                          commit=commit)

        for patch in self.patchset:
            folders = patch.path.split('/')
            base_path = folders[0]
            # known sub-project paths point to their own mailing list
            for project in [self.bitbake, self.doc, self.oe, self.poky]:
                if base_path in project.paths:
                    self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
                              data=[('Suggested ML', '%s [%s]' % (project.listemail, project.gitrepo)),
                                    ('Patch\'s path:', patch.path)])

            # check for poky's scripts code
            if base_path.startswith('scripts'):
                for poky_file in self.poky_scripts:
                    if patch.path.startswith(poky_file):
                        self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
                                  data=[('Suggested ML', '%s [%s]' % (self.poky.listemail, self.poky.gitrepo)),('Patch\'s path:', patch.path)])

    def test_mbox_format(self):
        """The series must be parseable as a unified diff."""
        if self.unidiff_parse_error:
            self.fail('Series has malformed diff lines. Create the series again using git-format-patch and ensure it applies using git am',
                      data=[('Diff line',self.unidiff_parse_error)])

    def test_commit_message_presence(self):
        """Every commit must have a non-empty commit message."""
        for commit in TestMbox.commits:
            if not commit.commit_message.strip():
                self.fail('Please include a commit message on your patch explaining the change', commit=commit)

    def test_bugzilla_entry_format(self):
        """Bugzilla references must use the "[YOCTO #<id>]" format."""
        for commit in TestMbox.commits:
            if not self.rexp_detect.search_string(commit.commit_message):
                self.skip("No bug ID found")
            elif not self.rexp_validation.search_string(commit.commit_message):
                self.fail('Bugzilla issue ID is not correctly formatted - specify it with format: "[YOCTO #<bugzilla ID>]"', commit=commit)

    def test_author_valid(self):
        """The patch author must not be a known-invalid address."""
        for commit in self.commits:
            for invalid in self.invalids:
                if invalid.search_string(commit.author):
                    self.fail('Invalid author %s. Resend the series with a valid patch author' % commit.author, commit=commit)

    def test_non_auh_upgrade(self):
        """AUH-generated upgrades must be resent by a human author."""
        for commit in self.commits:
            if self.auh_email in commit.payload:
                self.fail('Invalid author %s. Resend the series with a valid patch author' % self.auh_email, commit=commit)
diff --git a/meta/lib/patchtest/tests/test_metadata.py b/meta/lib/patchtest/tests/test_metadata.py new file mode 100644 index 0000000000..f5dbcf01ed --- /dev/null +++ b/meta/lib/patchtest/tests/test_metadata.py | |||
@@ -0,0 +1,197 @@ | |||
1 | # Checks related to the patch's LIC_FILES_CHKSUM metadata variable | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import base | ||
8 | import os | ||
9 | import pyparsing | ||
10 | from data import PatchTestInput, PatchTestDataStore | ||
11 | |||
class TestMetadata(base.Metadata):
    """Checks on recipe metadata: LICENSE, LIC_FILES_CHKSUM, line length,
    SRC_URI file consistency, SUMMARY and CVE_CHECK_IGNORE deprecation."""

    metadata_lic = 'LICENSE'
    invalid_license = 'PATCHTESTINVALID'
    metadata_chksum = 'LIC_FILES_CHKSUM'
    license_var = 'LICENSE'
    closed = 'CLOSED'
    lictag_re = pyparsing.AtLineStart("License-Update:")
    lic_chksum_added = pyparsing.AtLineStart("+" + metadata_chksum)
    lic_chksum_removed = pyparsing.AtLineStart("-" + metadata_chksum)
    # an added line ('+' then a space) in a unified diff
    add_mark = pyparsing.Regex('\\+ ')
    max_length = 200
    metadata_src_uri = 'SRC_URI'
    md5sum = 'md5sum'
    sha256sum = 'sha256sum'
    git_regex = pyparsing.Regex('^git\\:\\/\\/.*')
    metadata_summary = 'SUMMARY'
    cve_check_ignore_var = 'CVE_CHECK_IGNORE'
    cve_status_var = 'CVE_STATUS'

    def test_license_presence(self):
        """Newly added recipes must set the LICENSE variable."""
        if not self.added:
            self.skip('No added recipes, skipping test')

        # TODO: this is a workaround so we can parse the recipe not
        # containing the LICENSE var: add some default license instead
        # of INVALID into auto.conf, then remove this line at the end
        # NOTE(review): assumes BUILDDIR is set (bitbake env loaded) - confirm
        auto_conf = os.path.join(os.environ.get('BUILDDIR'), 'conf', 'auto.conf')
        open_flag = 'w'
        if os.path.exists(auto_conf):
            open_flag = 'a'
        with open(auto_conf, open_flag) as fd:
            # Bug fix: the original wrote one identical line per added
            # recipe but removed only a single line afterwards, leaving
            # stale lines in auto.conf. The weak default (??=) applies to
            # all recipes, so write it exactly once.
            fd.write('LICENSE ??= "%s"\n' % self.invalid_license)

        no_license = False
        for pn in self.added:
            rd = self.tinfoil.parse_recipe(pn)
            license = rd.getVar(self.metadata_lic)
            if license == self.invalid_license:
                no_license = True
                break

        # remove the appended auto.conf line, or the file itself if we created it
        if open_flag == 'w':
            os.remove(auto_conf)
        else:
            with open(auto_conf, 'r') as fd:
                lines = fd.readlines()
            with open(auto_conf, 'w') as fd:
                fd.write(''.join(lines[:-1]))

        if no_license:
            self.fail('Recipe does not have the LICENSE field set.')

    def test_lic_files_chksum_presence(self):
        """Newly added (non-CLOSED, non-image) recipes must set LIC_FILES_CHKSUM."""
        if not self.added:
            self.skip('No added recipes, skipping test')

        for pn in self.added:
            rd = self.tinfoil.parse_recipe(pn)
            pathname = rd.getVar('FILE')
            # we are not interested in images
            if '/images/' in pathname:
                continue
            lic_files_chksum = rd.getVar(self.metadata_chksum)
            if rd.getVar(self.license_var) == self.closed:
                continue
            if not lic_files_chksum:
                self.fail('%s is missing in newly added recipe' % self.metadata_chksum)

    def test_lic_files_chksum_modified_not_mentioned(self):
        """A LIC_FILES_CHKSUM change requires a License-Update: tag in the
        commit message."""
        if not self.modified:
            self.skip('No modified recipes, skipping test')

        for patch in self.patchset:
            # for the moment, we are just interested in metadata
            if patch.path.endswith('.patch'):
                continue
            payload = str(patch)
            if (self.lic_chksum_added.search_string(payload) or self.lic_chksum_removed.search_string(payload)):
                # if any patch on the series contains a reference to the metadata, fail
                for commit in self.commits:
                    if self.lictag_re.search_string(commit.commit_message):
                        break
                else:
                    self.fail('LIC_FILES_CHKSUM changed without "License-Update:" tag and description in commit message')

    def test_max_line_length(self):
        """Added metadata lines must not exceed max_length characters."""
        for patch in self.patchset:
            # for the moment, we are just interested in metadata
            if patch.path.endswith('.patch'):
                continue
            payload = str(patch)
            for line in payload.splitlines():
                if self.add_mark.search_string(line):
                    current_line_length = len(line[1:])
                    if current_line_length > self.max_length:
                        self.fail('Patch line too long (current length %s, maximum is %s)' % (current_line_length, self.max_length),
                                  data=[('Patch', patch.path), ('Line', '%s ...' % line[0:80])])

    def _record_src_uri(self):
        """Store SRC_URI of each modified (non-image) recipe in the shared
        datastore, keyed by this test's shortid, so the pretest/test pair
        can compare values from before and after the merge."""
        for pn in self.modified:
            # we are not interested in images
            if 'core-image' in pn:
                continue
            rd = self.tinfoil.parse_recipe(pn)
            PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)] = rd.getVar(self.metadata_src_uri)

    def pretest_src_uri_left_files(self):
        """Record SRC_URI before the series is merged (pre-merge phase)."""
        # these tests just make sense on patches that can be merged
        if not PatchTestInput.repo.canbemerged:
            self.skip('Patch cannot be merged')
        if not self.modified:
            self.skip('No modified recipes, skipping pretest')

        # get the proper metadata values
        self._record_src_uri()

    def test_src_uri_left_files(self):
        """Files dropped from SRC_URI must also be removed from the tree."""
        # these tests just make sense on patches that can be merged
        if not PatchTestInput.repo.canbemerged:
            self.skip('Patch cannot be merged')
        if not self.modified:
            self.skip('No modified recipes, skipping pretest')

        # get the proper metadata values
        self._record_src_uri()

        for pn in self.modified:
            # 'pre' + this shortid matches the key stored by the pretest phase
            pretest_src_uri = PatchTestDataStore['pre%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)].split()
            test_src_uri = PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)].split()

            pretest_files = set([os.path.basename(patch) for patch in pretest_src_uri if patch.startswith('file://')])
            test_files = set([os.path.basename(patch) for patch in test_src_uri if patch.startswith('file://')])

            # check if files were removed
            if len(test_files) < len(pretest_files):

                # get removals from patchset
                filesremoved_from_patchset = set()
                for patch in self.patchset:
                    if patch.is_removed_file:
                        filesremoved_from_patchset.add(os.path.basename(patch.path))

                # get the deleted files from the SRC_URI
                filesremoved_from_usr_uri = pretest_files - test_files

                # finally, get those patches removed at SRC_URI and not removed from the patchset
                # TODO: we are not taking into account renames, so test may raise false positives
                not_removed = filesremoved_from_usr_uri - filesremoved_from_patchset
                if not_removed:
                    self.fail('Patches not removed from tree. Remove them and amend the submitted mbox',
                              data=[('Patch', f) for f in not_removed])

    def test_summary_presence(self):
        """Newly added recipes must set a non-default SUMMARY."""
        if not self.added:
            self.skip('No added recipes, skipping test')

        for pn in self.added:
            # we are not interested in images
            if 'core-image' in pn:
                continue
            rd = self.tinfoil.parse_recipe(pn)
            summary = rd.getVar(self.metadata_summary)

            # "${PN} version ${PN}-${PR}" is the default, so fail if default
            if summary.startswith('%s version' % pn):
                self.fail('%s is missing in newly added recipe' % self.metadata_summary)

    def test_cve_check_ignore(self):
        """CVE_CHECK_IGNORE is deprecated in favour of CVE_STATUS."""
        # Skip if we neither modified a recipe or target branches are not
        # Nanbield and newer. CVE_CHECK_IGNORE was first deprecated in Nanbield.
        if not self.modified or PatchTestInput.repo.branch == "kirkstone" or PatchTestInput.repo.branch == "dunfell":
            self.skip('No modified recipes or older target branch, skipping test')
        for pn in self.modified:
            # we are not interested in images
            if 'core-image' in pn:
                continue
            rd = self.tinfoil.parse_recipe(pn)
            cve_check_ignore = rd.getVar(self.cve_check_ignore_var)

            if cve_check_ignore is not None:
                self.fail('%s is deprecated and should be replaced by %s' % (self.cve_check_ignore_var, self.cve_status_var))
diff --git a/meta/lib/patchtest/tests/test_patch.py b/meta/lib/patchtest/tests/test_patch.py new file mode 100644 index 0000000000..d7187a0cb1 --- /dev/null +++ b/meta/lib/patchtest/tests/test_patch.py | |||
@@ -0,0 +1,103 @@ | |||
1 | # Checks related to the patch's CVE lines | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | import base | ||
9 | import os | ||
10 | import parse_signed_off_by | ||
11 | import parse_upstream_status | ||
12 | import pyparsing | ||
13 | |||
class TestPatch(base.Base):
    """Checks on patch files added by a series.

    Verifies that newly added .patch files carry a well-formed
    Upstream-Status tag and a Signed-off-by tag, and that commits
    referencing a CVE include a properly formatted "CVE:" tag.
    """

    # Raw strings: "\-"/"\d" in plain strings are invalid escape
    # sequences and warn (eventually error) on modern Python.
    re_cve_pattern = pyparsing.Regex(r"CVE\-\d{4}\-\d+")
    re_cve_payload_tag = pyparsing.Regex(r"\+CVE:(\s+CVE\-\d{4}\-\d+)+")
    upstream_status_regex = pyparsing.AtLineStart("+" + "Upstream-Status")

    @classmethod
    def setUpClassLocal(cls):
        cls.newpatches = []
        # get just those relevant patches: new software patches
        for patch in cls.patchset:
            if patch.path.endswith('.patch') and patch.is_added_file:
                cls.newpatches.append(patch)

        # Human-readable form of the Signed-off-by mark, used in messages
        cls.mark = str(parse_signed_off_by.signed_off_by_mark).strip('"')

        # match the Signed-off-by mark with '+' preceding it
        cls.prog = parse_signed_off_by.patch_signed_off_by

    def setUp(self):
        if self.unidiff_parse_error:
            self.skip('Parse error %s' % self.unidiff_parse_error)

        self.valid_status = ', '.join(parse_upstream_status.upstream_status_nonliteral_valid_status)
        self.standard_format = 'Upstream-Status: <Valid status>'

        # we are just interested in series that introduce CVE patches, thus discard other
        # possibilities: modification to current CVEs, patch directly introduced into the
        # recipe, upgrades already including the CVE, etc.
        new_cves = [p for p in self.patchset if p.path.endswith('.patch') and p.is_added_file]
        if not new_cves:
            self.skip('No new CVE patches introduced')

    def test_upstream_status_presence_format(self):
        """Check every new patch file for a valid Upstream-Status tag."""
        if not TestPatch.newpatches:
            self.skip("There are no new software patches, no reason to test Upstream-Status presence/format")

        for newpatch in TestPatch.newpatches:
            payload = newpatch.__str__()
            # Tag missing entirely from the added patch's commit message
            if not self.upstream_status_regex.search_string(payload):
                self.fail('Added patch file is missing Upstream-Status: <Valid status> in the commit message',
                          data=[('Standard format', self.standard_format), ('Valid status', self.valid_status)])
            for line in payload.splitlines():
                # Skip unidiff metadata lines (---, +++, @@, ...)
                if self.patchmetadata_regex.match(line):
                    continue
                if self.upstream_status_regex.search_string(line):
                    if parse_upstream_status.inappropriate_status_mark.searchString(line):
                        # "Inappropriate" must come with a bracketed reason
                        try:
                            parse_upstream_status.upstream_status_inappropriate_info.parseString(line.lstrip('+'))
                        except pyparsing.ParseException as pe:
                            self.fail('Upstream-Status is Inappropriate, but no reason was provided',
                                      data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Inappropriate [reason]')])
                    elif parse_upstream_status.submitted_status_mark.searchString(line):
                        # "Submitted" must say where it was submitted
                        try:
                            parse_upstream_status.upstream_status_submitted_info.parseString(line.lstrip('+'))
                        except pyparsing.ParseException as pe:
                            self.fail('Upstream-Status is Submitted, but it is not mentioned where',
                                      data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Submitted [where]')])
                    else:
                        # Any other status just has to be one of the valid ones
                        try:
                            parse_upstream_status.upstream_status.parseString(line.lstrip('+'))
                        except pyparsing.ParseException as pe:
                            self.fail('Upstream-Status is in incorrect format',
                                      data=[('Current', pe.pstr), ('Standard format', self.standard_format), ('Valid status', self.valid_status)])

    def test_signed_off_by_presence(self):
        """Check every new patch file contains a Signed-off-by tag."""
        if not TestPatch.newpatches:
            # Fixed NameError: the mark is stored on this class by
            # setUpClassLocal; 'PatchSignedOffBy' is not defined anywhere.
            self.skip("There are no new software patches, no reason to test %s presence" % TestPatch.mark)

        for newpatch in TestPatch.newpatches:
            payload = newpatch.__str__()
            for line in payload.splitlines():
                if self.patchmetadata_regex.match(line):
                    continue
                # NOTE(review): searches the whole payload (not the current
                # line), so the per-line loop only controls the for/else;
                # behavior kept as-is — confirm whether a per-line search
                # was intended.
                if TestPatch.prog.search_string(payload):
                    break
            else:
                self.fail('A patch file has been added without a Signed-off-by tag: \'%s\'' % os.path.basename(newpatch.path))

    def test_cve_tag_format(self):
        """Commits mentioning a CVE must carry a "CVE: CVE-YYYY-XXXX" tag."""
        for commit in TestPatch.commits:
            if self.re_cve_pattern.search_string(commit.shortlog) or self.re_cve_pattern.search_string(commit.commit_message):
                tag_found = False
                for line in commit.payload.splitlines():
                    if self.re_cve_payload_tag.search_string(line):
                        tag_found = True
                        break
                if not tag_found:
                    self.fail('Missing or incorrectly formatted CVE tag in patch file. Correct or include the CVE tag in the patch with format: "CVE: CVE-YYYY-XXXX"',
                              commit=commit)
diff --git a/meta/lib/patchtest/tests/test_python_pylint.py b/meta/lib/patchtest/tests/test_python_pylint.py new file mode 100644 index 0000000000..ef315e591c --- /dev/null +++ b/meta/lib/patchtest/tests/test_python_pylint.py | |||
@@ -0,0 +1,65 @@ | |||
1 | # Checks on Python code in the series, performed with pylint | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import base | ||
8 | from io import StringIO | ||
9 | from data import PatchTestInput | ||
10 | from pylint.reporters.text import TextReporter | ||
11 | import pylint.lint as lint | ||
12 | |||
13 | |||
class PyLint(base.Base):
    """Run pylint over Python files touched by the series and fail on new errors.

    pretest_pylint records the issues already present before the series is
    applied; test_pylint runs afterwards and fails only for issues whose
    message text did not appear in the pre-patch run.
    """
    pythonpatches = []
    pylint_pretest = {}
    pylint_test = {}
    pylint_options = " -E --disable='E0611, E1101, F0401, E0602' --msg-template='L:{line} F:{module} I:{msg}'"

    @staticmethod
    def _run_pylint(path):
        """Lint a single file, returning {location: message} per reported issue."""
        pylint_output = StringIO()
        reporter = TextReporter(pylint_output)
        lint.Run([PyLint.pylint_options, path], reporter=reporter, exit=False)
        # Rewind before reading: writes leave the buffer position at the
        # end, so readlines() without seek(0) silently returns nothing.
        pylint_output.seek(0)
        issues = {}
        for line in pylint_output.readlines():
            stripped = line.strip()
            # lines containing '*' are pylint section headers, not issues
            if '*' in line or not stripped:
                continue
            location, _, message = stripped.partition(' ')
            issues[location] = message
        return issues

    @classmethod
    def setUpClassLocal(cls):
        # get just those patches touching python files (and not deleting them)
        cls.pythonpatches = [patch for patch in cls.patchset
                             if patch.path.endswith('.py') and not patch.is_removed_file]

    def setUp(self):
        if self.unidiff_parse_error:
            self.skip('Python-unidiff parse error')
        if not PyLint.pythonpatches:
            self.skip('No python related patches, skipping test')

    def pretest_pylint(self):
        # Record pre-existing issues so test_pylint reports only new ones
        for pythonpatch in self.pythonpatches:
            if pythonpatch.is_modified_file:
                self.pylint_pretest.update(self._run_pylint(pythonpatch.path))

    def test_pylint(self):
        for pythonpatch in self.pythonpatches:
            # a condition checking whether a file is renamed or not
            # unidiff doesn't support this yet
            # (fixed: 'is not' compared string identity, not equality)
            if pythonpatch.target_file != pythonpatch.path:
                path = pythonpatch.target_file[2:]
            else:
                path = pythonpatch.path
            # fixed: lint the rename-resolved path; the original computed
            # it and then linted pythonpatch.path anyway
            self.pylint_test.update(self._run_pylint(path))

        for issue, message in self.pylint_test.items():
            if message not in self.pylint_pretest.values():
                self.fail('Errors in your Python code were encountered. Please check your code with a linter and resubmit',
                          data=[('Output', 'Please, fix the listed issues:'), ('', issue + ' ' + message)])