mirror of git://git.yoctoproject.org/poky.git
patchtest: simplify, rename modules
- simplify base.py, data.py
- move some leftover regex patterns to patterns.py
- remove pyparsing path logic, since this is no longer needed
- rename PatchTestInput class to PatchtestParser
- data.py: rename to patchtest_parser.py
- patterns.py: rename to patchtest_patterns.py
- move PatchTestDataStore to test_metadata.py since that's the only place it's used
- remove unused logger code

(From OE-Core rev: 1e971b05b036b0b1eb0bdbd9b26b54d06e74294c)

Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
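For code that uses these modules, the change is a rename of the parser module and class (plus the removals listed above); behaviour is otherwise meant to be unchanged. A minimal illustrative sketch, not part of this commit's diff, of how a patchtest test module refers to them before and after:

    # Before this commit (module data.py, class PatchTestInput):
    #   from data import PatchTestInput
    #   repodir = PatchTestInput.repodir
    #
    # After this commit (data.py -> patchtest_parser.py, patterns.py -> patchtest_patterns.py):
    from patchtest_parser import PatchtestParser   # renamed class, same parsed-argument attributes
    import patchtest_patterns                      # regex/pyparsing patterns module

    repodir = PatchtestParser.repodir              # attribute names are unchanged by the rename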
parent 18a65c77c0
commit 4c378fc895
@@ -15,19 +15,11 @@
 
 import os
 import argparse
-import collections
-import logging
-
-logger=logging.getLogger('patchtest')
-info=logger.info
 
 default_testdir = os.path.abspath(os.path.dirname(__file__) + "/tests")
 default_repodir = os.path.abspath(os.path.dirname(__file__) + "/../../..")
 
-# Data store commonly used to share values between pre and post-merge tests
-PatchTestDataStore = collections.defaultdict(str)
-
-class PatchTestInput(object):
+class PatchtestParser(object):
     """Abstract the patchtest argument parser"""
 
     @classmethod
@@ -5,6 +5,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 
 import pyparsing
+import re
 
 # general
 colon = pyparsing.Literal(":")
@@ -34,10 +35,16 @@ lic_chksum_added = pyparsing.AtLineStart("+" + metadata_chksum)
 lic_chksum_removed = pyparsing.AtLineStart("-" + metadata_chksum)
 add_mark = pyparsing.Regex('\\+ ')
 patch_max_line_length = 200
-metadata_src_uri = 'SRC_URI'
-metadata_summary = 'SUMMARY'
-cve_check_ignore_var = 'CVE_CHECK_IGNORE'
-cve_status_var = 'CVE_STATUS'
+metadata_src_uri = "SRC_URI"
+metadata_summary = "SUMMARY"
+cve_check_ignore_var = "CVE_CHECK_IGNORE"
+cve_status_var = "CVE_STATUS"
+endcommit_messages_regex = re.compile(
+    r"\(From \w+-\w+ rev:|(?<!\S)Signed-off-by|(?<!\S)---\n"
+)
+patchmetadata_regex = re.compile(
+    r"-{3} \S+|\+{3} \S+|@{2} -\d+,\d+ \+\d+,\d+ @{2} \S+"
+)
 
 # mbox
 auh_email = 'auh@yoctoproject.org'
@@ -8,21 +8,24 @@ import unittest
 import logging
 import json
 import unidiff
-from data import PatchTestInput
+from patchtest_parser import PatchtestParser
 import mailbox
+import patchtest_patterns
 import collections
 import sys
 import os
 import re
 
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'pyparsing'))
-
-logger = logging.getLogger('patchtest')
+logger = logging.getLogger("patchtest")
 debug = logger.debug
 info = logger.info
 warn = logger.warn
 error = logger.error
 
+Commit = collections.namedtuple(
+    "Commit", ["author", "subject", "commit_message", "shortlog", "payload"]
+)
+
 Commit = collections.namedtuple('Commit', ['author', 'subject', 'commit_message', 'shortlog', 'payload'])
 
 class PatchtestOEError(Exception):
@@ -34,9 +37,6 @@ class PatchtestOEError(Exception):
 class Base(unittest.TestCase):
     # if unit test fails, fail message will throw at least the following JSON: {"id": <testid>}
 
-    endcommit_messages_regex = re.compile(r'\(From \w+-\w+ rev:|(?<!\S)Signed-off-by|(?<!\S)---\n')
-    patchmetadata_regex = re.compile(r'-{3} \S+|\+{3} \S+|@{2} -\d+,\d+ \+\d+,\d+ @{2} \S+')
-
     @staticmethod
     def msg_to_commit(msg):
         payload = msg.get_payload()
@@ -49,7 +49,7 @@ class Base(unittest.TestCase):
     @staticmethod
     def commit_message(payload):
         commit_message = payload.__str__()
-        match = Base.endcommit_messages_regex.search(payload)
+        match = patchtest_patterns.endcommit_messages_regex.search(payload)
         if match:
             commit_message = payload[:match.start()]
         return commit_message
@@ -65,13 +65,15 @@ class Base(unittest.TestCase):
     def setUpClass(cls):
 
         # General objects: mailbox.mbox and patchset
-        cls.mbox = mailbox.mbox(PatchTestInput.repo.patch.path)
+        cls.mbox = mailbox.mbox(PatchtestParser.repo.patch.path)
 
         # Patch may be malformed, so try parsing it
         cls.unidiff_parse_error = ''
         cls.patchset = None
         try:
-            cls.patchset = unidiff.PatchSet.from_filename(PatchTestInput.repo.patch.path, encoding=u'UTF-8')
+            cls.patchset = unidiff.PatchSet.from_filename(
+                PatchtestParser.repo.patch.path, encoding="UTF-8"
+            )
         except unidiff.UnidiffParseError as upe:
             cls.patchset = []
             cls.unidiff_parse_error = str(upe)
@@ -148,7 +150,7 @@ class Metadata(Base):
 
         # import relevant libraries
         try:
-            scripts_path = os.path.join(PatchTestInput.repodir, 'scripts', 'lib')
+            scripts_path = os.path.join(PatchtestParser.repodir, "scripts", "lib")
             if scripts_path not in sys.path:
                 sys.path.insert(0, scripts_path)
             import scriptpath
@@ -223,11 +225,23 @@ class Metadata(Base):
             for patch in patchset:
                 if patch.path.endswith('.bb') or patch.path.endswith('.bbappend') or patch.path.endswith('.inc'):
                     if patch.is_added_file:
-                        added_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path))
+                        added_paths.append(
+                            os.path.join(
+                                os.path.abspath(PatchtestParser.repodir), patch.path
+                            )
+                        )
                     elif patch.is_modified_file:
-                        modified_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path))
+                        modified_paths.append(
+                            os.path.join(
+                                os.path.abspath(PatchtestParser.repodir), patch.path
+                            )
+                        )
                     elif patch.is_removed_file:
-                        removed_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path))
+                        removed_paths.append(
+                            os.path.join(
+                                os.path.abspath(PatchtestParser.repodir), patch.path
+                            )
+                        )
 
             data = cls.tinfoil.cooker.recipecaches[''].pkg_fn.items()
 
@@ -6,15 +6,15 @@
 
 import base
 import collections
-import patterns
+import patchtest_patterns
 import pyparsing
 import re
 import subprocess
-from data import PatchTestInput
+from patchtest_parser import PatchtestParser
 
 def headlog():
     output = subprocess.check_output(
-        "cd %s; git log --pretty='%%h#%%aN#%%cD:#%%s' -1" % PatchTestInput.repodir,
+        "cd %s; git log --pretty='%%h#%%aN#%%cD:#%%s' -1" % PatchtestParser.repodir,
         universal_newlines=True,
         shell=True
     )
@@ -45,11 +45,13 @@ class TestMbox(base.Base):
     def test_signed_off_by_presence(self):
         for commit in self.commits:
             # skip those patches that revert older commits, these do not required the tag presence
-            if patterns.mbox_revert_shortlog_regex.search_string(commit.shortlog):
+            if patchtest_patterns.mbox_revert_shortlog_regex.search_string(commit.shortlog):
                 continue
-            if not patterns.signed_off_by.search_string(commit.payload):
-                self.fail('Mbox is missing Signed-off-by. Add it manually or with "git commit --amend -s"',
-                          commit=commit)
+            if not patchtest_patterns.signed_off_by.search_string(commit.payload):
+                self.fail(
+                    'Mbox is missing Signed-off-by. Add it manually or with "git commit --amend -s"',
+                    commit=commit,
+                )
 
     def test_shortlog_format(self):
         for commit in self.commits:
@@ -61,7 +63,7 @@ class TestMbox(base.Base):
             if shortlog.startswith('Revert "'):
                 continue
             try:
-                patterns.shortlog.parseString(shortlog)
+                patchtest_patterns.shortlog.parseString(shortlog)
             except pyparsing.ParseException as pe:
                 self.fail('Commit shortlog (first line of commit message) should follow the format "<target>: <summary>"',
                           commit=commit)
@@ -73,22 +75,34 @@ class TestMbox(base.Base):
             if shortlog.startswith('Revert "'):
                 continue
             l = len(shortlog)
-            if l > patterns.mbox_shortlog_maxlength:
-                self.fail('Edit shortlog so that it is %d characters or less (currently %d characters)' % (patterns.mbox_shortlog_maxlength, l),
-                          commit=commit)
+            if l > patchtest_patterns.mbox_shortlog_maxlength:
+                self.fail(
+                    "Edit shortlog so that it is %d characters or less (currently %d characters)"
+                    % (patchtest_patterns.mbox_shortlog_maxlength, l),
+                    commit=commit,
+                )
 
     def test_series_merge_on_head(self):
         self.skip("Merge test is disabled for now")
-        if PatchTestInput.repo.patch.branch != "master":
+        if PatchtestParser.repo.patch.branch != "master":
             self.skip(
                 "Skipping merge test since patch is not intended"
                 " for master branch. Target detected is %s"
-                % PatchTestInput.repo.patch.branch
+                % PatchtestParser.repo.patch.branch
             )
-        if not PatchTestInput.repo.canbemerged:
+        if not PatchtestParser.repo.canbemerged:
             commithash, author, date, shortlog = headlog()
-            self.fail('Series does not apply on top of target branch %s' % PatchTestInput.repo.branch,
-                      data=[('Targeted branch', '%s (currently at %s)' % (PatchTestInput.repo.branch, commithash))])
+            self.fail(
+                "Series does not apply on top of target branch %s"
+                % PatchtestParser.repo.patch.branch,
+                data=[
+                    (
+                        "Targeted branch",
+                        "%s (currently at %s)"
+                        % (PatchtestParser.repo.patch.branch, commithash),
+                    )
+                ],
+            )
 
     def test_target_mailing_list(self):
         """Check for other targeted projects"""
@@ -129,19 +143,28 @@ class TestMbox(base.Base):
                 self.fail('Please include a commit message on your patch explaining the change', commit=commit)
 
     def test_bugzilla_entry_format(self):
-        for commit in TestMbox.commits:
-            if not patterns.mbox_bugzilla.search_string(commit.commit_message):
+        for commit in self.commits:
+            if not patchtest_patterns.mbox_bugzilla.search_string(commit.commit_message):
                 self.skip("No bug ID found")
-            elif not patterns.mbox_bugzilla_validation.search_string(commit.commit_message):
-                self.fail('Bugzilla issue ID is not correctly formatted - specify it with format: "[YOCTO #<bugzilla ID>]"', commit=commit)
+            elif not patchtest_patterns.mbox_bugzilla_validation.search_string(
+                commit.commit_message
+            ):
+                self.fail(
+                    'Bugzilla issue ID is not correctly formatted - specify it with format: "[YOCTO #<bugzilla ID>]"',
+                    commit=commit,
+                )
 
     def test_author_valid(self):
         for commit in self.commits:
-            for invalid in patterns.invalid_submitters:
+            for invalid in patchtest_patterns.invalid_submitters:
                 if invalid.search_string(commit.author):
                     self.fail('Invalid author %s. Resend the series with a valid patch author' % commit.author, commit=commit)
 
     def test_non_auh_upgrade(self):
         for commit in self.commits:
-            if patterns.auh_email in commit.commit_message:
-                self.fail('Invalid author %s. Resend the series with a valid patch author' % patterns.auh_email, commit=commit)
+            if patchtest_patterns.auh_email in commit.commit_message:
+                self.fail(
+                    "Invalid author %s. Resend the series with a valid patch author"
+                    % patchtest_patterns.auh_email,
+                    commit=commit,
+                )
@@ -5,10 +5,14 @@
 # SPDX-License-Identifier: GPL-2.0-only
 
 import base
+import collections
 import os
-import patterns
+import patchtest_patterns
 import pyparsing
-from data import PatchTestInput, PatchTestDataStore
+from patchtest_parser import PatchtestParser
 
+# Data store commonly used to share values between pre and post-merge tests
+PatchTestDataStore = collections.defaultdict(str)
+
 class TestMetadata(base.Metadata):
 
@@ -25,13 +29,13 @@ class TestMetadata(base.Metadata):
         open_flag = 'a'
         with open(auto_conf, open_flag) as fd:
             for pn in self.added:
-                fd.write('LICENSE ??= "%s"\n' % patterns.invalid_license)
+                fd.write('LICENSE ??= "%s"\n' % patchtest_patterns.invalid_license)
 
         no_license = False
         for pn in self.added:
             rd = self.tinfoil.parse_recipe(pn)
-            license = rd.getVar(patterns.metadata_lic)
-            if license == patterns.invalid_license:
+            license = rd.getVar(patchtest_patterns.metadata_lic)
+            if license == patchtest_patterns.invalid_license:
                 no_license = True
                 break
 
@@ -58,11 +62,13 @@ class TestMetadata(base.Metadata):
             # we are not interested in images
             if '/images/' in pathname:
                 continue
-            lic_files_chksum = rd.getVar(patterns.metadata_chksum)
-            if rd.getVar(patterns.license_var) == patterns.closed:
+            lic_files_chksum = rd.getVar(patchtest_patterns.metadata_chksum)
+            if rd.getVar(patchtest_patterns.license_var) == patchtest_patterns.closed:
                 continue
             if not lic_files_chksum:
-                self.fail('%s is missing in newly added recipe' % patterns.metadata_chksum)
+                self.fail(
+                    "%s is missing in newly added recipe" % patchtest_patterns.metadata_chksum
+                )
 
     def test_lic_files_chksum_modified_not_mentioned(self):
         if not self.modified:
@@ -73,10 +79,12 @@ class TestMetadata(base.Metadata):
             if patch.path.endswith('.patch'):
                 continue
             payload = str(patch)
-            if (patterns.lic_chksum_added.search_string(payload) or patterns.lic_chksum_removed.search_string(payload)):
+            if patchtest_patterns.lic_chksum_added.search_string(
+                payload
+            ) or patchtest_patterns.lic_chksum_removed.search_string(payload):
                 # if any patch on the series contain reference on the metadata, fail
                 for commit in self.commits:
-                    if patterns.lictag_re.search_string(commit.commit_message):
+                    if patchtest_patterns.lictag_re.search_string(commit.commit_message):
                         break
                 else:
                     self.fail('LIC_FILES_CHKSUM changed without "License-Update:" tag and description in commit message')
@@ -88,16 +96,22 @@ class TestMetadata(base.Metadata):
                 continue
             payload = str(patch)
             for line in payload.splitlines():
-                if patterns.add_mark.search_string(line):
+                if patchtest_patterns.add_mark.search_string(line):
                     current_line_length = len(line[1:])
-                    if current_line_length > patterns.patch_max_line_length:
-                        self.fail('Patch line too long (current length %s, maximum is %s)' % (current_line_length, patterns.patch_max_line_length),
-                                  data=[('Patch', patch.path), ('Line', '%s ...' % line[0:80])])
+                    if current_line_length > patchtest_patterns.patch_max_line_length:
+                        self.fail(
+                            "Patch line too long (current length %s, maximum is %s)"
+                            % (current_line_length, patchtest_patterns.patch_max_line_length),
+                            data=[
+                                ("Patch", patch.path),
+                                ("Line", "%s ..." % line[0:80]),
+                            ],
+                        )
 
     def pretest_src_uri_left_files(self):
         # these tests just make sense on patches that can be merged
-        if not PatchTestInput.repo.canbemerged:
-            self.skip('Patch cannot be merged')
+        if not PatchtestParser.repo.canbemerged:
+            self.skip("Patch cannot be merged")
         if not self.modified:
             self.skip('No modified recipes, skipping pretest')
 
@@ -107,12 +121,14 @@ class TestMetadata(base.Metadata):
             if 'core-image' in pn:
                 continue
             rd = self.tinfoil.parse_recipe(pn)
-            PatchTestDataStore['%s-%s-%s' % (self.shortid(), patterns.metadata_src_uri, pn)] = rd.getVar(patterns.metadata_src_uri)
+            PatchTestDataStore[
+                "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
+            ] = rd.getVar(patchtest_patterns.metadata_src_uri)
 
     def test_src_uri_left_files(self):
         # these tests just make sense on patches that can be merged
-        if not PatchTestInput.repo.canbemerged:
-            self.skip('Patch cannot be merged')
+        if not PatchtestParser.repo.canbemerged:
+            self.skip("Patch cannot be merged")
         if not self.modified:
             self.skip('No modified recipes, skipping pretest')
 
@@ -122,11 +138,17 @@ class TestMetadata(base.Metadata):
             if 'core-image' in pn:
                 continue
             rd = self.tinfoil.parse_recipe(pn)
-            PatchTestDataStore['%s-%s-%s' % (self.shortid(), patterns.metadata_src_uri, pn)] = rd.getVar(patterns.metadata_src_uri)
+            PatchTestDataStore[
+                "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
+            ] = rd.getVar(patchtest_patterns.metadata_src_uri)
 
         for pn in self.modified:
-            pretest_src_uri = PatchTestDataStore['pre%s-%s-%s' % (self.shortid(), patterns.metadata_src_uri, pn)].split()
-            test_src_uri = PatchTestDataStore['%s-%s-%s' % (self.shortid(), patterns.metadata_src_uri, pn)].split()
+            pretest_src_uri = PatchTestDataStore[
+                "pre%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
+            ].split()
+            test_src_uri = PatchTestDataStore[
+                "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
+            ].split()
 
             pretest_files = set([os.path.basename(patch) for patch in pretest_src_uri if patch.startswith('file://')])
             test_files = set([os.path.basename(patch) for patch in test_src_uri if patch.startswith('file://')])
@@ -159,23 +181,32 @@ class TestMetadata(base.Metadata):
             if 'core-image' in pn:
                 continue
             rd = self.tinfoil.parse_recipe(pn)
-            summary = rd.getVar(patterns.metadata_summary)
+            summary = rd.getVar(patchtest_patterns.metadata_summary)
 
             # "${PN} version ${PN}-${PR}" is the default, so fail if default
-            if summary.startswith('%s version' % pn):
-                self.fail('%s is missing in newly added recipe' % patterns.metadata_summary)
+            if summary.startswith("%s version" % pn):
+                self.fail(
+                    "%s is missing in newly added recipe" % patchtest_patterns.metadata_summary
+                )
 
     def test_cve_check_ignore(self):
         # Skip if we neither modified a recipe or target branches are not
         # Nanbield and newer. CVE_CHECK_IGNORE was first deprecated in Nanbield.
-        if not self.modified or PatchTestInput.repo.patch.branch == "kirkstone" or PatchTestInput.repo.patch.branch == "dunfell":
-            self.skip('No modified recipes or older target branch, skipping test')
+        if (
+            not self.modified
+            or PatchtestParser.repo.patch.branch == "kirkstone"
+            or PatchtestParser.repo.patch.branch == "dunfell"
+        ):
+            self.skip("No modified recipes or older target branch, skipping test")
         for pn in self.modified:
            # we are not interested in images
            if 'core-image' in pn:
                continue
            rd = self.tinfoil.parse_recipe(pn)
-           cve_check_ignore = rd.getVar(patterns.cve_check_ignore_var)
+           cve_check_ignore = rd.getVar(patchtest_patterns.cve_check_ignore_var)
 
            if cve_check_ignore is not None:
-               self.fail('%s is deprecated and should be replaced by %s' % (patterns.cve_check_ignore_var, patterns.cve_status_var))
+               self.fail(
+                   "%s is deprecated and should be replaced by %s"
+                   % (patchtest_patterns.cve_check_ignore_var, patchtest_patterns.cve_status_var)
+               )
@@ -7,7 +7,7 @@
 
 import base
 import os
-import patterns
+import patchtest_patterns
 import pyparsing
 
 class TestPatch(base.Base):
@@ -20,17 +20,17 @@ class TestPatch(base.Base):
             if patch.path.endswith('.patch') and patch.is_added_file:
                 cls.newpatches.append(patch)
 
-        cls.mark = str(patterns.signed_off_by_prefix).strip('"')
+        cls.mark = str(patchtest_patterns.signed_off_by_prefix).strip('"')
 
         # match PatchSignedOffBy.mark with '+' preceding it
-        cls.prog = patterns.patch_signed_off_by
+        cls.prog = patchtest_patterns.patch_signed_off_by
 
     def setUp(self):
         if self.unidiff_parse_error:
             self.skip('Parse error %s' % self.unidiff_parse_error)
 
-        self.valid_status = ', '.join(patterns.upstream_status_nonliteral_valid_status)
-        self.standard_format = 'Upstream-Status: <Valid status>'
+        self.valid_status = ", ".join(patchtest_patterns.upstream_status_nonliteral_valid_status)
+        self.standard_format = "Upstream-Status: <Valid status>"
 
         # we are just interested in series that introduce CVE patches, thus discard other
         # possibilities: modification to current CVEs, patch directly introduced into the
@@ -45,31 +45,62 @@ class TestPatch(base.Base):
 
         for newpatch in TestPatch.newpatches:
             payload = newpatch.__str__()
-            if not patterns.upstream_status_regex.search_string(payload):
-                self.fail('Added patch file is missing Upstream-Status: <Valid status> in the commit message',
-                          data=[('Standard format', self.standard_format), ('Valid status', self.valid_status)])
+            if not patchtest_patterns.upstream_status_regex.search_string(payload):
+                self.fail(
+                    "Added patch file is missing Upstream-Status: <Valid status> in the commit message",
+                    data=[
+                        ("Standard format", self.standard_format),
+                        ("Valid status", self.valid_status),
+                    ],
+                )
             for line in payload.splitlines():
-                if self.patchmetadata_regex.match(line):
+                if patchtest_patterns.patchmetadata_regex.match(line):
                     continue
-                if patterns.upstream_status_regex.search_string(line):
-                    if patterns.inappropriate.searchString(line):
+                if patchtest_patterns.upstream_status_regex.search_string(line):
+                    if patchtest_patterns.inappropriate.searchString(line):
                         try:
-                            patterns.upstream_status_inappropriate_info.parseString(line.lstrip('+'))
+                            patchtest_patterns.upstream_status_inappropriate_info.parseString(
+                                line.lstrip("+")
+                            )
                         except pyparsing.ParseException as pe:
-                            self.fail('Upstream-Status is Inappropriate, but no reason was provided',
-                                      data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Inappropriate [reason]')])
-                    elif patterns.submitted.searchString(line):
+                            self.fail(
+                                "Upstream-Status is Inappropriate, but no reason was provided",
+                                data=[
+                                    ("Current", pe.pstr),
+                                    (
+                                        "Standard format",
+                                        "Upstream-Status: Inappropriate [reason]",
+                                    ),
+                                ],
+                            )
+                    elif patchtest_patterns.submitted.searchString(line):
                         try:
-                            patterns.upstream_status_submitted_info.parseString(line.lstrip('+'))
+                            patchtest_patterns.upstream_status_submitted_info.parseString(
+                                line.lstrip("+")
+                            )
                         except pyparsing.ParseException as pe:
-                            self.fail('Upstream-Status is Submitted, but it is not mentioned where',
-                                      data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Submitted [where]')])
+                            self.fail(
+                                "Upstream-Status is Submitted, but it is not mentioned where",
+                                data=[
+                                    ("Current", pe.pstr),
+                                    (
+                                        "Standard format",
+                                        "Upstream-Status: Submitted [where]",
+                                    ),
+                                ],
+                            )
                     else:
                         try:
-                            patterns.upstream_status.parseString(line.lstrip('+'))
+                            patchtest_patterns.upstream_status.parseString(line.lstrip("+"))
                         except pyparsing.ParseException as pe:
-                            self.fail('Upstream-Status is in incorrect format',
-                                      data=[('Current', pe.pstr), ('Standard format', self.standard_format), ('Valid status', self.valid_status)])
+                            self.fail(
+                                "Upstream-Status is in incorrect format",
+                                data=[
+                                    ("Current", pe.pstr),
+                                    ("Standard format", self.standard_format),
+                                    ("Valid status", self.valid_status),
+                                ],
+                            )
 
     def test_signed_off_by_presence(self):
         if not TestPatch.newpatches:
@@ -78,7 +109,7 @@ class TestPatch(base.Base):
         for newpatch in TestPatch.newpatches:
             payload = newpatch.__str__()
             for line in payload.splitlines():
-                if self.patchmetadata_regex.match(line):
+                if patchtest_patterns.patchmetadata_regex.match(line):
                     continue
                 if TestPatch.prog.search_string(payload):
                     break
@@ -87,10 +118,12 @@ class TestPatch(base.Base):
 
     def test_cve_tag_format(self):
         for commit in TestPatch.commits:
-            if patterns.cve.search_string(commit.shortlog) or patterns.cve.search_string(commit.commit_message):
+            if patchtest_patterns.cve.search_string(
+                commit.shortlog
+            ) or patchtest_patterns.cve.search_string(commit.commit_message):
                 tag_found = False
                 for line in commit.payload.splitlines():
-                    if patterns.cve_payload_tag.search_string(line):
+                    if patchtest_patterns.cve_payload_tag.search_string(line):
                         tag_found = True
                         break
                 if not tag_found:
@@ -5,9 +5,8 @@
 # SPDX-License-Identifier: GPL-2.0-only
 
 import base
-import patterns
 from io import StringIO
-from data import PatchTestInput
+from patchtest_parser import PatchtestParser
 from pylint.reporters.text import TextReporter
 import pylint.lint as lint
 
@@ -22,7 +22,7 @@ sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
 # Include patchtest library
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest'))
 
-from data import PatchTestInput
+from patchtest_parser import PatchtestParser
 from repo import PatchTestRepo
 
 logger = logging.getLogger("patchtest")
@@ -47,10 +47,10 @@ def getResult(patch, mergepatch, logfile=None):
         def startTestRun(self):
             # let's create the repo already, it can be used later on
             repoargs = {
-                'repodir': PatchTestInput.repodir,
-                'commit' : PatchTestInput.basecommit,
-                'branch' : PatchTestInput.basebranch,
-                'patch' : patch,
+                "repodir": PatchtestParser.repodir,
+                "commit": PatchtestParser.basecommit,
+                "branch": PatchtestParser.basebranch,
+                "patch": patch,
             }
 
             self.repo_error = False
@@ -58,7 +58,7 @@ def getResult(patch, mergepatch, logfile=None):
             self.test_failure = False
 
             try:
-                self.repo = PatchTestInput.repo = PatchTestRepo(**repoargs)
+                self.repo = PatchtestParser.repo = PatchTestRepo(**repoargs)
             except:
                 logger.error(traceback.print_exc())
                 self.repo_error = True
@@ -129,7 +129,11 @@ def _runner(resultklass, prefix=None):
     loader.testMethodPrefix = prefix
 
     # create the suite with discovered tests and the corresponding runner
-    suite = loader.discover(start_dir=PatchTestInput.testdir, pattern=PatchTestInput.pattern, top_level_dir=PatchTestInput.topdir)
+    suite = loader.discover(
+        start_dir=PatchtestParser.testdir,
+        pattern=PatchtestParser.pattern,
+        top_level_dir=PatchtestParser.topdir,
+    )
     ntc = suite.countTestCases()
 
     # if there are no test cases, just quit
@@ -173,12 +177,12 @@ def run(patch, logfile=None):
 
 def main():
     tmp_patch = False
-    patch_path = PatchTestInput.patch_path
-    log_results = PatchTestInput.log_results
+    patch_path = PatchtestParser.patch_path
+    log_results = PatchtestParser.log_results
     log_path = None
     patch_list = None
 
-    git_status = os.popen("(cd %s && git status)" % PatchTestInput.repodir).read()
+    git_status = os.popen("(cd %s && git status)" % PatchtestParser.repodir).read()
     status_matches = ["Changes not staged for commit", "Changes to be committed"]
    if any([match in git_status for match in status_matches]):
        logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest")
@@ -213,16 +217,16 @@ def main():
 if __name__ == '__main__':
     ret = 1
 
-    # Parse the command line arguments and store it on the PatchTestInput namespace
-    PatchTestInput.set_namespace()
+    # Parse the command line arguments and store it on the PatchtestParser namespace
+    PatchtestParser.set_namespace()
 
     # set debugging level
-    if PatchTestInput.debug:
+    if PatchtestParser.debug:
         logger.setLevel(logging.DEBUG)
 
     # if topdir not define, default it to testdir
-    if not PatchTestInput.topdir:
-        PatchTestInput.topdir = PatchTestInput.testdir
+    if not PatchtestParser.topdir:
+        PatchtestParser.topdir = PatchtestParser.testdir
 
     try:
         ret = main()