diff --git a/src/mlx/warnings/__init__.py b/src/mlx/warnings/__init__.py index 2aa126d..354f8b0 100644 --- a/src/mlx/warnings/__init__.py +++ b/src/mlx/warnings/__init__.py @@ -1,21 +1,21 @@ """ Melexis fork of warnings plugin """ __all__ = [ - 'CoverityChecker', - 'DoxyChecker', - 'Finding', - 'JUnitChecker', - 'PolyspaceChecker', - 'PolyspaceFamilyChecker', - 'RobotChecker', - 'RobotSuiteChecker', - 'SphinxChecker', - 'WarningsChecker', - 'WarningsPlugin', - 'XMLRunnerChecker', - '__version__', - 'warnings_wrapper', - 'WarningsConfigError', + "CoverityChecker", + "DoxyChecker", + "Finding", + "JUnitChecker", + "PolyspaceChecker", + "PolyspaceFamilyChecker", + "RobotChecker", + "RobotSuiteChecker", + "SphinxChecker", + "WarningsChecker", + "WarningsPlugin", + "XMLRunnerChecker", + "__version__", + "warnings_wrapper", + "WarningsConfigError", ] diff --git a/src/mlx/warnings/__main__.py b/src/mlx/warnings/__main__.py index f865668..dfd4a76 100644 --- a/src/mlx/warnings/__main__.py +++ b/src/mlx/warnings/__main__.py @@ -6,5 +6,5 @@ """ from mlx.warnings.warnings import main -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/mlx/warnings/code_quality.py b/src/mlx/warnings/code_quality.py index b9bd7b8..b73f966 100644 --- a/src/mlx/warnings/code_quality.py +++ b/src/mlx/warnings/code_quality.py @@ -31,7 +31,7 @@ def fingerprint(self): 4. Step 3 is repeated until a unique hash is obtained. """ hashable_string = f"{self.severity}{self.path}{self.description}" - new_hash = hashlib.md5(str(hashable_string).encode('utf-8')).hexdigest() + new_hash = hashlib.md5(str(hashable_string).encode("utf-8")).hexdigest() while new_hash in self.fingerprints and self.fingerprints[new_hash] != self: new_hash = hashlib.md5(f"{hashable_string}{new_hash}".encode()).hexdigest() type(self).fingerprints[new_hash] = self diff --git a/src/mlx/warnings/junit_checker.py b/src/mlx/warnings/junit_checker.py index f969c73..7622d30 100644 --- a/src/mlx/warnings/junit_checker.py +++ b/src/mlx/warnings/junit_checker.py @@ -11,16 +11,16 @@ class JUnitChecker(WarningsChecker): - name = 'junit' + name = "junit" def check(self, content): - ''' Function for counting the number of JUnit failures in a specific text + """Function for counting the number of JUnit failures in a specific text Args: content (str): The content to parse - ''' + """ try: - root_input = etree.fromstring(content.encode('utf-8')) + root_input = etree.fromstring(content.encode("utf-8")) testsuites_root = self.prepare_tree(root_input) suites = JUnitXml.fromelem(testsuites_root) amount_to_exclude = 0 @@ -34,19 +34,19 @@ def check(self, content): @property def name_repr(self): - return "JUnit" if self.name == 'junit' else self.name.capitalize() + return "JUnit" if self.name == "junit" else self.name.capitalize() @staticmethod def prepare_tree(root_input): - ''' Prepares the tree element by adding a testsuites element as root when missing (to please JUnitXml) + """Prepares the tree element by adding a testsuites element as root when missing (to please JUnitXml) Args: root_input (lxml.etree._Element/xml.etree.ElementTree.Element): Top-level XML element from input file Returns: lxml.etree._Element/xml.etree.ElementTree.Element: Top-level XML element with testsuites tag - ''' - if root_input.tag.startswith('testsuite') and root_input.find('testcase') is None: + """ + if root_input.tag.startswith("testsuite") and root_input.find("testcase") is None: testsuites_root = root_input else: testsuites_root = etree.Element("testsuites") @@ -54,7 +54,7 
@@ def prepare_tree(root_input): return testsuites_root def _check_testcase(self, testcase, extra={}): - """ Handles the check of a test case element by checking if the result is a failure/error. + """Handles the check of a test case element by checking if the result is a failure/error. If it is to be excluded by a configured regex, 1 is returned. Otherwise, when in verbose/output mode, the suite name and test case name are printed/written along with the @@ -69,6 +69,6 @@ def _check_testcase(self, testcase, extra={}): if isinstance(testcase.result, (Failure, Error)): if self._is_excluded(testcase.result.message): return 1 - self.logger.info(f'{testcase.classname}.{testcase.name}') - self.logger.debug(f'{testcase.classname}.{testcase.name} | {testcase.result.message}') + self.logger.info(f"{testcase.classname}.{testcase.name}") + self.logger.debug(f"{testcase.classname}.{testcase.name} | {testcase.result.message}") return 0 diff --git a/src/mlx/warnings/polyspace_checker.py b/src/mlx/warnings/polyspace_checker.py index d43eebc..b188e24 100644 --- a/src/mlx/warnings/polyspace_checker.py +++ b/src/mlx/warnings/polyspace_checker.py @@ -11,24 +11,24 @@ class PolyspaceChecker(WarningsChecker): - name = 'polyspace' + name = "polyspace" checkers = [] def __init__(self, *logging_args): '''Constructor to set the default code quality description template to "Polyspace: $check"''' super().__init__(*logging_args) - self._cq_description_template = Template('Polyspace: $check') + self._cq_description_template = Template("Polyspace: $check") @property def cq_findings(self): - ''' List[dict]: list of code quality findings''' + """List[dict]: list of code quality findings""" for checker in self.checkers: self._cq_findings.extend(checker.cq_findings) return self._cq_findings @property def cq_description_template(self): - ''' Template: string.Template instance based on the configured template string ''' + """Template: string.Template instance based on the configured template string""" return self._cq_description_template @cq_description_template.setter @@ -37,11 +37,11 @@ def cq_description_template(self, template_obj): @property def minimum(self): - ''' Gets the lowest minimum amount of warnings + """Gets the lowest minimum amount of warnings Returns: int: the lowest minimum for warnings - ''' + """ if self.checkers: return min(x.minimum for x in self.checkers) return 0 @@ -53,11 +53,11 @@ def minimum(self, minimum): @property def maximum(self): - ''' Gets the highest minimum amount of warnings + """Gets the highest minimum amount of warnings Returns: int: the highest maximum for warnings - ''' + """ if self.checkers: return max(x.maximum for x in self.checkers) return 0 @@ -68,43 +68,43 @@ def maximum(self, maximum): checker.maximum = maximum def check(self, content): - ''' + """ Function for counting the number of failures in a TSV file exported by Polyspace Args: content (_io.TextIOWrapper): The open file to parse - ''' + """ if not isinstance(content, TextIOWrapper): raise TypeError( f"{self.__class__.__name__} can't handle this type; expected {type(TextIOWrapper)}; got {type(content)}" ) - reader = csv.DictReader(content, dialect='excel-tab') + reader = csv.DictReader(content, dialect="excel-tab") # set column names to lowercase reader.fieldnames = [name.lower() for name in reader.fieldnames] for row in reader: for checker in self.checkers: - if row['family'].lower() == checker.family_value: + if row["family"].lower() == checker.family_value: checker.check(row) def return_count(self): - ''' Getter 
function for the amount of warnings found + """Getter function for the amount of warnings found Returns: int: Number of warnings found - ''' + """ self.count = 0 for checker in self.checkers: self.count += checker.return_count() return self.count def return_check_limits(self): - ''' Function for checking whether the warning count is within the configured limits + """Function for checking whether the warning count is within the configured limits Returns: int: 0 if the amount of warnings is within limits, the count of warnings otherwise (or 1 in case of a count of 0 warnings) - ''' + """ count = 0 for checker in self.checkers: count += checker.return_check_limits() @@ -130,10 +130,10 @@ def parse_config(self, config): if family_value == "enabled": continue if family_value == "cq_description_template": - self.cq_description_template = Template(config['cq_description_template']) + self.cq_description_template = Template(config["cq_description_template"]) continue if family_value == "cq_default_path": - self.cq_default_path = config['cq_default_path'] + self.cq_default_path = config["cq_default_path"] continue if family_value == "exclude": self.add_patterns(config.get("exclude"), self.exclude_patterns) @@ -164,7 +164,7 @@ def parse_config(self, config): class PolyspaceFamilyChecker(WarningsChecker): - name = 'polyspace_sub' + name = "polyspace_sub" subchecker = True code_quality_severity = { "impact: high": "critical", @@ -190,7 +190,7 @@ def __init__(self, family_value, column_name, check_value, *logging_args): @property def cq_description_template(self): - ''' Template: string.Template instance based on the configured template string ''' + """Template: string.Template instance based on the configured template string""" return self._cq_description_template @cq_description_template.setter @@ -198,11 +198,11 @@ def cq_description_template(self, template_obj): self._cq_description_template = template_obj def add_code_quality_finding(self, row): - '''Add code quality finding + """Add code quality finding Args: row (dict): The row of the warning with the corresponding colomn names - ''' + """ try: description = self.cq_description_template.substitute(os.environ, **row) except KeyError as err: @@ -222,12 +222,12 @@ def add_code_quality_finding(self, row): self.cq_findings.append(finding.to_dict()) def check(self, content): - ''' + """ Function for counting the number of failures in a TSV/CSV file exported by Polyspace Args: content (dict): The row of the TSV file - ''' + """ if content[self.column_name].lower() == self.check_value: if content["status"].lower() in ["not a defect", "justified"]: self.logger.info(f"Excluded defect with ID {content.get('id', None)!r} because the status is " diff --git a/src/mlx/warnings/regex_checker.py b/src/mlx/warnings/regex_checker.py index 1639bf8..c772fc5 100644 --- a/src/mlx/warnings/regex_checker.py +++ b/src/mlx/warnings/regex_checker.py @@ -20,25 +20,25 @@ class RegexChecker(WarningsChecker): - name = 'regex' + name = "regex" pattern = None SEVERITY_MAP = { - 'debug': 'info', - 'info': 'info', - 'notice': 'info', - 'warning': 'major', - 'error': 'critical', - 'severe': 'critical', - 'critical': 'critical', - 'failed': 'critical', + "debug": "info", + "info": "info", + "notice": "info", + "warning": "major", + "error": "critical", + "severe": "critical", + "critical": "critical", + "failed": "critical", } def check(self, content): - ''' Function for counting the number of warnings in a specific text + """Function for counting the number of warnings in a 
specific text Args: content (str): The content to parse - ''' + """ matches = re.finditer(self.pattern, content) for match in matches: match_string = match.group(0).strip() @@ -51,11 +51,11 @@ def check(self, content): self.add_code_quality_finding(match) def add_code_quality_finding(self, match): - '''Add code quality finding + """Add code quality finding Args: match (re.Match): The regex match - ''' + """ groups = {name: result for name, result in match.groupdict().items() if result} description = next((result for name, result in groups.items() if name.startswith("description")), None) @@ -72,12 +72,12 @@ def add_code_quality_finding(self, match): class CoverityChecker(RegexChecker): - name = 'coverity' + name = "coverity" pattern = coverity_pattern def __init__(self, *logging_args): super().__init__(*logging_args) - self._cq_description_template = Template('Coverity: CID $cid: $checker') + self._cq_description_template = Template("Coverity: CID $cid: $checker") self.checkers = { "unclassified": CoverityClassificationChecker("unclassified", *logging_args), "pending": CoverityClassificationChecker("pending", *logging_args), @@ -88,14 +88,14 @@ def __init__(self, *logging_args): @property def cq_findings(self): - ''' List[dict]: list of code quality findings''' + """List[dict]: list of code quality findings""" for checker in self.checkers.values(): self._cq_findings.extend(checker.cq_findings) return self._cq_findings @property def cq_description_template(self): - ''' Template: string.Template instance based on the configured template string ''' + """Template: string.Template instance based on the configured template string""" return self._cq_description_template @cq_description_template.setter @@ -103,23 +103,23 @@ def cq_description_template(self, template_obj): self._cq_description_template = template_obj def return_count(self): - ''' Getter function for the amount of warnings found + """Getter function for the amount of warnings found Returns: int: Number of warnings found - ''' + """ self.count = 0 for checker in self.checkers.values(): self.count += checker.return_count() return self.count def return_check_limits(self): - ''' Function for checking whether the warning count is within the configured limits + """Function for checking whether the warning count is within the configured limits Returns: int: 0 if the amount of warnings is within limits, the count of warnings otherwise (or 1 in case of a count of 0 warnings) - ''' + """ count = 0 for checker in self.checkers.values(): count += checker.return_check_limits() @@ -128,13 +128,13 @@ def return_check_limits(self): return count def check(self, content): - ''' + """ Function for counting the number of warnings, but adopted for Coverity output Args: content (str): The content to parse - ''' + """ matches = re.finditer(self.pattern, content) for match in matches: if (classification := match.group("classification").lower()) in self.checkers: @@ -169,15 +169,15 @@ def parse_config(self, config): class CoverityClassificationChecker(WarningsChecker): - name = 'coverity_sub' + name = "coverity_sub" subchecker = True logging_fmt = "{checker.name_repr}: {checker.classification:<14} | {message}" SEVERITY_MAP = { - 'false positive': 'info', - 'intentional': 'info', - 'bug': 'major', - 'unclassified': 'major', - 'pending': 'critical', + "false positive": "info", + "intentional": "info", + "bug": "major", + "unclassified": "major", + "pending": "critical", } def __init__(self, classification, *args): @@ -191,7 +191,7 @@ def __init__(self, 
classification, *args): @property def cq_description_template(self): - ''' Template: string.Template instance based on the configured template string ''' + """Template: string.Template instance based on the configured template string""" return self._cq_description_template @cq_description_template.setter @@ -199,11 +199,11 @@ def cq_description_template(self, template_obj): self._cq_description_template = template_obj def add_code_quality_finding(self, match): - '''Add code quality finding + """Add code quality finding Args: match (re.Match): The regex match - ''' + """ groups = {name: result for name, result in match.groupdict().items() if result} try: description = self.cq_description_template.substitute(os.environ, **groups) @@ -224,15 +224,15 @@ def add_code_quality_finding(self, match): self.cq_findings.append(finding.to_dict()) def check(self, content): - ''' + """ Function for counting the number of warnings, but adopted for Coverity output. Multiple warnings for the same CID are counted as one. Args: content (re.Match): The regex match - ''' + """ match_string = content.group(0).strip() - if not self._is_excluded(match_string) and (content.group('curr') == content.group('max')): + if not self._is_excluded(match_string) and (content.group("curr") == content.group("max")): self.count += 1 self.logger.info(match_string) self.logger.debug(match_string) @@ -241,24 +241,24 @@ def check(self, content): class DoxyChecker(RegexChecker): - name = 'doxygen' + name = "doxygen" pattern = doxy_pattern class SphinxChecker(RegexChecker): - name = 'sphinx' + name = "sphinx" pattern = sphinx_pattern sphinx_deprecation_regex = r"(?m)^(?:(.+?:(?:\d+|None)?):?\s*)?(DEBUG|INFO|WARNING|ERROR|SEVERE|(?:\w+Sphinx\d+Warning)):\s*(.+)$" sphinx_deprecation_regex_in_match = "RemovedInSphinx\\d+Warning" def include_sphinx_deprecation(self): - ''' + """ Adds the pattern for sphinx_deprecation_regex to the list patterns to include and alters the main pattern - ''' + """ self.pattern = re.compile(self.sphinx_deprecation_regex) self.add_patterns([self.sphinx_deprecation_regex_in_match], self.include_patterns) class XMLRunnerChecker(RegexChecker): - name = 'xmlrunner' + name = "xmlrunner" pattern = xmlrunner_pattern diff --git a/src/mlx/warnings/robot_checker.py b/src/mlx/warnings/robot_checker.py index 781f6f1..a88a464 100644 --- a/src/mlx/warnings/robot_checker.py +++ b/src/mlx/warnings/robot_checker.py @@ -9,17 +9,17 @@ class RobotChecker(WarningsChecker): - name = 'robot' + name = "robot" checkers = [] logging_fmt = "{checker.name_repr}: {message}" @property def minimum(self): - ''' Gets the lowest minimum amount of warnings + """Gets the lowest minimum amount of warnings Returns: int: the lowest minimum for warnings - ''' + """ if self.checkers: return min(x.minimum for x in self.checkers) return 0 @@ -31,11 +31,11 @@ def minimum(self, minimum): @property def maximum(self): - ''' Gets the highest minimum amount of warnings + """Gets the highest minimum amount of warnings Returns: int: the highest maximum for warnings - ''' + """ if self.checkers: return max(x.maximum for x in self.checkers) return 0 @@ -46,34 +46,34 @@ def maximum(self, maximum): checker.maximum = maximum def check(self, content): - ''' + """ Function for counting the number of failures in a specific Robot Framework test suite Args: content (str): The content to parse - ''' + """ for checker in self.checkers: checker.check(content) def return_count(self): - ''' Getter function for the amount of warnings found + """Getter function for the amount 
of warnings found Returns: int: Number of warnings found - ''' + """ self.count = 0 for checker in self.checkers: self.count += checker.return_count() return self.count def return_check_limits(self): - ''' Function for checking whether the warning count is within the configured limits + """Function for checking whether the warning count is within the configured limits Returns: int: 0 if the amount of warnings is within limits, the count of warnings otherwise (or 1 in case of a count of 0 warnings) - ''' + """ count = 0 for checker in self.checkers: count += checker.return_check_limits() @@ -83,25 +83,25 @@ def return_check_limits(self): def parse_config(self, config): self.checkers = [] - check_suite_name = config.get('check_suite_names', True) - for suite_config in config['suites']: - checker = RobotSuiteChecker(suite_config['name'], *self.logging_args, check_suite_name=check_suite_name) + check_suite_name = config.get("check_suite_names", True) + for suite_config in config["suites"]: + checker = RobotSuiteChecker(suite_config["name"], *self.logging_args, check_suite_name=check_suite_name) checker.parse_config(suite_config) self.checkers.append(checker) class RobotSuiteChecker(JUnitChecker): - name = 'robot_sub' + name = "robot_sub" subchecker = True logging_fmt = "{checker.name_repr}: {checker.suite_name_repr:<20} {message}" def __init__(self, suite_name, *logging_args, check_suite_name=False): - ''' Constructor + """Constructor Args: name (str): Name of the test suite to check the results of check_suite_name (bool): Whether to raise an error when no test in suite with given name is found - ''' + """ super().__init__(*logging_args) self.suite_name = suite_name self.check_suite_name = check_suite_name @@ -113,10 +113,10 @@ def suite_name_repr(self): @property def name_repr(self): - return self.name.replace('_sub', '').capitalize() + return self.name.replace("_sub", "").capitalize() def _check_testcase(self, testcase): - """ Handles the check of a test case element by checking if the result is a failure/error. + """Handles the check of a test case element by checking if the result is a failure/error. If it is to be excluded by a configured regex, or the test case does not belong to the suite, 1 is returned. Otherwise, when in verbose/output mode, the suite name and test case name are printed/written along with the @@ -134,7 +134,7 @@ def _check_testcase(self, testcase): return int(self.suite_name and isinstance(testcase.result, (Failure, Error))) def check(self, content): - """ Function for counting the number of JUnit failures in a specific text + """Function for counting the number of JUnit failures in a specific text The test cases with a ``classname`` that does not end with the ``name`` class attribute are ignored. @@ -146,5 +146,5 @@ def check(self, content): """ super().check(content) if not self.is_valid_suite_name and self.check_suite_name: - self.logger.error(f'No suite with name {self.suite_name!r} found. Returning error code -1.') + self.logger.error(f"No suite with name {self.suite_name!r} found. 
Returning error code -1.") sys.exit(-1) diff --git a/src/mlx/warnings/warnings.py b/src/mlx/warnings/warnings.py index 94275a3..945cb8f 100644 --- a/src/mlx/warnings/warnings.py +++ b/src/mlx/warnings/warnings.py @@ -20,7 +20,7 @@ from .regex_checker import CoverityChecker, DoxyChecker, SphinxChecker, XMLRunnerChecker from .robot_checker import RobotChecker -__version__ = distribution('mlx.warnings').version +__version__ = distribution("mlx.warnings").version LOGGER = logging.getLogger(__name__) @@ -28,12 +28,12 @@ class WarningsPlugin: def __init__(self, cq_enabled=False): - ''' + """ Function for initializing the parsers Args: cq_enabled (bool): optional - enable generation of Code Quality report - ''' + """ self.activated_checkers = {} self.cq_enabled = cq_enabled self.public_checkers = (SphinxChecker, DoxyChecker, JUnitChecker, XMLRunnerChecker, CoverityChecker, @@ -44,7 +44,7 @@ def __init__(self, cq_enabled=False): self.printout = False def activate_checker(self, checker_type, *logging_args): - ''' + """ Activate additional checkers after initialization Args: @@ -52,14 +52,15 @@ def activate_checker(self, checker_type, *logging_args): Return: WarningsChecker: activated checker object - ''' + """ checker = checker_type(*logging_args) - checker.cq_enabled = self.cq_enabled and checker.name in ('doxygen', 'sphinx', 'xmlrunner', 'polyspace', 'coverity') + checker.cq_enabled = self.cq_enabled and checker.name in ("doxygen", "sphinx", "xmlrunner", "polyspace", + "coverity") self.activated_checkers[checker.name] = checker return checker def activate_checker_name(self, name, *args): - ''' + """ Activates checker by name Args: @@ -67,7 +68,7 @@ def activate_checker_name(self, name, *args): Returns: WarningsChecker: activated checker object, or None when no checker with the given name exists - ''' + """ for checker_type in self.public_checkers: if checker_type.name == name: checker = self.activate_checker(checker_type, *args) @@ -76,22 +77,22 @@ def activate_checker_name(self, name, *args): LOGGER.error(f"Checker {name} does not exist") def get_checker(self, name): - ''' Get checker by name + """Get checker by name Args: name (str): checker name Return: checker object (WarningsChecker) - ''' + """ return self.activated_checkers[name] def check(self, content): - ''' + """ Count the number of warnings in a specified content Args: content (str): The content to parse - ''' + """ if self.printout: LOGGER.warning(content) if not self.activated_checkers: @@ -104,12 +105,12 @@ def check(self, content): checker.check(content) def check_logfile(self, file): - ''' + """ Count the number of warnings in a specified content Args: content (_io.TextIOWrapper): The open file to parse - ''' + """ if not self.activated_checkers: LOGGER.error("No checkers activated. 
Please use activate_checker function") elif "polyspace" in self.activated_checkers: @@ -122,25 +123,25 @@ def check_logfile(self, file): checker.check(content) def configure_maximum(self, maximum): - ''' Configure the maximum amount of warnings for each activated checker + """Configure the maximum amount of warnings for each activated checker Args: maximum (int): maximum amount of warnings allowed - ''' + """ for checker in self.activated_checkers.values(): checker.maximum = maximum def configure_minimum(self, minimum): - ''' Configure the minimum amount of warnings for each activated checker + """Configure the minimum amount of warnings for each activated checker Args: minimum (int): minimum amount of warnings allowed - ''' + """ for checker in self.activated_checkers.values(): checker.minimum = minimum def return_count(self, name=None): - ''' Getter function for the amount of found warnings + """Getter function for the amount of found warnings If the name parameter is set, this function will return the amount of warnings found by that checker. If not, the function will return the sum @@ -151,7 +152,7 @@ def return_count(self, name=None): Returns: int: Amount of found warnings - ''' + """ self.count = 0 if name is None: for checker in self.activated_checkers.values(): @@ -161,7 +162,7 @@ def return_count(self, name=None): return self.count def return_check_limits(self, name=None): - ''' Function for determining the return value of the script + """Function for determining the return value of the script If the name parameter is set, this function will check (and return) the return value of that checker. If not, this function checks whether the @@ -173,7 +174,7 @@ def return_check_limits(self, name=None): Return: int: 0 if the amount of warnings is within limits, the count of warnings otherwise (or 1 in case of a count of 0 warnings) - ''' + """ if name is None: for checker in self.activated_checkers.values(): retval = checker.return_check_limits() @@ -185,24 +186,24 @@ def return_check_limits(self, name=None): return 0 def toggle_printout(self, printout): - ''' Toggle printout of all the parsed content + """Toggle printout of all the parsed content Useful for command input where we want to print content as well Args: printout (bool): True enables the printout, False provides more silent mode - ''' + """ self.printout = printout def config_parser(self, config, *logging_args): - ''' Parsing configuration dict extracted by previously opened JSON or YAML file + """Parsing configuration dict extracted by previously opened JSON or YAML file Args: config (dict/Path): Content or path of configuration file - ''' + """ if isinstance(config, Path): - with open(config, encoding='utf-8') as open_file: - if config.suffix.lower().startswith('.y'): + with open(config, encoding="utf-8") as open_file: + if config.suffix.lower().startswith(".y"): config = YAML().load(open_file) else: config = json.load(open_file) @@ -212,7 +213,7 @@ def config_parser(self, config, *logging_args): if checker_type.name in config: checker_config = config[checker_type.name] try: - if bool(checker_config['enabled']): + if bool(checker_config["enabled"]): checker = self.activate_checker(checker_type, *logging_args) checker.parse_config(checker_config) LOGGER.info(f"{checker.name_repr}: Config parsing completed") @@ -220,56 +221,56 @@ def config_parser(self, config, *logging_args): raise WarningsConfigError(f"Incomplete config. 
Missing: {err}") from err def write_code_quality_report(self, out_file): - ''' Generates the Code Quality report artifact as a JSON file that implements a subset of the Code Climate spec + """Generates the Code Quality report artifact as a JSON file that implements a subset of the Code Climate spec Args: out_file (str): Location for the output file - ''' + """ results = [] for checker in self.activated_checkers.values(): results.extend(checker.cq_findings) content = json.dumps(results, indent=4, sort_keys=False) Path(out_file).parent.mkdir(parents=True, exist_ok=True) - with open(out_file, 'w', encoding='utf-8', newline='\n') as open_file: + with open(out_file, "w", encoding="utf-8", newline="\n") as open_file: open_file.write(f"{content}\n") def warnings_wrapper(args): - parser = argparse.ArgumentParser(prog='mlx-warnings') - group1 = parser.add_argument_group('Configuration command line options') - group1.add_argument('--coverity', dest='coverity', action='store_true') - group1.add_argument('-d', '--doxygen', dest='doxygen', action='store_true') - group1.add_argument('-j', '--junit', dest='junit', action='store_true') - group1.add_argument('-r', '--robot', dest='robot', action='store_true') - group1.add_argument('-s', '--sphinx', dest='sphinx', action='store_true') - group1.add_argument('-x', '--xmlrunner', dest='xmlrunner', action='store_true') - group1.add_argument('--name', default='', - help='Name of the Robot Framework test suite to check results of') - group1.add_argument('-m', '--maxwarnings', '--max-warnings', type=int, default=0, - help='Maximum amount of warnings accepted') - group1.add_argument('--minwarnings', '--min-warnings', type=int, default=0, - help='Minimum amount of warnings accepted') - group1.add_argument('--exact-warnings', type=int, default=0, - help='Exact amount of warnings expected') - group2 = parser.add_argument_group('Configuration file with options') - group2.add_argument('--config', dest='configfile', action='store', required=False, type=Path, - help='Config file in JSON or YAML format provides toggle of checkers and their limits') - group2.add_argument('--include-sphinx-deprecation', dest='include_sphinx_deprecation', action='store_true', + parser = argparse.ArgumentParser(prog="mlx-warnings") + group1 = parser.add_argument_group("Configuration command line options") + group1.add_argument("--coverity", dest="coverity", action="store_true") + group1.add_argument("-d", "--doxygen", dest="doxygen", action="store_true") + group1.add_argument("-j", "--junit", dest="junit", action="store_true") + group1.add_argument("-r", "--robot", dest="robot", action="store_true") + group1.add_argument("-s", "--sphinx", dest="sphinx", action="store_true") + group1.add_argument("-x", "--xmlrunner", dest="xmlrunner", action="store_true") + group1.add_argument("--name", default="", + help="Name of the Robot Framework test suite to check results of") + group1.add_argument("-m", "--maxwarnings", "--max-warnings", type=int, default=0, + help="Maximum amount of warnings accepted") + group1.add_argument("--minwarnings", "--min-warnings", type=int, default=0, + help="Minimum amount of warnings accepted") + group1.add_argument("--exact-warnings", type=int, default=0, + help="Exact amount of warnings expected") + group2 = parser.add_argument_group("Configuration file with options") + group2.add_argument("--config", dest="configfile", action="store", required=False, type=Path, + help="Config file in JSON or YAML format provides toggle of checkers and their limits") + 
group2.add_argument("--include-sphinx-deprecation", dest="include_sphinx_deprecation", action="store_true", help="Sphinx checker will include warnings matching (RemovedInSphinx\\d+Warning) regex") - parser.add_argument('-o', '--output', type=Path, - help='Output file that contains all counted warnings') - parser.add_argument('-C', '--code-quality', - help='Output Code Quality report artifact for GitLab CI') - parser.add_argument('-v', '--verbose', dest='verbose', action='store_true') - parser.add_argument('--command', dest='command', action='store_true', - help='Treat program arguments as command to execute to obtain data') - parser.add_argument('--ignore-retval', dest='ignore', action='store_true', - help='Ignore return value of the executed command') - parser.add_argument('--version', action='version', version=f'%(prog)s {__version__}') - parser.add_argument('logfile', nargs='+', help='Logfile (or command) that might contain warnings') - parser.add_argument('flags', nargs=argparse.REMAINDER, - help='Possible not-used flags from above are considered as command flags') + parser.add_argument("-o", "--output", type=Path, + help="Output file that contains all counted warnings") + parser.add_argument("-C", "--code-quality", + help="Output Code Quality report artifact for GitLab CI") + parser.add_argument("-v", "--verbose", dest="verbose", action="store_true") + parser.add_argument("--command", dest="command", action="store_true", + help="Treat program arguments as command to execute to obtain data") + parser.add_argument("--ignore-retval", dest="ignore", action="store_true", + help="Ignore return value of the executed command") + parser.add_argument("--version", action="version", version=f"%(prog)s {__version__}") + parser.add_argument("logfile", nargs="+", help="Logfile (or command) that might contain warnings") + parser.add_argument("flags", nargs=argparse.REMAINDER, + help="Possible not-used flags from above are considered as command flags") args = parser.parse_args(args) code_quality_enabled = bool(args.code_quality) @@ -293,21 +294,21 @@ def warnings_wrapper(args): warnings.config_parser(args.configfile, *logging_args) else: if args.sphinx: - warnings.activate_checker_name('sphinx', *logging_args) + warnings.activate_checker_name("sphinx", *logging_args) if args.doxygen: - warnings.activate_checker_name('doxygen', *logging_args) + warnings.activate_checker_name("doxygen", *logging_args) if args.junit: - warnings.activate_checker_name('junit', *logging_args) + warnings.activate_checker_name("junit", *logging_args) if args.xmlrunner: - warnings.activate_checker_name('xmlrunner', *logging_args) + warnings.activate_checker_name("xmlrunner", *logging_args) if args.coverity: - warnings.activate_checker_name('coverity', *logging_args) + warnings.activate_checker_name("coverity", *logging_args) if args.robot: - robot_checker = warnings.activate_checker_name('robot', *logging_args) + robot_checker = warnings.activate_checker_name("robot", *logging_args) if robot_checker is not None: robot_checker.parse_config({ - 'suites': [{'name': args.name, 'min': 0, 'max': 0}], - 'check_suite_names': True, + "suites": [{"name": args.name, "min": 0, "max": 0}], + "check_suite_names": True, }) if args.exact_warnings: if args.maxwarnings | args.minwarnings: @@ -319,8 +320,8 @@ def warnings_wrapper(args): warnings.configure_maximum(args.maxwarnings) warnings.configure_minimum(args.minwarnings) - if args.include_sphinx_deprecation and 'sphinx' in warnings.activated_checkers.keys(): - 
warnings.get_checker('sphinx').include_sphinx_deprecation() + if args.include_sphinx_deprecation and "sphinx" in warnings.activated_checkers.keys(): + warnings.get_checker("sphinx").include_sphinx_deprecation() if args.command: if "polyspace" in warnings.activated_checkers: @@ -348,7 +349,7 @@ def warnings_wrapper(args): def warnings_command(warnings, cmd): - ''' Execute command to obtain input for parsing for warnings + """Execute command to obtain input for parsing for warnings Usually log files are output of the commands. To avoid this additional step this function runs a command instead and parses the stderr and stdout of the @@ -363,7 +364,7 @@ def warnings_command(warnings, cmd): Raises: OSError: When program is not installed. - ''' + """ try: LOGGER.info(f"Executing: {cmd}") proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, @@ -389,7 +390,7 @@ def warnings_command(warnings, cmd): def warnings_logfile(warnings, log): - ''' Parse logfile for warnings + """Parse logfile for warnings Args: warnings (WarningsPlugin): Object for warnings where errors should be logged @@ -398,7 +399,7 @@ def warnings_logfile(warnings, log): Return: 0: Log files existed and are parsed successfully 1: Log files did not exist - ''' + """ # args.logfile doesn't necessarily contain wildcards, but just to be safe, we # assume it does, and try to expand them. # This mechanism is put in place to allow wildcards to be passed on even when @@ -421,5 +422,5 @@ def main(): sys.exit(warnings_wrapper(sys.argv[1:])) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/mlx/warnings/warnings_checker.py b/src/mlx/warnings/warnings_checker.py index 932eb27..51e0063 100644 --- a/src/mlx/warnings/warnings_checker.py +++ b/src/mlx/warnings/warnings_checker.py @@ -38,7 +38,7 @@ def filter(self, record: logging.LogRecord) -> bool: class WarningsChecker: - name = 'checker' + name = "checker" subchecker = False logging_fmt = "{checker.name_repr}: {message}" @@ -57,8 +57,8 @@ def __init__(self, verbose, output): self._maximum = 0 self._cq_findings = [] self.cq_enabled = False - self.cq_default_path = '.gitlab-ci.yml' - self._cq_description_template = Template('$description') + self.cq_default_path = ".gitlab-ci.yml" + self._cq_description_template = Template("$description") self.exclude_patterns = [] self.include_patterns = [] self.logging_args = (verbose, output) @@ -90,22 +90,22 @@ def __init__(self, verbose, output): @property def name_repr(self): - return self.name.replace('_sub', '').capitalize() + return self.name.replace("_sub", "").capitalize() @property def cq_findings(self): - ''' List[dict]: list of code quality findings''' + """List[dict]: list of code quality findings""" return self._cq_findings @property def cq_description_template(self): - ''' Template: string.Template instance based on the configured template string ''' + """Template: string.Template instance based on the configured template string""" return self._cq_description_template @cq_description_template.setter def cq_description_template(self, template_obj): try: - template_obj.template = template_obj.substitute(os.environ, description='$description') + template_obj.template = template_obj.substitute(os.environ, description="$description") except KeyError as err: raise WarningsConfigError(f"Failed to find environment variable from configuration value " f"'cq_description_template': {err}") from err @@ -113,11 +113,11 @@ def cq_description_template(self, template_obj): @property def maximum(self): - ''' 
Getter function for the maximum amount of warnings + """Getter function for the maximum amount of warnings Returns: int: Maximum amount of warnings - ''' + """ return self._maximum @maximum.setter @@ -131,11 +131,11 @@ def maximum(self, maximum): @property def minimum(self): - ''' Getter function for the minimum amount of warnings + """Getter function for the minimum amount of warnings Returns: int: Minimum amount of warnings - ''' + """ return self._minimum @minimum.setter @@ -147,20 +147,20 @@ def minimum(self, minimum): @abc.abstractmethod def check(self, content): - ''' Function for counting the number of warnings in a specific text + """Function for counting the number of warnings in a specific text Args: content (str): The content to parse - ''' + """ return def add_patterns(self, regexes, pattern_container): - ''' Adds regexes as patterns to the specified container + """Adds regexes as patterns to the specified container Args: regexes (list[str]|None): List of regexes to add pattern_container (list[re.Pattern]): Target storage container for patterns - ''' + """ if regexes: if not isinstance(regexes, list): raise TypeError("Expected a list value for exclude key in configuration file; got {}" @@ -169,22 +169,22 @@ def add_patterns(self, regexes, pattern_container): pattern_container.append(re.compile(regex)) def return_count(self): - ''' Getter function for the amount of warnings found + """Getter function for the amount of warnings found Returns: int: Number of warnings found - ''' + """ return self.count def return_check_limits(self): - ''' Function for checking whether the warning count is within the configured limits + """Function for checking whether the warning count is within the configured limits A checker instance with sub-checkers is responsible for printing 'Returning error code X.' when the exit code is not 0. 
Returns: int: 0 if the amount of warnings is within limits, the count of (the sum of sub-checker) warnings otherwise (or 1 in case of a count of 0 warnings) - ''' + """ if self.count > self._maximum or self.count < self._minimum: return self._return_error_code() elif self._minimum == self._maximum and self.count == self._maximum: @@ -195,11 +195,11 @@ def return_check_limits(self): return 0 def _return_error_code(self): - ''' Function for determining the return code and message on failure + """Function for determining the return code and message on failure Returns: int: The count of warnings (or 1 in case of a count of 0 warnings) - ''' + """ if self.count > self._maximum: error_reason = f"higher than the maximum limit ({self._maximum})" else: @@ -215,17 +215,17 @@ def _return_error_code(self): return error_code def parse_config(self, config): - substitute_envvar(config, {'min', 'max'}) - self.maximum = int(config['max']) - self.minimum = int(config['min']) + substitute_envvar(config, {"min", "max"}) + self.maximum = int(config["max"]) + self.minimum = int(config["min"]) self.add_patterns(config.get("exclude"), self.exclude_patterns) - if 'cq_default_path' in config: - self.cq_default_path = config['cq_default_path'] - if 'cq_description_template' in config: - self.cq_description_template = Template(config['cq_description_template']) + if "cq_default_path" in config: + self.cq_default_path = config["cq_default_path"] + if "cq_description_template" in config: + self.cq_description_template = Template(config["cq_description_template"]) def _is_excluded(self, content): - ''' Checks if the specific text must be excluded based on the configured regexes for exclusion and inclusion. + """Checks if the specific text must be excluded based on the configured regexes for exclusion and inclusion. Inclusion has priority over exclusion. 
@@ -234,7 +234,7 @@ def _is_excluded(self, content): Returns: bool: True for exclusion, False for inclusion - ''' + """ matching_exclude_pattern = self._search_patterns(content, self.exclude_patterns) if not self._search_patterns(content, self.include_patterns) and matching_exclude_pattern: self.logger.info(f"Excluded {content!r} because of configured regex {matching_exclude_pattern!r}") @@ -243,7 +243,7 @@ def _is_excluded(self, content): @staticmethod def _search_patterns(content, patterns): - ''' Returns the regex of the first pattern that matches specified content, None if nothing matches ''' + """Returns the regex of the first pattern that matches specified content, None if nothing matches""" for pattern in patterns: if pattern.search(content): return pattern.pattern diff --git a/tests/test_config.py b/tests/test_config.py index c304a19..193d2e6 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -15,8 +15,8 @@ XMLRunnerChecker, ) -TEST_IN_DIR = Path(__file__).parent / 'test_in' -logging.getLogger('mlx.warnings.warnings').setLevel(logging.DEBUG) +TEST_IN_DIR = Path(__file__).parent / "test_in" +logging.getLogger("mlx.warnings.warnings").setLevel(logging.DEBUG) class TestConfig(TestCase): @@ -25,55 +25,64 @@ def caplog(self, caplog): self.caplog = caplog def setUp(self): - os.environ['MIN_SPHINX_WARNINGS'] = '0' - os.environ['MAX_SPHINX_WARNINGS'] = '0' + os.environ["MIN_SPHINX_WARNINGS"] = "0" + os.environ["MAX_SPHINX_WARNINGS"] = "0" def tearDown(self): - for var in ('MIN_SPHINX_WARNINGS', 'MAX_SPHINX_WARNINGS'): + for var in ("MIN_SPHINX_WARNINGS", "MAX_SPHINX_WARNINGS"): if var in os.environ: del os.environ[var] def test_configfile_parsing(self): warnings = WarningsPlugin() warnings.config_parser((TEST_IN_DIR / "config_example.json"), False, None) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 1) - warnings.check('') + warnings.check("") self.assertEqual(warnings.return_count(), 1) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 2) - warnings.check('This should not be treated as warning2') + warnings.check("This should not be treated as warning2") self.assertEqual(warnings.return_count(), 2) - warnings.check('ERROR [0.000s]: test_some_error_test (something.anything.somewhere)') + warnings.check("ERROR [0.000s]: test_some_error_test (something.anything.somewhere)") self.assertEqual(warnings.return_count(), 3) def test_configfile_parsing_missing_envvar(self): - del os.environ['MAX_SPHINX_WARNINGS'] + del os.environ["MAX_SPHINX_WARNINGS"] with self.assertRaises(WarningsConfigError) as c_m: warnings = WarningsPlugin() warnings.config_parser((TEST_IN_DIR / "config_example.json"), False, None) self.assertEqual( str(c_m.exception), - "Failed to find environment variable 'MAX_SPHINX_WARNINGS' for configuration value 'max'") + "Failed to find environment variable 'MAX_SPHINX_WARNINGS' for configuration value 'max'", + ) def _helper_exclude(self, warnings): - warnings.check('testfile.c:6: warning: group test: ignoring title "Some 
test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 0) - warnings.check('') + warnings.check("") self.assertEqual(warnings.return_count(), 0) - deprecation_warning = 'sphinx/application.py:402: RemovedInSphinx20Warning: app.info() is now deprecated. Use sphinx.util.logging instead.' + deprecation_warning = "sphinx/application.py:402: RemovedInSphinx20Warning: app.info() is now deprecated. "\ + "Use sphinx.util.logging instead." warnings.check(deprecation_warning) self.assertEqual(warnings.return_count(), 0) - toctree_warning = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'" + toctree_warning = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document "\ + "u'installation'" warnings.check(toctree_warning) self.assertEqual(warnings.return_count(), 0) # ignored because of configured "exclude" regex warnings.check("home/bljah/test/index.rst:5: WARNING: this warning should not get excluded") self.assertEqual(warnings.return_count(), 1) - warnings.check('This should not be treated as warning2') + warnings.check("This should not be treated as warning2") self.assertEqual(warnings.return_count(), 1) - warnings.check('ERROR [0.000s]: test_some_error_test (something.anything.somewhere)') + warnings.check("ERROR [0.000s]: test_some_error_test (something.anything.somewhere)") self.assertEqual(warnings.return_count(), 1) - excluded_toctree_warning = "Excluded {!r} because of configured regex {!r}".format(toctree_warning, "WARNING: toctree") + excluded_toctree_warning = "Excluded {!r} because of configured regex {!r}".format(toctree_warning, + "WARNING: toctree") self.assertIn("Sphinx: Config parsing completed", self.caplog.messages) self.assertIn(f"{excluded_toctree_warning}", self.caplog.messages) warning_echo = "home/bljah/test/index.rst:5: WARNING: this warning should not get excluded" @@ -92,81 +101,88 @@ def test_configfile_parsing_exclude_yml(self): def test_configfile_parsing_include_priority(self): warnings = WarningsPlugin() warnings.config_parser((TEST_IN_DIR / "config_example_exclude.json"), True, None) - warnings.get_checker('sphinx').include_sphinx_deprecation() - deprecation_warning = 'sphinx/application.py:402: RemovedInSphinx20Warning: app.info() is now deprecated. Use sphinx.util.logging instead.' + warnings.get_checker("sphinx").include_sphinx_deprecation() + deprecation_warning = "sphinx/application.py:402: RemovedInSphinx20Warning: app.info() is now deprecated. "\ + "Use sphinx.util.logging instead." 
warnings.check(deprecation_warning) self.assertEqual(warnings.return_count(), 1) def test_partial_sphinx_config_parsing(self): warnings = WarningsPlugin() tmpjson = { - 'sphinx': { - 'enabled': True, - 'min': 0, - 'max': 0 + "sphinx": { + "enabled": True, + "min": 0, + "max": 0 } } warnings.config_parser(tmpjson, False, None) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 0) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 0) - warnings.check('ERROR [0.000s]: test_some_error_test (something.anything.somewhere)') + warnings.check("ERROR [0.000s]: test_some_error_test (something.anything.somewhere)") self.assertEqual(warnings.return_count(), 0) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 1) def test_partial_doxygen_config_parsing(self): warnings = WarningsPlugin() tmpjson = { - 'doxygen': { - 'enabled': True, - 'min': 0, - 'max': 0 + "doxygen": { + "enabled": True, + "min": 0, + "max": 0 } } warnings.config_parser(tmpjson, False, None) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 0) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 0) - warnings.check('ERROR [0.000s]: test_some_error_test (something.anything.somewhere)') + warnings.check("ERROR [0.000s]: test_some_error_test (something.anything.somewhere)") self.assertEqual(warnings.return_count(), 0) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 1) def test_partial_junit_config_parsing(self): warnings = WarningsPlugin() tmpjson = { - 'junit': { - 'enabled': True, - 'min': 0, - 'max': 0 + "junit": { + "enabled": True, + "min": 0, + "max": 0 } } warnings.config_parser(tmpjson, False, None) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 0) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + 
"old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 0) - warnings.check('ERROR [0.000s]: test_some_error_test (something.anything.somewhere)') + warnings.check("ERROR [0.000s]: test_some_error_test (something.anything.somewhere)") self.assertEqual(warnings.return_count(), 0) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 1) def test_exclude_feature_type_error(self): warnings = WarningsPlugin() tmpjson = { - 'junit': { - 'enabled': True, - 'min': 0, - 'max': 0, + "junit": { + "enabled": True, + "min": 0, + "max": 0, "exclude": "able to trace this random failure msg" } } @@ -177,48 +193,49 @@ def test_exclude_feature_type_error(self): def test_partial_junit_config_parsing_exclude_regex(self): warnings = WarningsPlugin() tmpjson = { - 'junit': { - 'enabled': True, - 'min': 0, - 'max': 0, + "junit": { + "enabled": True, + "min": 0, + "max": 0, "exclude": ["able to trace this random failure msg"] } } warnings.config_parser(tmpjson, False, None) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 0) def test_partial_robot_config_parsing_exclude_regex(self): warnings = WarningsPlugin() tmpjson = { - 'robot': { - 'enabled': True, - 'suites': [ + "robot": { + "enabled": True, + "suites": [ { - 'name': 'Suite One', - 'min': 0, - 'max': 0, + "name": "Suite One", + "min": 0, + "max": 0, "exclude": ["does not exist"] # excludes failure in suite }, { - 'name': 'Suite Two', - 'min': 1, - 'max': 1, + "name": "Suite Two", + "min": 1, + "max": 1, "exclude": ["does not exist"] # no match for failure in suite } ] } } warnings.config_parser(tmpjson, True, None) - with open('tests/test_in/robot_double_fail.xml') as xmlfile: + with open("tests/test_in/robot_double_fail.xml") as xmlfile: warnings.check(xmlfile.read()) retval = warnings.return_check_limits() self.assertEqual(warnings.return_count(), 1) self.assertEqual(retval, 0) self.assertEqual( ["Robot: Config parsing completed", - "Excluded 'Directory 'C:\\\\nonexistent' does not exist.' because of configured regex 'does not exist'", + "Excluded 'Directory 'C:\\\\nonexistent' does not exist.' because of configured regex " + "'does not exist'", "Suite One & Suite Two.Suite Two.Another test", "number of warnings (0) is exactly as expected. Well done.", "number of warnings (1) is exactly as expected. Well done."], @@ -228,20 +245,20 @@ def test_partial_robot_config_parsing_exclude_regex(self): def test_partial_robot_config_empty_name(self): warnings = WarningsPlugin() tmpjson = { - 'robot': { - 'enabled': True, - 'suites': [ + "robot": { + "enabled": True, + "suites": [ { - 'name': '', - 'min': 1, - 'max': 1, + "name": "", + "min": 1, + "max": 1, "exclude": ["does not exist"] # excludes 1 out of 2 failures in suites } ] } } warnings.config_parser(tmpjson, True, None) - with open('tests/test_in/robot_double_fail.xml') as xmlfile: + with open("tests/test_in/robot_double_fail.xml") as xmlfile: warnings.check(xmlfile.read()) count = warnings.return_count() self.assertEqual(count, 1) @@ -249,7 +266,8 @@ def test_partial_robot_config_empty_name(self): self.assertEqual( [ "Robot: Config parsing completed", - r"Excluded 'Directory 'C:\\nonexistent' does not exist.' 
because of configured regex 'does not exist'", + r"Excluded 'Directory 'C:\\nonexistent' does not exist.' because of configured regex " + r"'does not exist'", "Suite One & Suite Two.Suite Two.Another test", "number of warnings (1) is exactly as expected. Well done." ], @@ -259,86 +277,94 @@ def test_partial_robot_config_empty_name(self): def test_partial_xmlrunner_config_parsing(self): warnings = WarningsPlugin() tmpjson = { - 'xmlrunner': { - 'enabled': True, - 'min': 0, - 'max': 0 + "xmlrunner": { + "enabled": True, + "min": 0, + "max": 0 } } warnings.config_parser(tmpjson, False, None) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 0) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 0) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 0) - warnings.check('ERROR [0.000s]: test_some_error_test (something.anything.somewhere)') + warnings.check("ERROR [0.000s]: test_some_error_test (something.anything.somewhere)") self.assertEqual(warnings.return_count(), 1) def test_doxy_junit_options_config_parsing(self): warnings = WarningsPlugin() tmpjson = { - 'doxygen': { - 'enabled': True, - 'min': 0, - 'max': 0 + "doxygen": { + "enabled": True, + "min": 0, + "max": 0 }, - 'junit': { - 'enabled': True, - 'min': 0, - 'max': 0 + "junit": { + "enabled": True, + "min": 0, + "max": 0 } } warnings.config_parser(tmpjson, False, None) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 0) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 1) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 2) def test_sphinx_doxy_config_parsing(self): warnings = WarningsPlugin() tmpjson = { - 'sphinx': { - 'enabled': True, - 'min': 0, - 'max': 0 + "sphinx": { + "enabled": True, + "min": 0, + "max": 0 }, - 'doxygen': { - 'enabled': True, - 'min': 0, - 'max': 0 + "doxygen": { + "enabled": True, + "min": 0, + "max": 0 } } warnings.config_parser(tmpjson, False, None) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 0) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test 
functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 1) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 2) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 2) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 3) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 4) def test_sphinx_config_max(self): warnings = WarningsPlugin() tmpjson = { - 'sphinx': { - 'enabled': True, - 'min': 0, - 'max': 5 + "sphinx": { + "enabled": True, + "min": 0, + "max": 5 } } @@ -348,10 +374,10 @@ def test_sphinx_config_max(self): def test_doxygen_config_max(self): warnings = WarningsPlugin() tmpjson = { - 'doxygen': { - 'enabled': True, - 'min': 0, - 'max': 5 + "doxygen": { + "enabled": True, + "min": 0, + "max": 5 } } @@ -361,10 +387,10 @@ def test_doxygen_config_max(self): def test_junit_config_max(self): warnings = WarningsPlugin() tmpjson = { - 'junit': { - 'enabled': True, - 'min': 0, - 'max': 5 + "junit": { + "enabled": True, + "min": 0, + "max": 5 } } @@ -374,10 +400,10 @@ def test_junit_config_max(self): def test_xmlrunner_config_max(self): warnings = WarningsPlugin() tmpjson = { - 'xmlrunner': { - 'enabled': True, - 'min': 0, - 'max': 5 + "xmlrunner": { + "enabled": True, + "min": 0, + "max": 5 } } @@ -387,43 +413,43 @@ def test_xmlrunner_config_max(self): def test_all_config_max(self): warnings = WarningsPlugin() tmpjson = { - 'sphinx': { - 'enabled': True, - 'min': 0, - 'max': 4 + "sphinx": { + "enabled": True, + "min": 0, + "max": 4 }, - 'doxygen': { - 'enabled': True, - 'min': 0, - 'max': 5 + "doxygen": { + "enabled": True, + "min": 0, + "max": 5 }, - 'junit': { - 'enabled': True, - 'min': 0, - 'max': 6 + "junit": { + "enabled": True, + "min": 0, + "max": 6 }, - 'xmlrunner': { - 'enabled': True, - 'min': 0, - 'max': 6 + "xmlrunner": { + "enabled": True, + "min": 0, + "max": 6 }, - 'robot': { - 'enabled': True, - 'suites': [ + "robot": { + "enabled": True, + "suites": [ { - 'name': 'dummy1', - 'min': 5, - 'max': 7, + "name": "dummy1", + "min": 5, + "max": 7, }, { - 'name': 'dummy2', - 'min': 1, - 'max': 9, + "name": "dummy2", + "min": 1, + "max": 9, }, { - 'name': 'dummy3', - 'min': 2, - 'max': 2, + "name": "dummy3", + "min": 2, + "max": 2, } ] } @@ -439,10 +465,10 @@ def test_all_config_max(self): def test_sphinx_config_min(self): warnings = WarningsPlugin() tmpjson = { - 'sphinx': { - 'enabled': True, - 'min': 5, - 'max': 7 + "sphinx": { + 
"enabled": True, + "min": 5, + "max": 7 } } @@ -452,10 +478,10 @@ def test_sphinx_config_min(self): def test_doxygen_config_min(self): warnings = WarningsPlugin() tmpjson = { - 'doxygen': { - 'enabled': True, - 'min': 5, - 'max': 7 + "doxygen": { + "enabled": True, + "min": 5, + "max": 7 } } @@ -465,10 +491,10 @@ def test_doxygen_config_min(self): def test_junit_config_min(self): warnings = WarningsPlugin() tmpjson = { - 'junit': { - 'enabled': True, - 'min': 5, - 'max': 7 + "junit": { + "enabled": True, + "min": 5, + "max": 7 } } @@ -478,10 +504,10 @@ def test_junit_config_min(self): def test_xmlrunner_config_min(self): warnings = WarningsPlugin() tmpjson = { - 'xmlrunner': { - 'enabled': True, - 'min': 5, - 'max': 7 + "xmlrunner": { + "enabled": True, + "min": 5, + "max": 7 } } @@ -491,43 +517,43 @@ def test_xmlrunner_config_min(self): def test_all_config_min(self): warnings = WarningsPlugin() tmpjson = { - 'sphinx': { - 'enabled': True, - 'min': 4, - 'max': 7 + "sphinx": { + "enabled": True, + "min": 4, + "max": 7 }, - 'doxygen': { - 'enabled': True, - 'min': 3, - 'max': 7 + "doxygen": { + "enabled": True, + "min": 3, + "max": 7 }, - 'junit': { - 'enabled': True, - 'min': 5, - 'max': 7 + "junit": { + "enabled": True, + "min": 5, + "max": 7 }, - 'xmlrunner': { - 'enabled': True, - 'min': 5, - 'max': 7 + "xmlrunner": { + "enabled": True, + "min": 5, + "max": 7 }, - 'robot': { - 'enabled': True, - 'suites': [ + "robot": { + "enabled": True, + "suites": [ { - 'name': 'dummy1', - 'min': 5, - 'max': 7, + "name": "dummy1", + "min": 5, + "max": 7, }, { - 'name': 'dummy2', - 'min': 1, - 'max': 9, + "name": "dummy2", + "min": 1, + "max": 9, }, { - 'name': 'dummy3', - 'min': 2, - 'max': 2, + "name": "dummy3", + "min": 2, + "max": 2, } ] } @@ -543,23 +569,23 @@ def test_all_config_min(self): def test_invalid_config(self): warnings = WarningsPlugin() tmpjson = { - 'robot': { - 'enabled': True, - 'suites': [ + "robot": { + "enabled": True, + "suites": [ { - 'name': '', - 'min': 5, - 'max': 7, + "name": "", + "min": 5, + "max": 7, }, { - 'name': 'dummy2', - 'min': 10, - 'max': 9, + "name": "dummy2", + "min": 10, + "max": 9, }, { - 'name': 'dummy3', - 'min': 2, - 'max': 2, + "name": "dummy3", + "min": 2, + "max": 2, } ] } @@ -567,4 +593,4 @@ def test_invalid_config(self): with self.assertRaises(ValueError) as c_m: warnings.config_parser(tmpjson, False, None) self.assertEqual(str(c_m.exception), - 'Invalid argument: minimum limit must be lower than maximum limit (9); cannot set 10.') + "Invalid argument: minimum limit must be lower than maximum limit (9); cannot set 10.") diff --git a/tests/test_coverity.py b/tests/test_coverity.py index 260f482..a8e37e8 100644 --- a/tests/test_coverity.py +++ b/tests/test_coverity.py @@ -8,8 +8,8 @@ from mlx.warnings import Finding, WarningsPlugin, warnings_wrapper -TEST_IN_DIR = Path(__file__).parent / 'test_in' -TEST_OUT_DIR = Path(__file__).parent / 'test_out' +TEST_IN_DIR = Path(__file__).parent / "test_in" +TEST_OUT_DIR = Path(__file__).parent / "test_out" def ordered(obj): @@ -34,36 +34,43 @@ def caplog(self, caplog): def setUp(self): Finding.fingerprints = {} self.warnings = WarningsPlugin() - self.warnings.activate_checker_name('coverity', True, None) + self.warnings.activate_checker_name("coverity", True, None) def test_no_warning_normal_text(self): - dut = 'This should not be treated as warning' + dut = "This should not be treated as warning" self.warnings.check(dut) self.assertEqual(self.warnings.return_count(), 0) def 
test_no_warning_but_still_command_output(self): - dut = 'src/something/src/somefile.c:82: 1. misra_violation: Essential type of the left hand operand "0U" (unsigned) is not the same as that of the right operand "1U"(signed).' + dut = "src/something/src/somefile.c:82: 1. misra_violation: Essential type of the left hand operand \"0U\" "\ + "(unsigned) is not the same as that of the right operand \"1U\"(signed)." self.warnings.check(dut) self.assertEqual(self.warnings.return_count(), 0) def test_single_warning(self): - dut = '/src/somefile.c:82: CID 113396 (#2 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27.' + dut = "/src/somefile.c:82: CID 113396 (#2 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): "\ + "Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27." self.warnings.check(dut) self.assertEqual(self.warnings.return_count(), 1) self.assertEqual([f"{dut}"], self.caplog.messages) def test_single_warning_count_one(self): - dut1 = '/src/somefile.c:80: CID 113396 (#1 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27.' - dut2 = '/src/somefile.c:82: CID 113396 (#2 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27.' + dut1 = "/src/somefile.c:80: CID 113396 (#1 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): "\ + "Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27." + dut2 = "/src/somefile.c:82: CID 113396 (#2 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): "\ + "Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27." self.warnings.check(dut1) self.warnings.check(dut2) self.assertEqual(self.warnings.return_count(), 1) self.assertEqual([f"{dut2}"], self.caplog.messages) def test_single_warning_real_output(self): - dut1 = '/src/somefile.c:80: CID 113396 (#1 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27.' - dut2 = '/src/somefile.c:82: CID 113396 (#2 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27.' - dut3 = 'src/something/src/somefile.c:82: 1. misra_violation: Essential type of the left hand operand "0U" (unsigned) is not the same as that of the right operand "1U"(signed).' + dut1 = "/src/somefile.c:80: CID 113396 (#1 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): "\ + "Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27." + dut2 = "/src/somefile.c:82: CID 113396 (#2 of 2): Coding standard violation (MISRA C-2012 Rule 10.1): "\ + "Unclassified, Unspecified, Undecided, owner is nobody, first detected on 2017-07-27." + dut3 = "src/something/src/somefile.c:82: 1. misra_violation: Essential type of the left hand operand \"0U\" "\ + "(unsigned) is not the same as that of the right operand \"1U\"(signed)." 
self.warnings.check(dut1) self.warnings.check(dut2) self.warnings.check(dut3) @@ -71,25 +78,25 @@ def test_single_warning_real_output(self): self.assertEqual([f"{dut2}"], self.caplog.messages) def test_code_quality_without_config(self): - filename = 'coverity_cq.json' + filename = "coverity_cq.json" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--coverity', - '--code-quality', out_file, - str(TEST_IN_DIR / 'coverity_full.txt'), + "--coverity", + "--code-quality", out_file, + str(TEST_IN_DIR / "coverity_full.txt"), ]) self.assertEqual(11, retval) self.assertTrue(filecmp.cmp(out_file, ref_file)) def test_code_quality_with_config_pass(self): - filename = 'coverity_cq.json' + filename = "coverity_cq.json" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--code-quality', out_file, - '--config', str(TEST_IN_DIR / 'config_example_coverity.yml'), - str(TEST_IN_DIR / 'coverity_full.txt'), + "--code-quality", out_file, + "--config", str(TEST_IN_DIR / "config_example_coverity.yml"), + str(TEST_IN_DIR / "coverity_full.txt"), ]) self.assertEqual(0, retval) self.assertTrue(filecmp.cmp(out_file, ref_file)) @@ -99,13 +106,13 @@ def test_code_quality_with_config_pass(self): "MIN_FALSE_POSITIVE": "0", "MAX_FALSE_POSITIVE": "1", }) def test_code_quality_with_config_fail(self): - filename = 'coverity_cq.json' + filename = "coverity_cq.json" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--code-quality', out_file, - '--config', str(TEST_IN_DIR / 'config_example_coverity.yml'), - str(TEST_IN_DIR / 'coverity_full.txt'), + "--code-quality", out_file, + "--config", str(TEST_IN_DIR / "config_example_coverity.yml"), + str(TEST_IN_DIR / "coverity_full.txt"), ]) self.assertEqual(10, retval) # 8 + 2 not within range 6 and 7 self.assertTrue(filecmp.cmp(out_file, ref_file)) diff --git a/tests/test_doxygen.py b/tests/test_doxygen.py index ee92f05..d3a6aec 100644 --- a/tests/test_doxygen.py +++ b/tests/test_doxygen.py @@ -12,23 +12,25 @@ def caplog(self, caplog): def setUp(self): self.warnings = WarningsPlugin() - self.warnings.activate_checker_name('doxygen', True, None) + self.warnings.activate_checker_name("doxygen", True, None) def test_no_warning(self): - dut = 'This should not be treated as warning' + dut = "This should not be treated as warning" self.warnings.check(dut) self.assertEqual(self.warnings.return_count(), 0) def test_single_warning(self): - dut = 'testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"' + dut = "testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match "\ + "old title \"Some freaky test functions\"" self.warnings.check(dut) self.assertEqual(self.warnings.return_count(), 1) self.assertEqual([f"{dut}"], self.caplog.messages) def test_single_warning_mixed(self): - dut1 = 'This1 should not be treated as warning' - dut2 = 'testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"' - dut3 = 'This should not be treated as warning2' + dut1 = "This1 should not be treated as warning" + dut2 = "testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match "\ + "old title \"Some freaky test functions\"" + dut3 = "This should not be treated as warning2" self.warnings.check(dut1) self.warnings.check(dut2) 
self.warnings.check(dut3) @@ -36,8 +38,10 @@ def test_single_warning_mixed(self): self.assertEqual([f"{dut2}"], self.caplog.messages) def test_multiline(self): - duterr1 = "testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match old title \"Some freaky test functions\"\n" - duterr2 = "testfile.c:8: warning: group test: ignoring title \"Some test functions\" that does not match old title \"Some freaky test functions\"\n" + duterr1 = "testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match "\ + "old title \"Some freaky test functions\"\n" + duterr2 = "testfile.c:8: warning: group test: ignoring title \"Some test functions\" that does not match "\ + "old title \"Some freaky test functions\"\n" dut = "This1 should not be treated as warning\n" dut += duterr1 dut += "This should not be treated as warning2\n" @@ -47,8 +51,10 @@ def test_multiline(self): self.assertEqual([f"{duterr1.strip()}", f"{duterr2.strip()}"], self.caplog.messages) def test_git_warning(self): - duterr1 = "testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match old title \"Some freaky test functions\"\n" - duterr2 = "testfile.c:8: warning: group test: ignoring title \"Some test functions\" that does not match old title \"Some freaky test functions\"\n" + duterr1 = "testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match "\ + "old title \"Some freaky test functions\"\n" + duterr2 = "testfile.c:8: warning: group test: ignoring title \"Some test functions\" that does not match "\ + "old title \"Some freaky test functions\"\n" dut = "warning: notes ref refs/notes/review is invalid should not be treated as warning\n" dut += duterr1 dut += "This should not be treated as warning2\n" @@ -58,16 +64,17 @@ def test_git_warning(self): self.assertEqual([f"{duterr1.strip()}", f"{duterr2.strip()}"], self.caplog.messages) def test_sphinx_deprecation_warning(self): - duterr1 = "testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match old title \"Some freaky test functions\"\n" - dut = "/usr/local/lib/python3.5/dist-packages/sphinx/application.py:402: RemovedInSphinx20Warning: app.info() "\ - "is now deprecated. Use sphinx.util.logging instead. RemovedInSphinx20Warning)\n" + duterr1 = "testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match "\ + "old title \"Some freaky test functions\"\n" + dut = "/usr/local/lib/python3.5/dist-packages/sphinx/application.py:402: RemovedInSphinx20Warning: "\ + "app.info() is now deprecated. Use sphinx.util.logging instead. 
RemovedInSphinx20Warning)\n" dut += duterr1 self.warnings.check(dut) self.assertEqual(self.warnings.return_count(), 1) self.assertEqual([f"{duterr1.strip()}"], self.caplog.messages) def test_doxygen_warnings_txt(self): - dut_file = 'tests/test_in/doxygen_warnings.txt' + dut_file = "tests/test_in/doxygen_warnings.txt" with open(dut_file) as open_file: self.warnings.check(open_file.read()) self.assertEqual(self.warnings.return_count(), 22) diff --git a/tests/test_integration.py b/tests/test_integration.py index 88991b6..9c94fc8 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -9,8 +9,8 @@ from mlx.warnings import Finding, WarningsConfigError, exceptions, warnings_wrapper -TEST_IN_DIR = Path(__file__).parent / 'test_in' -TEST_OUT_DIR = Path(__file__).parent / 'test_out' +TEST_IN_DIR = Path(__file__).parent / "test_in" +TEST_OUT_DIR = Path(__file__).parent / "test_out" def reset_logging(): @@ -41,19 +41,19 @@ def setUp(self): TEST_OUT_DIR.mkdir() def tearDown(self): - for var in ('FIRST_ENVVAR', 'SECOND_ENVVAR', 'MIN_SPHINX_WARNINGS', 'MAX_SPHINX_WARNINGS'): + for var in ("FIRST_ENVVAR", "SECOND_ENVVAR", "MIN_SPHINX_WARNINGS", "MAX_SPHINX_WARNINGS"): if var in os.environ: del os.environ[var] reset_logging() def test_help(self): with self.assertRaises(SystemExit) as ex: - warnings_wrapper(['--help']) + warnings_wrapper(["--help"]) self.assertEqual(0, ex.exception.code) def test_version(self): with self.assertRaises(SystemExit) as ex: - warnings_wrapper(['--version']) + warnings_wrapper(["--version"]) self.assertEqual(0, ex.exception.code) def test_no_parser_selection(self): @@ -62,7 +62,7 @@ def test_no_parser_selection(self): self.assertEqual(2, ex.exception.code) def test_verbose(self): - retval = warnings_wrapper(['--verbose', '--junit', 'tests/test_in/junit_single_fail.xml']) + retval = warnings_wrapper(["--verbose", "--junit", "tests/test_in/junit_single_fail.xml"]) self.assertEqual( [ "JUnit: test_warn_plugin_single_fail.myfirstfai1ure", @@ -72,7 +72,7 @@ def test_verbose(self): self.assertEqual(1, retval) def test_no_verbose(self): - retval = warnings_wrapper(['--junit', 'tests/test_in/junit_single_fail.xml']) + retval = warnings_wrapper(["--junit", "tests/test_in/junit_single_fail.xml"]) self.assertEqual( ["JUnit: number of warnings (1) is higher than the maximum limit (0). 
Returning error code 1."], self.stderr_lines @@ -83,182 +83,193 @@ def test_no_verbose(self): junit_warning_cnt = 3 def test_single_argument(self): - retval = warnings_wrapper(['--junit', 'tests/test_in/junit_single_fail.xml']) + retval = warnings_wrapper(["--junit", "tests/test_in/junit_single_fail.xml"]) self.assertEqual(1, retval) def test_single_defect_coverity(self): - retval = warnings_wrapper(['--coverity', 'tests/test_in/coverity_single_defect.txt']) + retval = warnings_wrapper(["--coverity", "tests/test_in/coverity_single_defect.txt"]) self.assertEqual(1, retval) def test_two_arguments(self): - retval = warnings_wrapper(['--junit', 'tests/test_in/junit_single_fail.xml', 'tests/test_in/junit_double_fail.xml']) + retval = warnings_wrapper(["--junit", "tests/test_in/junit_single_fail.xml", + "tests/test_in/junit_double_fail.xml"]) self.assertEqual(1 + 2, retval) def test_non_existing_logfile(self): - retval = warnings_wrapper(['--sphinx', 'not-exist.log']) + retval = warnings_wrapper(["--sphinx", "not-exist.log"]) self.assertEqual(1, retval) - retval = warnings_wrapper(['--xmlrunner', 'not-exist.log']) + retval = warnings_wrapper(["--xmlrunner", "not-exist.log"]) self.assertEqual(1, retval) def test_single_command_argument(self): - retval = warnings_wrapper(['--junit', '--command', 'cat', 'tests/test_in/junit_single_fail.xml']) + retval = warnings_wrapper(["--junit", "--command", "cat", "tests/test_in/junit_single_fail.xml"]) self.assertEqual(1, retval) def test_two_command_arguments(self): - retval = warnings_wrapper(['--sphinx', '--command', 'cat', 'tests/test_in/sphinx_single_warning.txt', 'tests/test_in/sphinx_double_warning.txt']) + retval = warnings_wrapper(["--sphinx", "--command", "cat", "tests/test_in/sphinx_single_warning.txt", + "tests/test_in/sphinx_double_warning.txt"]) self.assertEqual(1 + 2, retval) def test_command_with_its_own_arguments(self): - retval = warnings_wrapper(['--sphinx', '--command', 'cat', '-A', 'tests/test_in/sphinx_single_warning.txt', 'tests/test_in/sphinx_double_warning.txt']) + retval = warnings_wrapper(["--sphinx", "--command", "cat", "-A", "tests/test_in/sphinx_single_warning.txt", + "tests/test_in/sphinx_double_warning.txt"]) self.assertEqual(1 + 2, retval) def test_command_to_stderr(self): - retval = warnings_wrapper(['--sphinx', '--command', 'cat', 'tests/test_in/sphinx_single_warning.txt', '>&2']) + retval = warnings_wrapper(["--sphinx", "--command", "cat", "tests/test_in/sphinx_single_warning.txt", ">&2"]) self.assertEqual(1, retval) def test_faulty_command(self): with self.assertRaises(OSError): - warnings_wrapper(['--sphinx', '--command', 'blahahahaha', 'tests/test_in/sphinx_single_warning.txt']) + warnings_wrapper(["--sphinx", "--command", "blahahahaha", "tests/test_in/sphinx_single_warning.txt"]) def test_command_revtal_err(self): - retval = warnings_wrapper(['--sphinx', '--command', 'false']) + retval = warnings_wrapper(["--sphinx", "--command", "false"]) self.assertEqual(1, retval) def test_command_revtal_err_supress(self): - retval = warnings_wrapper(['--sphinx', '--ignore-retval', '--command', 'false']) + retval = warnings_wrapper(["--sphinx", "--ignore-retval", "--command", "false"]) self.assertEqual(0, retval) def test_wildcarded_arguments(self): # note: no shell expansion simulation (e.g. 
as in windows) - retval = warnings_wrapper(['--junit', 'tests/test_in/junit*.xml']) + retval = warnings_wrapper(["--junit", "tests/test_in/junit*.xml"]) self.assertEqual(self.junit_warning_cnt, retval) def test_max(self): - retval = warnings_wrapper(['--junit', '--maxwarnings', '2', 'tests/test_in/junit*.xml']) + retval = warnings_wrapper(["--junit", "--maxwarnings", "2", "tests/test_in/junit*.xml"]) self.assertEqual(self.junit_warning_cnt, retval) def test_max_but_still_ok(self): - retval = warnings_wrapper(['--junit', '--maxwarnings', '100', 'tests/test_in/junit*.xml']) + retval = warnings_wrapper(["--junit", "--maxwarnings", "100", "tests/test_in/junit*.xml"]) self.assertEqual(0, retval) def test_min(self): - retval = warnings_wrapper(['--junit', '--maxwarnings', '100', '--minwarnings', '100', 'tests/test_in/junit*.xml']) + retval = warnings_wrapper(["--junit", "--maxwarnings", "100", "--minwarnings", "100", + "tests/test_in/junit*.xml"]) self.assertEqual(self.junit_warning_cnt, retval) def test_min_but_still_ok(self): - retval = warnings_wrapper(['--junit', '--max-warnings', '100', '--min-warnings', '2', 'tests/test_in/junit*.xml']) + retval = warnings_wrapper(["--junit", "--max-warnings", "100", "--min-warnings", "2", + "tests/test_in/junit*.xml"]) self.assertEqual(0, retval) def test_exact_sphinx(self): - retval = warnings_wrapper(['--sphinx', '--exact-warnings', '2', 'tests/test_in/sphinx_double_warning.txt']) + retval = warnings_wrapper(["--sphinx", "--exact-warnings", "2", "tests/test_in/sphinx_double_warning.txt"]) self.assertEqual(0, retval) def test_exact_too_few(self): - retval = warnings_wrapper(['--sphinx', '--exact-warnings', '3', 'tests/test_in/sphinx_double_warning.txt']) + retval = warnings_wrapper(["--sphinx", "--exact-warnings", "3", "tests/test_in/sphinx_double_warning.txt"]) self.assertEqual(2, retval) def test_exact_too_many(self): - retval = warnings_wrapper(['--sphinx', '--exact-warnings', '1', 'tests/test_in/sphinx_double_warning.txt']) + retval = warnings_wrapper(["--sphinx", "--exact-warnings", "1", "tests/test_in/sphinx_double_warning.txt"]) self.assertEqual(2, retval) def test_exact_junit(self): - retval = warnings_wrapper(['--junit', '--exact-warnings', '3', 'tests/test_in/junit*.xml']) + retval = warnings_wrapper(["--junit", "--exact-warnings", "3", "tests/test_in/junit*.xml"]) self.assertEqual(0, retval) def test_exact_with_min(self): with self.assertRaises(SystemExit): - warnings_wrapper(['--junit', '--exact-warnings', '3', '--min-warnings', '3', 'tests/test_in/junit*.xml']) + warnings_wrapper(["--junit", "--exact-warnings", "3", "--min-warnings", "3", "tests/test_in/junit*.xml"]) def test_exact_with_max(self): with self.assertRaises(SystemExit): - warnings_wrapper(['--junit', '--exact-warnings', '3', '--max-warnings', '3', 'tests/test_in/junit*.xml']) + warnings_wrapper(["--junit", "--exact-warnings", "3", "--max-warnings", "3", "tests/test_in/junit*.xml"]) def test_configfile_ok(self): - os.environ['MIN_SPHINX_WARNINGS'] = '0' - os.environ['MAX_SPHINX_WARNINGS'] = '0' - retval = warnings_wrapper(['--config', 'tests/test_in/config_example.json', 'tests/test_in/junit_single_fail.xml']) + os.environ["MIN_SPHINX_WARNINGS"] = "0" + os.environ["MAX_SPHINX_WARNINGS"] = "0" + retval = warnings_wrapper(["--config", "tests/test_in/config_example.json", + "tests/test_in/junit_single_fail.xml"]) self.assertEqual(0, retval) def test_configfile_exclude_commandline(self): with self.assertRaises(SystemExit) as ex: - warnings_wrapper(['--config', 
'tests/test_in/config_example.json', '--junit', 'tests/test_in/junit_single_fail.xml']) + warnings_wrapper(["--config", "tests/test_in/config_example.json", "--junit", + "tests/test_in/junit_single_fail.xml"]) self.assertEqual(2, ex.exception.code) def test_sphinx_deprecation(self): - retval = warnings_wrapper(['--sphinx', 'tests/test_in/sphinx_double_deprecation_warning.txt']) + retval = warnings_wrapper(["--sphinx", "tests/test_in/sphinx_double_deprecation_warning.txt"]) self.assertEqual(0, retval) def test_exclude_sphinx_deprecation(self): - retval = warnings_wrapper(['--sphinx', '--include-sphinx-deprecation', 'tests/test_in/sphinx_double_deprecation_warning.txt']) + retval = warnings_wrapper(["--sphinx", "--include-sphinx-deprecation", + "tests/test_in/sphinx_double_deprecation_warning.txt"]) self.assertEqual(2, retval) def test_ignore_sphinx_deprecation_flag(self): - retval = warnings_wrapper(['--junit', '--include-sphinx-deprecation', 'tests/test_in/junit*.xml']) + retval = warnings_wrapper(["--junit", "--include-sphinx-deprecation", "tests/test_in/junit*.xml"]) self.assertEqual(self.junit_warning_cnt, retval) def test_multiple_checkers_ret_val(self): - retval = warnings_wrapper(['--sphinx', '--junit', 'tests/test_in/junit*.xml']) + retval = warnings_wrapper(["--sphinx", "--junit", "tests/test_in/junit*.xml"]) self.assertEqual(self.junit_warning_cnt, retval) def test_non_zero_ret_val_on_failure(self): - retval = warnings_wrapper(['--sphinx', '--exact-warnings', '2', 'tests/test_in/junit*.xml']) + retval = warnings_wrapper(["--sphinx", "--exact-warnings", "2", "tests/test_in/junit*.xml"]) self.assertEqual(self.min_ret_val_on_failure, retval) def test_various_sphinx_warnings(self): - ''' Use the output log of the example documentation of mlx.traceability as input. + """Use the output log of the example documentation of mlx.traceability as input. The input file contains 18 Sphinx warnings, but exactly 19 are required to pass. The number of warnings (18) must be returned as return code. 
- ''' + """ retval = warnings_wrapper([ - '--sphinx', - '--exact-warnings', '19', - 'tests/test_in/sphinx_traceability_output.txt', + "--sphinx", + "--exact-warnings", "19", + "tests/test_in/sphinx_traceability_output.txt", ]) + self.assertEqual(18, retval) def test_robot_with_name_arg(self): - retval = warnings_wrapper(['--robot', '--name', "Suite Two", 'tests/test_in/robot_double_fail.xml']) + retval = warnings_wrapper(["--robot", "--name", "Suite Two", "tests/test_in/robot_double_fail.xml"]) self.assertEqual(1, retval) def test_robot_default_name_arg(self): - ''' If no suite name is configured, all suites must be taken into account ''' - retval = warnings_wrapper(['--robot', 'tests/test_in/robot_double_fail.xml']) + """If no suite name is configured, all suites must be taken into account""" + retval = warnings_wrapper(["--robot", "tests/test_in/robot_double_fail.xml"]) self.assertEqual(2, retval) def test_robot_verbose(self): - ''' If no suite name is configured, all suites must be taken into account ''' + """If no suite name is configured, all suites must be taken into account""" retval = warnings_wrapper([ - '--verbose', - '--robot', - '--name', 'Suite Two', - 'tests/test_in/robot_double_fail.xml', + "--verbose", + "--robot", + "--name", "Suite Two", + "tests/test_in/robot_double_fail.xml", ]) + self.assertEqual(1, retval) self.assertEqual( ["Robot: suite 'Suite Two' Suite One & Suite Two.Suite Two.Another test", "Robot: suite 'Suite Two' number of warnings (1) is higher than the maximum limit (0).", - 'Robot: Returning error code 1.'], + "Robot: Returning error code 1."], self.stderr_lines) # TODO def test_robot_config(self): - os.environ['MIN_ROBOT_WARNINGS'] = '0' - os.environ['MAX_ROBOT_WARNINGS'] = '0' + os.environ["MIN_ROBOT_WARNINGS"] = "0" + os.environ["MAX_ROBOT_WARNINGS"] = "0" retval = warnings_wrapper([ - '--config', - 'tests/test_in/config_example_robot.json', - 'tests/test_in/robot_double_fail.xml', + "--config", + "tests/test_in/config_example_robot.json", + "tests/test_in/robot_double_fail.xml", ]) + self.assertEqual( ["Robot: suite 'Suite One' number of warnings (1) is between limits 0 and 1. Well done.", - 'Robot: all test suites number of warnings (2) is higher than the maximum limit (1).', + "Robot: all test suites number of warnings (2) is higher than the maximum limit (1).", "Robot: suite 'Suite Two' number of warnings (1) is between limits 1 and 2. Well done.", "Robot: suite 'b4d su1te name' number of warnings (0) is exactly as expected. Well done.", - 'Robot: Returning error code 2.'], + "Robot: Returning error code 2."], self.stderr_lines ) self.assertEqual(2, retval) - for var in ('MIN_ROBOT_WARNINGS', 'MAX_ROBOT_WARNINGS'): + for var in ("MIN_ROBOT_WARNINGS", "MAX_ROBOT_WARNINGS"): if var in os.environ: del os.environ[var] @@ -266,9 +277,9 @@ def test_robot_config_check_names(self): self.maxDiff = None with self.assertRaises(SystemExit) as cm_err: warnings_wrapper([ - '--config', - 'tests/test_in/config_example_robot_invalid_suite.json', - 'tests/test_in/robot_double_fail.xml', + "--config", + "tests/test_in/config_example_robot_invalid_suite.json", + "tests/test_in/robot_double_fail.xml", ]) self.assertEqual( ["Robot: suite 'b4d su1te name' No suite with name 'b4d su1te name' found. 
Returning error code -1."], @@ -278,89 +289,85 @@ def test_robot_config_check_names(self): def test_robot_cli_check_name(self): self.maxDiff = None with self.assertRaises(SystemExit) as cm_err: - warnings_wrapper(['--verbose', '--robot', '--name', 'Inv4lid Name', - 'tests/test_in/robot_double_fail.xml']) - self.assertEqual( - ["Robot: suite 'Inv4lid Name' No suite with name 'Inv4lid Name' found. Returning error code -1."], - self.stderr_lines) + warnings_wrapper(["--verbose", "--robot", "--name", "Inv4lid Name", "tests/test_in/robot_double_fail.xml"]) self.assertEqual(cm_err.exception.code, -1) def test_output_file_sphinx(self): - filename = 'sphinx_double_deprecation_warning_summary.txt' + filename = "sphinx_double_deprecation_warning_summary.txt" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) - retval = warnings_wrapper(['--sphinx', '--include-sphinx-deprecation', '-o', out_file, - 'tests/test_in/sphinx_double_deprecation_warning.txt']) + retval = warnings_wrapper(["--sphinx", "--include-sphinx-deprecation", "-o", out_file, + "tests/test_in/sphinx_double_deprecation_warning.txt"]) self.assertEqual(2, retval) self.assertTrue(filecmp.cmp(out_file, ref_file)) def test_output_file_robot_basic(self): - filename = 'robot_double_fail_summary.txt' + filename = "robot_double_fail_summary.txt" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--output', out_file, - '-r', - 'tests/test_in/robot_double_fail.xml', + "--output", out_file, + "-r", + "tests/test_in/robot_double_fail.xml", ]) self.assertEqual(2, retval) - self.assertTrue(filecmp.cmp(out_file, ref_file), f'{out_file} differs from {ref_file}') + self.assertTrue(filecmp.cmp(out_file, ref_file), f"{out_file} differs from {ref_file}") def test_output_file_robot_config(self): - os.environ['MIN_ROBOT_WARNINGS'] = '0' - os.environ['MAX_ROBOT_WARNINGS'] = '0' - filename = 'robot_double_fail_config_summary.txt' + os.environ["MIN_ROBOT_WARNINGS"] = "0" + os.environ["MAX_ROBOT_WARNINGS"] = "0" + filename = "robot_double_fail_config_summary.txt" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--output', out_file, - '--config', 'tests/test_in/config_example_robot.json', - 'tests/test_in/robot_double_fail.xml', + "--output", out_file, + "--config", "tests/test_in/config_example_robot.json", + "tests/test_in/robot_double_fail.xml", ]) self.assertEqual(2, retval) - self.assertTrue(filecmp.cmp(out_file, ref_file), f'{out_file} differs from {ref_file}') - for var in ('MIN_ROBOT_WARNINGS', 'MAX_ROBOT_WARNINGS'): + self.assertTrue(filecmp.cmp(out_file, ref_file), f"{out_file} differs from {ref_file}") + for var in ("MIN_ROBOT_WARNINGS", "MAX_ROBOT_WARNINGS"): if var in os.environ: del os.environ[var] def test_output_file_junit(self): - filename = 'junit_double_fail_summary.txt' + filename = "junit_double_fail_summary.txt" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--output', out_file, - '--junit', - 'tests/test_in/junit_double_fail.xml', + "--output", out_file, + "--junit", + "tests/test_in/junit_double_fail.xml", ]) self.assertEqual(2, retval) - self.assertTrue(filecmp.cmp(out_file, ref_file), f'{out_file} differs from {ref_file}') + self.assertTrue(filecmp.cmp(out_file, ref_file), f"{out_file} differs from {ref_file}") - @patch('pathlib.Path.cwd') + @patch("pathlib.Path.cwd") def test_code_quality(self, path_cwd_mock): - 
os.environ['MIN_SPHINX_WARNINGS'] = '0' - os.environ['MAX_SPHINX_WARNINGS'] = '0' - path_cwd_mock.return_value = '/home/user/myproject' - filename = 'code_quality.json' + os.environ["MIN_SPHINX_WARNINGS"] = "0" + os.environ["MAX_SPHINX_WARNINGS"] = "0" + path_cwd_mock.return_value = "/home/user/myproject" + filename = "code_quality.json" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--code-quality', out_file, - '--config', 'tests/test_in/config_example.json', - 'tests/test_in/mixed_warnings.txt', + "--code-quality", out_file, + "--config", "tests/test_in/config_example.json", + "tests/test_in/mixed_warnings.txt", ]) self.assertEqual(2, retval) - self.assertTrue(filecmp.cmp(out_file, ref_file), f'{out_file} differs from {ref_file}') + self.assertTrue(filecmp.cmp(out_file, ref_file), f"{out_file} differs from {ref_file}") def test_code_quality_abspath_failure(self): - os.environ['MIN_SPHINX_WARNINGS'] = '0' - os.environ['MAX_SPHINX_WARNINGS'] = '0' - filename = 'code_quality.json' + os.environ["MIN_SPHINX_WARNINGS"] = "0" + os.environ["MAX_SPHINX_WARNINGS"] = "0" + filename = "code_quality.json" out_file = str(TEST_OUT_DIR / filename) with self.assertRaises(ValueError) as c_m: warnings_wrapper([ - '--code-quality', out_file, - '--config', 'tests/test_in/config_example.json', - 'tests/test_in/mixed_warnings.txt', + "--code-quality", out_file, + "--config", "tests/test_in/config_example.json", + "tests/test_in/mixed_warnings.txt", ]) self.assertTrue(str(c_m.exception).startswith( "Failed to convert abolute path to relative path for Code Quality report: " @@ -368,66 +375,66 @@ def test_code_quality_abspath_failure(self): ) def test_cq_description_format_missing_envvar(self): - os.environ['FIRST_ENVVAR'] = 'envvar_value' - filename = 'code_quality_format.json' + os.environ["FIRST_ENVVAR"] = "envvar_value" + filename = "code_quality_format.json" out_file = str(TEST_OUT_DIR / filename) with self.assertRaises(WarningsConfigError) as c_m: warnings_wrapper([ - '--code-quality', out_file, - '--config', 'tests/test_in/config_cq_description_format.json', - 'tests/test_in/mixed_warnings.txt', + "--code-quality", out_file, + "--config", "tests/test_in/config_cq_description_format.json", + "tests/test_in/mixed_warnings.txt", ]) self.assertEqual( str(c_m.exception), "Failed to find environment variable from configuration value 'cq_description_template': 'SECOND_ENVVAR'") - @patch('pathlib.Path.cwd') + @patch("pathlib.Path.cwd") def test_cq_description_format(self, path_cwd_mock): - os.environ['FIRST_ENVVAR'] = 'envvar_value' - os.environ['SECOND_ENVVAR'] = '12345' - path_cwd_mock.return_value = '/home/user/myproject' - filename = 'code_quality_format.json' + os.environ["FIRST_ENVVAR"] = "envvar_value" + os.environ["SECOND_ENVVAR"] = "12345" + path_cwd_mock.return_value = "/home/user/myproject" + filename = "code_quality_format.json" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '-v', - '--code-quality', out_file, - '--config', 'tests/test_in/config_cq_description_format.json', - 'tests/test_in/mixed_warnings.txt', + "-v", + "--code-quality", out_file, + "--config", "tests/test_in/config_cq_description_format.json", + "tests/test_in/mixed_warnings.txt", ]) self.assertEqual( [ - 'Sphinx: Config parsing completed', - 'Doxygen: Config parsing completed', - 'Xmlrunner: Config parsing completed', + "Sphinx: Config parsing completed", + "Doxygen: Config parsing completed", + "Xmlrunner: 
Config parsing completed", "Coverity: Unrecognized classification 'min'", "Coverity: Unrecognized classification 'max'", - 'Coverity: Config parsing completed', - 'Sphinx: git/test/index.rst:None: WARNING: toctree contains reference to nonexisting document ' + "Coverity: Config parsing completed", + "Sphinx: git/test/index.rst:None: WARNING: toctree contains reference to nonexisting document " "u'installation'", "Sphinx: WARNING: List item 'CL-UNDEFINED_CL_ITEM' in merge/pull request 138 is not defined as a " - 'checklist-item.', + "checklist-item.", "Doxygen: Notice: Output directory `doc/doxygen/framework' does not exist. I have created it for you.", - 'Doxygen: /home/user/myproject/helper/SimpleTimer.h:19: Error: Unexpected character `"\'', - 'Doxygen: :1: Warning: The following parameters of ' - 'sofa::component::odesolver::EulerKaapiSolver::v_peq(VecId v, VecId a, double f) are not documented:', + "Doxygen: /home/user/myproject/helper/SimpleTimer.h:19: Error: Unexpected character `\"'", + "Doxygen: :1: Warning: The following parameters of " + "sofa::component::odesolver::EulerKaapiSolver::v_peq(VecId v, VecId a, double f) are not documented:", "Doxygen: error: Could not read image `/home/user/myproject/html/struct_foo_graph.png' generated by " - 'dot!', + "dot!", "Xmlrunner: ERROR [0.000s]: test_some_error_test (something.anything.somewhere)'", - 'Coverity: unclassified | src/somefile.c:82: CID 113396 (#2 of 2): Coding standard violation (MISRA ' - 'C-2012 Rule 10.1): Unclassified, Unspecified, Undecided, owner is nobody, first detected on ' - '2017-07-27.', - 'Sphinx: number of warnings (2) is higher than the maximum limit (0). Returning error code 2.' + "Coverity: unclassified | src/somefile.c:82: CID 113396 (#2 of 2): Coding standard violation (MISRA " + "C-2012 Rule 10.1): Unclassified, Unspecified, Undecided, owner is nobody, first detected on " + "2017-07-27.", + "Sphinx: number of warnings (2) is higher than the maximum limit (0). Returning error code 2." 
], self.stderr_lines) self.assertEqual(2, retval) - self.assertTrue(filecmp.cmp(out_file, ref_file), f'{out_file} differs from {ref_file}') + self.assertTrue(filecmp.cmp(out_file, ref_file), f"{out_file} differs from {ref_file}") def test_polyspace_error(self): - config_file = str(TEST_IN_DIR / 'config_example_polyspace_error.yml') + config_file = str(TEST_IN_DIR / "config_example_polyspace_error.yml") with self.assertRaises(exceptions.WarningsConfigError) as context: warnings_wrapper([ - '--config', config_file, - 'tests/test_in/mixed_warnings.txt', + "--config", config_file, + "tests/test_in/mixed_warnings.txt", ]) - self.assertEqual(str(context.exception), 'Polyspace checker cannot be combined with other warnings checkers') + self.assertEqual(str(context.exception), "Polyspace checker cannot be combined with other warnings checkers") diff --git a/tests/test_junit.py b/tests/test_junit.py index fc4063c..02d22ef 100644 --- a/tests/test_junit.py +++ b/tests/test_junit.py @@ -12,21 +12,21 @@ def caplog(self, caplog): def setUp(self): self.warnings = WarningsPlugin() - self.warnings.activate_checker_name('junit', True, None) + self.warnings.activate_checker_name("junit", True, None) def test_no_warning(self): - with open('tests/test_in/junit_no_fail.xml') as xmlfile: + with open("tests/test_in/junit_no_fail.xml") as xmlfile: self.warnings.check(xmlfile.read()) self.assertEqual(self.warnings.return_count(), 0) def test_single_warning(self): - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: self.warnings.check(xmlfile.read()) self.assertEqual(self.warnings.return_count(), 1) self.assertEqual(["test_warn_plugin_single_fail.myfirstfai1ure"], self.caplog.messages) def test_dual_warning(self): - with open('tests/test_in/junit_double_fail.xml') as xmlfile: + with open("tests/test_in/junit_double_fail.xml") as xmlfile: self.warnings.check(xmlfile.read()) self.assertEqual(self.warnings.return_count(), 2) self.assertEqual(["test_warn_plugin_double_fail.myfirstfai1ure", @@ -34,5 +34,5 @@ def test_dual_warning(self): self.caplog.messages) def test_invalid_xml(self): - self.warnings.check('this is not xml') + self.warnings.check("this is not xml") self.assertEqual(self.warnings.return_count(), 0) diff --git a/tests/test_limits.py b/tests/test_limits.py index d597d83..32bc2e4 100644 --- a/tests/test_limits.py +++ b/tests/test_limits.py @@ -7,7 +7,7 @@ class TestLimits(TestCase): def setUp(self): self.warnings = WarningsPlugin() - self.warnings.activate_checker_name('doxygen', True, None) + self.warnings.activate_checker_name("doxygen", True, None) def test_set_maximum(self): for x in range(0, 10): @@ -31,7 +31,8 @@ def test_set_minimum_fail(self): self.assertRaises(ValueError, self.warnings.configure_minimum, x) def test_return_values_maximum_decrease(self): - self.warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + self.warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" " + "that does not match old title \"Some freaky test functions\"") self.assertEqual(self.warnings.return_count(), 1) self.warnings.configure_maximum(1) self.assertEqual(self.warnings.return_check_limits(), 0) @@ -39,9 +40,11 @@ def test_return_values_maximum_decrease(self): self.assertEqual(self.warnings.return_check_limits(), 1) def test_return_values_maximum_increase(self): - self.warnings.check('testfile.c:6: warning: group 
test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + self.warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" " + "that does not match old title \"Some freaky test functions\"") self.assertEqual(self.warnings.return_count(), 1) - self.warnings.check('testfile.c:12: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + self.warnings.check("testfile.c:12: warning: group test: ignoring title \"Some test functions\" " + "that does not match old title \"Some freaky test functions\"") self.assertEqual(self.warnings.return_count(), 2) self.warnings.configure_maximum(1) self.assertEqual(self.warnings.return_check_limits(), 2) @@ -49,9 +52,11 @@ def test_return_values_maximum_increase(self): self.assertEqual(self.warnings.return_check_limits(), 0) def test_return_values_minimum_increase(self): - self.warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + self.warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" " + "that does not match old title \"Some freaky test functions\"") self.assertEqual(self.warnings.return_count(), 1) - self.warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + self.warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" " + "that does not match old title \"Some freaky test functions\"") self.assertEqual(self.warnings.return_count(), 2) # default behavior self.assertEqual(self.warnings.return_check_limits(), 2) @@ -70,7 +75,7 @@ def test_invalid_minimum(self): with self.assertRaises(ValueError) as c_m: self.warnings.configure_minimum(11) self.assertEqual(str(c_m.exception), - 'Invalid argument: minimum limit must be lower than maximum limit (10); cannot set 11.') + "Invalid argument: minimum limit must be lower than maximum limit (10); cannot set 11.") def test_invalid_maximum(self): self.warnings.configure_maximum(10) @@ -78,4 +83,4 @@ def test_invalid_maximum(self): with self.assertRaises(ValueError) as c_m: self.warnings.configure_maximum(9) self.assertEqual(str(c_m.exception), - 'Invalid argument: maximum limit must be higher than minimum limit (10); cannot set 9.') + "Invalid argument: maximum limit must be higher than minimum limit (10); cannot set 9.") diff --git a/tests/test_polyspace.py b/tests/test_polyspace.py index fccf44a..baa9513 100644 --- a/tests/test_polyspace.py +++ b/tests/test_polyspace.py @@ -12,8 +12,8 @@ warnings_wrapper, ) -TEST_IN_DIR = Path(__file__).parent / 'test_in' -TEST_OUT_DIR = Path(__file__).parent / 'test_out' +TEST_IN_DIR = Path(__file__).parent / "test_in" +TEST_OUT_DIR = Path(__file__).parent / "test_out" class TestCodeProverWarnings(unittest.TestCase): @@ -24,14 +24,14 @@ def caplog(self, caplog): def setUp(self): Finding.fingerprints = {} self.warnings = WarningsPlugin() - self.dut = self.warnings.activate_checker_name('polyspace', False, None) + self.dut = self.warnings.activate_checker_name("polyspace", False, None) self.dut.checkers = [ PolyspaceFamilyChecker("run-time check", "color", "red", *self.dut.logging_args), PolyspaceFamilyChecker("run-time check", "color", "orange", *self.dut.logging_args), ] def test_code_prover_tsv_file(self): - with open(TEST_IN_DIR / 'polyspace.tsv', newline="") as file: + with 
open(TEST_IN_DIR / "polyspace.tsv", newline="") as file: self.warnings.check_logfile(file) count = self.warnings.return_check_limits() self.assertEqual( @@ -50,7 +50,7 @@ def caplog(self, caplog): def setUp(self): self.warnings = WarningsPlugin() - self.dut = self.warnings.activate_checker_name('polyspace', False, None) + self.dut = self.warnings.activate_checker_name("polyspace", False, None) self.dut.checkers = [ PolyspaceFamilyChecker("defect", "information", "impact: high", *self.dut.logging_args), PolyspaceFamilyChecker("defect", "information", "impact: medium", *self.dut.logging_args), @@ -58,7 +58,7 @@ def setUp(self): ] def test_bug_finder_tsv_file(self): - with open(TEST_IN_DIR / 'polyspace.tsv', newline="") as file: + with open(TEST_IN_DIR / "polyspace.tsv", newline="") as file: self.warnings.check_logfile(file) count = self.warnings.return_check_limits() self.assertEqual( @@ -74,72 +74,72 @@ def test_bug_finder_tsv_file(self): class TestPolyspaceWarnings(unittest.TestCase): def setUp(self): Finding.fingerprints = {} - os.environ['MIN_POLY_WARNINGS'] = '0' - os.environ['MAX_POLY_WARNINGS'] = '0' + os.environ["MIN_POLY_WARNINGS"] = "0" + os.environ["MAX_POLY_WARNINGS"] = "0" def tearDown(self): - for var in ('MIN_POLY_WARNINGS', 'MAX_POLY_WARNINGS'): + for var in ("MIN_POLY_WARNINGS", "MAX_POLY_WARNINGS"): if var in os.environ: del os.environ[var] def test_config_file(self): retval = warnings_wrapper([ - '--config', str(TEST_IN_DIR / 'config_example_polyspace.yml'), - str(TEST_IN_DIR / 'polyspace.tsv') + "--config", str(TEST_IN_DIR / "config_example_polyspace.yml"), + str(TEST_IN_DIR / "polyspace.tsv") ]) self.assertEqual(61, retval) def test_code_quality(self): - filename = 'polyspace_code_quality.json' + filename = "polyspace_code_quality.json" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--code-quality', out_file, - '--config', str(TEST_IN_DIR / 'config_example_polyspace.yml'), - str(TEST_IN_DIR / 'polyspace.tsv'), + "--code-quality", out_file, + "--config", str(TEST_IN_DIR / "config_example_polyspace.yml"), + str(TEST_IN_DIR / "polyspace.tsv"), ]) self.assertEqual(61, retval) self.assertTrue(filecmp.cmp(out_file, ref_file)) def test_code_quality_no_green(self): - out_file = str(TEST_OUT_DIR / 'polyspace_code_quality_green.json') - ref_file = str(TEST_IN_DIR / 'polyspace_code_quality.json') + out_file = str(TEST_OUT_DIR / "polyspace_code_quality_green.json") + ref_file = str(TEST_IN_DIR / "polyspace_code_quality.json") retval = warnings_wrapper([ - '--code-quality', out_file, - '--config', str(TEST_IN_DIR / 'config_example_polyspace_green.yml'), - str(TEST_IN_DIR / 'polyspace.tsv'), + "--code-quality", out_file, + "--config", str(TEST_IN_DIR / "config_example_polyspace_green.yml"), + str(TEST_IN_DIR / "polyspace.tsv"), ]) self.assertEqual(61, retval) self.assertTrue(filecmp.cmp(out_file, ref_file)) def test_exclude_yaml_config(self): - os.environ['PRODUCT'] = '12345' + os.environ["PRODUCT"] = "12345" filename = "polyspace_code_quality_exclude.json" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--code-quality', out_file, - '--config', str(TEST_IN_DIR / 'config_example_polyspace_exclude.yml'), - str(TEST_IN_DIR / 'polyspace.tsv'), + "--code-quality", out_file, + "--config", str(TEST_IN_DIR / "config_example_polyspace_exclude.yml"), + str(TEST_IN_DIR / "polyspace.tsv"), ]) self.assertEqual(42, retval) self.assertTrue(filecmp.cmp(out_file, ref_file)) del 
os.environ["PRODUCT"] def test_exclude_json_config(self): - os.environ['PRODUCT'] = '12345' + os.environ["PRODUCT"] = "12345" filename = "polyspace_code_quality_exclude.json" out_file = str(TEST_OUT_DIR / filename) ref_file = str(TEST_IN_DIR / filename) retval = warnings_wrapper([ - '--code-quality', out_file, - '--config', str(TEST_IN_DIR / 'config_example_polyspace_exclude.json'), - str(TEST_IN_DIR / 'polyspace.tsv'), + "--code-quality", out_file, + "--config", str(TEST_IN_DIR / "config_example_polyspace_exclude.json"), + str(TEST_IN_DIR / "polyspace.tsv"), ]) self.assertEqual(42, retval) self.assertTrue(filecmp.cmp(out_file, ref_file)) del os.environ["PRODUCT"] -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_robot.py b/tests/test_robot.py index 71a63c5..096797c 100644 --- a/tests/test_robot.py +++ b/tests/test_robot.py @@ -12,21 +12,21 @@ def caplog(self, caplog): def setUp(self): self.warnings = WarningsPlugin() - self.dut = self.warnings.activate_checker_name('robot', True, None) - self.suite1 = 'Suite One' - self.suite2 = 'Suite Two' + self.dut = self.warnings.activate_checker_name("robot", True, None) + self.suite1 = "Suite One" + self.suite2 = "Suite Two" self.dut.checkers = [ RobotSuiteChecker(self.suite1, *self.dut.logging_args), RobotSuiteChecker(self.suite2, *self.dut.logging_args), ] def test_no_warning(self): - with open('tests/test_in/junit_no_fail.xml') as xmlfile: + with open("tests/test_in/junit_no_fail.xml") as xmlfile: self.warnings.check(xmlfile.read()) self.assertEqual(self.warnings.return_count(), 0) def test_single_warning(self): - with open('tests/test_in/robot_single_fail.xml') as xmlfile: + with open("tests/test_in/robot_single_fail.xml") as xmlfile: self.warnings.check(xmlfile.read()) count = self.warnings.return_count() self.assertEqual(count, 1) @@ -38,9 +38,9 @@ def test_single_warning(self): def test_double_warning_and_verbosity(self): retval = warnings_wrapper([ - '--verbose', - '--robot', - 'tests/test_in/robot_double_fail.xml', + "--verbose", + "--robot", + "tests/test_in/robot_double_fail.xml", ]) self.assertEqual( [ @@ -54,37 +54,37 @@ def test_double_warning_and_verbosity(self): self.assertEqual(retval, 2) def test_invalid_xml(self): - self.warnings.check('this is not xml') + self.warnings.check("this is not xml") self.assertEqual(self.warnings.return_count(), 0) def test_testsuites_root(self): self.dut.checkers = [ - RobotSuiteChecker('test_warn_plugin_double_fail', *self.dut.logging_args), - RobotSuiteChecker('test_warn_plugin_no_double_fail', *self.dut.logging_args), + RobotSuiteChecker("test_warn_plugin_double_fail", *self.dut.logging_args), + RobotSuiteChecker("test_warn_plugin_no_double_fail", *self.dut.logging_args), ] - with open('tests/test_in/junit_double_fail.xml') as xmlfile: + with open("tests/test_in/junit_double_fail.xml") as xmlfile: self.warnings.check(xmlfile.read()) count = self.warnings.return_count() self.assertEqual(count, 2) def test_check_suite_name(self): self.dut.checkers = [ - RobotSuiteChecker('nonexistent_suite_name', *self.dut.logging_args, check_suite_name=True), + RobotSuiteChecker("nonexistent_suite_name", *self.dut.logging_args, check_suite_name=True), ] - with open('tests/test_in/robot_double_fail.xml') as xmlfile: + with open("tests/test_in/robot_double_fail.xml") as xmlfile: with self.assertRaises(SystemExit) as c_m: self.warnings.check(xmlfile.read()) self.assertEqual(c_m.exception.code, -1) def test_robot_version_5(self): self.dut.checkers = [ - 
RobotSuiteChecker('Empty Flash Product Id', *self.dut.logging_args, check_suite_name=True), + RobotSuiteChecker("Empty Flash Product Id", *self.dut.logging_args, check_suite_name=True), ] - with open('tests/test_in/robot_version_5.xml') as xmlfile: + with open("tests/test_in/robot_version_5.xml") as xmlfile: self.warnings.check(xmlfile.read()) count = self.warnings.return_count() self.assertEqual(count, 6) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_sphinx.py b/tests/test_sphinx.py index 9992aae..21649f9 100644 --- a/tests/test_sphinx.py +++ b/tests/test_sphinx.py @@ -12,24 +12,28 @@ def caplog(self, caplog): def setUp(self): self.warnings = WarningsPlugin() - self.warnings.activate_checker_name('sphinx', True, None) + self.warnings.activate_checker_name("sphinx", True, None) def test_no_warning(self): - self.warnings.check('This should not be treated as warning') + self.warnings.check("This should not be treated as warning") self.assertEqual(self.warnings.return_count(), 0) def test_single_warning(self): - dut = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'" + dut = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document "\ + "u'installation'" self.warnings.check(dut) self.assertEqual(self.warnings.return_count(), 1) self.assertEqual([f"{dut}"], self.caplog.messages) def test_warning_no_line_number(self): - dut1 = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'" - dut2 = "/home/bljah/test/index.rst:None: WARNING: toctree contains reference to nonexisting document u'installation'" - dut3 = "/home/bljah/test/index.rst:: WARNING: toctree contains reference to nonexisting document u'installation'" + dut1 = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document "\ + "u'installation'" + dut2 = "/home/bljah/test/index.rst:None: WARNING: toctree contains reference to nonexisting document "\ + "u'installation'" + dut3 = "/home/bljah/test/index.rst:: WARNING: toctree contains reference to nonexisting document "\ + "u'installation'" dut4 = "/home/bljah/test/SRS.rst: WARNING: item non_existing_requirement is not defined" - dut5 = "CRITICAL: Problems with \"include\" directive path:" + dut5 = "CRITICAL: Problems with \"include\" directive path:" self.warnings.check(dut1) self.warnings.check(dut2) self.warnings.check(dut3) @@ -39,9 +43,10 @@ def test_warning_no_line_number(self): self.assertEqual([f"{dut1}", f"{dut2}", f"{dut3}", f"{dut4}", f"{dut5}"], self.caplog.messages) def test_single_warning_mixed(self): - dut1 = 'This1 should not be treated as warning' - dut2 = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'" - dut3 = 'This should not be treated as warning2' + dut1 = "This1 should not be treated as warning" + dut2 = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document "\ + "u'installation'" + dut3 = "This should not be treated as warning2" self.warnings.check(dut1) self.warnings.check(dut2) self.warnings.check(dut3) @@ -49,8 +54,10 @@ def test_single_warning_mixed(self): self.assertEqual([f"{dut2}"], self.caplog.messages) def test_multiline(self): - duterr1 = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'\n" - duterr2 = "/home/bljah/test/index.rst:None: WARNING: toctree contains reference to nonexisting
document u'installation'\n" + duterr1 = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document "\ + "u'installation'\n" + duterr2 = "/home/bljah/test/index.rst:None: WARNING: toctree contains reference to nonexisting document "\ + "u'installation'\n" dut = "This1 should not be treated as warning\n" dut += duterr1 dut += "This should not be treated as warning2\n" @@ -61,7 +68,7 @@ def test_multiline(self): def test_deprecation_warning(self): duterr1 = "/usr/local/lib/python3.5/dist-packages/sphinx/application.py:402: RemovedInSphinx20Warning: "\ - "app.info() is now deprecated. Use sphinx.util.logging instead. RemovedInSphinx20Warning\n" + "app.info() is now deprecated. Use sphinx.util.logging instead. RemovedInSphinx20Warning\n" dut = "This should not be treated as warning2\n" dut += duterr1 self.warnings.check(dut) @@ -69,9 +76,9 @@ def test_deprecation_warning(self): self.assertEqual([], self.caplog.messages) def test_deprecation_warning_included(self): - self.warnings.get_checker('sphinx').include_sphinx_deprecation() + self.warnings.get_checker("sphinx").include_sphinx_deprecation() duterr1 = "/usr/local/lib/python3.5/dist-packages/sphinx/application.py:402: RemovedInSphinx20Warning: "\ - "app.info() is now deprecated. Use sphinx.util.logging instead. RemovedInSphinx20Warning\n" + "app.info() is now deprecated. Use sphinx.util.logging instead. RemovedInSphinx20Warning\n" dut = "This1 should not be treated as warning\n" dut += duterr1 self.warnings.check(dut) @@ -79,7 +86,8 @@ def test_deprecation_warning_included(self): self.assertEqual([f"{duterr1.strip()}"], self.caplog.messages) def test_warning_no_docname(self): - duterr1 = "WARNING: List item 'CL-UNDEFINED_CL_ITEM' in merge/pull request 138 is not defined as a checklist-item.\n" + duterr1 = "WARNING: List item 'CL-UNDEFINED_CL_ITEM' in merge/pull request 138 is not defined as a "\ + "checklist-item.\n" self.warnings.check(duterr1) self.assertEqual(self.warnings.return_count(), 1) self.assertEqual([f"{duterr1.strip()}"], self.caplog.messages) diff --git a/tests/test_warnings.py b/tests/test_warnings.py index a2dd663..1da3c01 100644 --- a/tests/test_warnings.py +++ b/tests/test_warnings.py @@ -14,121 +14,137 @@ def caplog(self, caplog): def test_doxygen_warning(self): warnings = WarningsPlugin() - warnings.activate_checker_name('doxygen', *self.logging_args) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.activate_checker_name("doxygen", *self.logging_args) + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 1) def test_sphinx_warning(self): warnings = WarningsPlugin() - warnings.activate_checker_name('sphinx', *self.logging_args) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.activate_checker_name("sphinx", *self.logging_args) + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 1) def test_junit_warning(self): warnings = WarningsPlugin() - warnings.activate_checker_name('junit', *self.logging_args) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + warnings.activate_checker_name("junit", *self.logging_args) + with 
open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 1) def test_doxygen_warning_only(self): warnings = WarningsPlugin() - warnings.activate_checker_name('doxygen', *self.logging_args) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.activate_checker_name("doxygen", *self.logging_args) + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 1) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 1) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 1) - warnings.check('This should not be treated as warning2') + warnings.check("This should not be treated as warning2") self.assertEqual(warnings.return_count(), 1) def test_sphinx_warning_only(self): warnings = WarningsPlugin() - warnings.activate_checker_name('sphinx', *self.logging_args) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.activate_checker_name("sphinx", *self.logging_args) + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 1) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 1) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 1) - warnings.check('This should not be treated as warning2') + warnings.check("This should not be treated as warning2") self.assertEqual(warnings.return_count(), 1) def test_junit_warning_only(self): warnings = WarningsPlugin() - warnings.activate_checker_name('junit', *self.logging_args) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + warnings.activate_checker_name("junit", *self.logging_args) + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 1) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 1) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test 
functions\"") self.assertEqual(warnings.return_count(), 1) - warnings.check('This should not be treated as warning2') + warnings.check("This should not be treated as warning2") self.assertEqual(warnings.return_count(), 1) def test_doxy_sphinx_warning(self): warnings = WarningsPlugin() - warnings.activate_checker_name('sphinx', *self.logging_args) - warnings.activate_checker_name('doxygen', *self.logging_args) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.activate_checker_name("sphinx", *self.logging_args) + warnings.activate_checker_name("doxygen", *self.logging_args) + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 1) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 2) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 2) - warnings.check('This should not be treated as warning2') + warnings.check("This should not be treated as warning2") self.assertEqual(warnings.return_count(), 2) def test_doxy_junit_warning(self): warnings = WarningsPlugin() - warnings.activate_checker_name('doxygen', *self.logging_args) - warnings.activate_checker_name('junit', *self.logging_args) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.activate_checker_name("doxygen", *self.logging_args) + warnings.activate_checker_name("junit", *self.logging_args) + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") self.assertEqual(warnings.return_count(), 1) - warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'") + warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document " + "u'installation'") self.assertEqual(warnings.return_count(), 1) - with open('tests/test_in/junit_single_fail.xml') as xmlfile: + with open("tests/test_in/junit_single_fail.xml") as xmlfile: warnings.check(xmlfile.read()) self.assertEqual(warnings.return_count(), 2) - warnings.check('This should not be treated as warning2') + warnings.check("This should not be treated as warning2") self.assertEqual(warnings.return_count(), 2) def test_sphinx_junit_warning(self): warnings = WarningsPlugin() - warnings.activate_checker_name('sphinx', *self.logging_args) - warnings.activate_checker_name('junit', *self.logging_args) - warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"') + warnings.activate_checker_name("sphinx", *self.logging_args) + warnings.activate_checker_name("junit", *self.logging_args) + warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match " + "old title \"Some freaky test functions\"") 
         self.assertEqual(warnings.return_count(), 0)
-        warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'")
+        warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document "
+            "u'installation'")
         self.assertEqual(warnings.return_count(), 1)
-        with open('tests/test_in/junit_single_fail.xml') as xmlfile:
+        with open("tests/test_in/junit_single_fail.xml") as xmlfile:
             warnings.check(xmlfile.read())
         self.assertEqual(warnings.return_count(), 2)
-        warnings.check('This should not be treated as warning2')
+        warnings.check("This should not be treated as warning2")
         self.assertEqual(warnings.return_count(), 2)
 
     def test_all_warning(self):
         warnings = WarningsPlugin()
-        warnings.activate_checker_name('sphinx', *self.logging_args)
-        warnings.activate_checker_name('doxygen', *self.logging_args)
-        warnings.activate_checker_name('junit', *self.logging_args)
-        warnings.check('testfile.c:6: warning: group test: ignoring title "Some test functions" that does not match old title "Some freaky test functions"')
-        self.assertEqual(warnings.return_count(), 1)
-        warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'")
+        warnings.activate_checker_name("sphinx", *self.logging_args)
+        warnings.activate_checker_name("doxygen", *self.logging_args)
+        warnings.activate_checker_name("junit", *self.logging_args)
+        warnings.check("testfile.c:6: warning: group test: ignoring title \"Some test functions\" that does not match "
+            "old title \"Some freaky test functions\"")
+        self.assertEqual(warnings.return_count(), 1)
+        warnings.check("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document "
+            "u'installation'")
         self.assertEqual(warnings.return_count(), 2)
-        with open('tests/test_in/junit_single_fail.xml') as xmlfile:
+        with open("tests/test_in/junit_single_fail.xml") as xmlfile:
             warnings.check(xmlfile.read())
         self.assertEqual(warnings.return_count(), 3)
-        warnings.check('This should not be treated as warning2')
+        warnings.check("This should not be treated as warning2")
         self.assertEqual(warnings.return_count(), 3)
 
     def test_non_existent_checker_name(self):
         warnings = WarningsPlugin()
-        invalid_checker_name = 'non-existent'
+        invalid_checker_name = "non-existent"
         warnings.activate_checker_name(invalid_checker_name, *self.logging_args)
         self.assertEqual([f"Checker {invalid_checker_name} does not exist"], self.caplog.messages)
diff --git a/tests/test_xmlrunner.py b/tests/test_xmlrunner.py
index 119f272..4ce9d50 100644
--- a/tests/test_xmlrunner.py
+++ b/tests/test_xmlrunner.py
@@ -12,23 +12,23 @@ def caplog(self, caplog):
 
     def setUp(self):
         self.warnings = WarningsPlugin()
-        self.warnings.activate_checker_name('xmlrunner', True, None)
+        self.warnings.activate_checker_name("xmlrunner", True, None)
 
     def test_no_warning(self):
-        dut = 'This should not be treated as warning'
+        dut = "This should not be treated as warning"
         self.warnings.check(dut)
         self.assertEqual(self.warnings.return_count(), 0)
 
     def test_single_warning(self):
-        dut = 'ERROR [0.000s]: test_some_error_test (something.anything.somewhere)'
+        dut = "ERROR [0.000s]: test_some_error_test (something.anything.somewhere)"
         self.warnings.check(dut)
         self.assertEqual(self.warnings.return_count(), 1)
         self.assertEqual([f"{dut}"], self.caplog.messages)
 
     def test_single_warning_mixed(self):
-        dut1 = 'This1 should not be treated as warning'
-        dut2 = 'ERROR [0.000s]: test_some_error_test (something.anything.somewhere)'
-        dut3 = 'This should not be treated as warning2'
+        dut1 = "This1 should not be treated as warning"
+        dut2 = "ERROR [0.000s]: test_some_error_test (something.anything.somewhere)"
+        dut3 = "This should not be treated as warning2"
         self.warnings.check(dut1)
         self.warnings.check(dut2)
         self.warnings.check(dut3)
@@ -36,8 +36,10 @@ def test_single_warning_mixed(self):
         self.assertEqual([f"{dut2}"], self.caplog.messages)
 
     def test_multiline(self):
-        duterr1 = "ERROR [0.000s]: test_some_error_test (something.anything.somewhere) \"Some test functions\" that does not match old title \"Some freaky test functions\"\n"
-        duterr2 = "ERROR [0.000s]: ignoring title \"Some test functions\" that does not match old title \"Some freaky test functions\"\n"
+        duterr1 = "ERROR [0.000s]: test_some_error_test (something.anything.somewhere) \"Some test functions\" "\
+            "that does not match old title \"Some freaky test functions\"\n"
+        duterr2 = "ERROR [0.000s]: ignoring title \"Some test functions\" that does not match old title "\
+            "\"Some freaky test functions\"\n"
         dut = "This1 should not be treated as warning\n"
         dut += duterr1
         dut += "This should not be treated as warning2\n"