Skip to content

Commit

Permalink
Merge branch 'REST-API' into namespace
Browse files Browse the repository at this point in the history
  • Loading branch information
JokeWaumans committed Aug 23, 2024
2 parents bc6b704 + 391e5fb commit 3106c30
Show file tree
Hide file tree
Showing 6 changed files with 64 additions and 23 deletions.
10 changes: 9 additions & 1 deletion example/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,17 @@ PYTHONWARNINGS?= default::DeprecationWarning
PAPER ?=
BUILDDIR ?= _build

# logging variables
DEBUG ?= 0
LOGLEVEL ?= WARNING

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -ET -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
ALLSPHINXOPTS = -E -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
ifeq (${DEBUG}, 1)
ALLSPHINXOPTS += -T
endif
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

Expand Down Expand Up @@ -43,6 +50,7 @@ clean:
-rm -rf $(BUILDDIR)/*

html:
export LOGLEVEL=$(LOGLEVEL)
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
Expand Down
12 changes: 12 additions & 0 deletions example/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
from mlx.coverity import __version__
import mlx.traceability
from decouple import config
import logging
from sphinx.util.logging import getLogger

pkg_version = __version__

Expand Down Expand Up @@ -317,3 +319,13 @@

TRACEABILITY_ITEM_ID_REGEX = r"([A-Z_]+-[A-Z0-9_]+)"
TRACEABILITY_ITEM_RELINK = {}

# Allow overriding the Coverity plugin's log level through the LOGLEVEL
# environment variable, e.g. `LOGLEVEL=DEBUG make html` (set by the Makefile's
# html target). NOTE(review): assumes `os` is imported earlier in conf.py —
# it is not visible in this diff hunk; confirm.
log_level = os.environ.get('LOGLEVEL', None)
if log_level:
    # Map a level name like "debug"/"INFO" to its numeric value. `getattr`
    # with a None default never raises, so validate the result explicitly
    # instead of wrapping everything in a bare `except:`; also reject bools
    # (e.g. LOGLEVEL=raiseExceptions would resolve to True, an int subclass).
    numeric_level = getattr(logging, log_level.upper(), None)
    if not isinstance(numeric_level, int) or isinstance(numeric_level, bool):
        raise ValueError(f"Invalid log level: {log_level}")
    logger = getLogger("mlx.coverity_logging")
    logger.setLevel(level=numeric_level)

12 changes: 7 additions & 5 deletions mlx/coverity/coverity_logging.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
"""Module to provide functions that accommodate logging."""

from sphinx.util.logging import getLogger
from logging import WARNING

LOGGER = getLogger(__name__)
LOGGER.setLevel(WARNING)


def report_warning(msg, docname, lineno=None):
Expand All @@ -11,11 +15,10 @@ def report_warning(msg, docname, lineno=None):
docname (str): Name of the document in which the error occurred
lineno (str): Line number in the document on which the error occurred
"""
logger = getLogger(__name__)
if lineno is not None:
logger.warning(msg, location=(docname, lineno))
LOGGER.warning(msg, location=(docname, lineno))
else:
logger.warning(msg, location=docname)
LOGGER.warning(msg, location=docname)


def report_info(msg, nonl=False):
Expand All @@ -25,5 +28,4 @@ def report_info(msg, nonl=False):
msg (str): Message of the warning
nonl (bool): True when no new line at end
"""
logger = getLogger(__name__)
logger.info(msg, nonl=nonl)
LOGGER.info(msg, nonl=nonl)
18 changes: 9 additions & 9 deletions mlx/coverity/coverity_services.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@
"""Services and other utilities for Coverity scripting"""

import csv
import logging
import re
from collections import namedtuple
from urllib.parse import urlencode

import requests
from sphinx.util.logging import getLogger

from mlx.coverity.coverity_logging import report_info

# Coverity built in Impact statuses
IMPACT_LIST = ["High", "Medium", "Low"]

Expand Down Expand Up @@ -52,7 +52,7 @@ def __init__(self, hostname):
self._api_endpoint = f"https://{hostname}/api/{self.version}"
self._checkers = []
self._columns = {}
self.logger = getLogger("coverity_logging")
self.logger = getLogger("mlx.coverity_logging")

@property
def base_url(self):
Expand Down Expand Up @@ -256,7 +256,7 @@ def get_defects(self, stream, filters, column_names):
"rows": list of [list of dictionaries {"key": <key>, "value": <value>}]
}
"""
logging.info("Querying Coverity for defects in stream [%s] ...", stream)
report_info(f"Querying Coverity for defects in stream [{stream}] ...")
query_filters = [
{
"columnKey": "streams",
Expand Down Expand Up @@ -306,7 +306,7 @@ def get_defects(self, stream, filters, column_names):
}
}

logging.info("Running Coverity query...")
report_info("Running Coverity query...")
return self.retrieve_issues(data)

def handle_attribute_filter(self, attribute_values, name, valid_attributes, allow_regex=False):
Expand All @@ -322,19 +322,19 @@ def handle_attribute_filter(self, attribute_values, name, valid_attributes, allo
Returns:
set[str]: The attributes values to query with
"""
logging.info("Using %s filter [%s]", name, attribute_values)
report_info(f"Using {name} filter [{attribute_values}]")
filter_values = set()
for field in attribute_values.split(","):
if not valid_attributes or field in valid_attributes:
logging.info("Classification [%s] is valid", field)
report_info(f"Classification [{field}] is valid")
filter_values.add(field)
elif allow_regex:
pattern = re.compile(field)
for element in valid_attributes:
if pattern.search(element):
filter_values.add(element)
else:
logging.error("Invalid %s filter: %s", name, field)
self.logger.error(f"Invalid {name} filter: {field}")
return filter_values

def handle_component_filter(self, attribute_values):
Expand All @@ -346,7 +346,7 @@ def handle_component_filter(self, attribute_values):
Returns:
list[str]: The list of attributes
"""
logging.info("Using Component filter [%s]", attribute_values)
report_info(f"Using Component filter [{attribute_values}]")
parser = csv.reader([attribute_values])
filter_values = []
for fields in parser:
Expand Down
34 changes: 26 additions & 8 deletions tests/test_coverity.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,7 @@
from pathlib import Path
from parameterized import parameterized


from mlx.coverity import SphinxCoverityConnector, CoverityDefectService
from mlx.coverity import SphinxCoverityConnector, CoverityDefect, CoverityDefectService
from .filters import test_defect_filter_0, test_defect_filter_1, test_defect_filter_2, test_defect_filter_3

TEST_FOLDER = Path(__file__).parent
Expand Down Expand Up @@ -62,6 +61,7 @@ def initialize_coverity_service(self, login=False):
return coverity_service

def test_session_by_stream_validation(self):
"""To test the session authentication, the function `validate_stream` is used."""
coverity_service = self.initialize_coverity_service(login=False)
with requests_mock.mock() as mocker:
mocker.get(self.stream_url, json={})
Expand All @@ -73,6 +73,7 @@ def test_session_by_stream_validation(self):

@patch("mlx.coverity.coverity_services.requests")
def test_stream_validation(self, mock_requests):
"""Test if the function `validate_stream` is called once with the correct url"""
mock_requests.return_value = MagicMock(spec=requests)

# Get the base url
Expand All @@ -86,6 +87,9 @@ def test_stream_validation(self, mock_requests):
mock_method.assert_called_with("https://scan.coverity.com/api/v2/streams/test_stream")

def test_retrieve_columns(self):
"""Test the function `retrieve_column_keys`.
Check if the columns property is correctly initialized by checking if the name of a column returns
the correct key."""
with open(f"{TEST_FOLDER}/columns_keys.json", "r") as content:
column_keys = json.loads(content.read())
# initialize what needed for the REST API
Expand All @@ -102,11 +106,13 @@ def test_retrieve_columns(self):
assert coverity_service.columns["CID"] == "cid"

def test_retrieve_checkers(self):
"""Test the function `retrieve_checkers`. Check if the returned list of the checkers property is equal to the
keys of checkerAttributedata of the returned data of the request."""
self.fake_checkers = {
"checkerAttribute": {"name": "checker", "displayName": "Checker"},
"checkerAttributedata": [
{"key": "MISRA", "value": "MISRA"},
{"key": "CHECKER", "value": "CHECKER"}
{"key": "MISRA", "value": "M"},
{"key": "CHECKER", "value": "C"}
],
}
# initialize what needed for the REST API
Expand All @@ -129,6 +135,10 @@ def test_retrieve_checkers(self):
test_defect_filter_3,
])
def test_get_defects(self, filters, column_names, request_data):
"""Check get defects with different filters. Check if the response of `get_defects` is the same as expected.
The data is obtained from the filters.py file.
Due to the usage of set in `get_defects` (column_keys), the function `ordered` is used to compare the returned
data of the request where order does not matter."""
with open(f"{TEST_FOLDER}/columns_keys.json", "r") as content:
column_keys = json.loads(content.read())
self.fake_checkers = {
Expand Down Expand Up @@ -159,6 +169,9 @@ def test_get_defects(self, filters, column_names, request_data):
assert ordered(data) == ordered(request_data)

def test_get_filtered_defects(self):
"""Test `get_filtered_defects` of SphinxCoverityConnector. Check if `get_defects` is called once with the
correct arguments.
Tests also when `chart_attribute` of the node exists, the name will be added to column_names."""
sphinx_coverity_connector = SphinxCoverityConnector()
sphinx_coverity_connector.coverity_service = self.initialize_coverity_service(login=False)
sphinx_coverity_connector.stream = self.fake_stream
Expand All @@ -167,15 +180,20 @@ def test_get_filtered_defects(self):
"classification": "Intentional,Bug,Pending,Unclassified", "action": None, "component": None,
"cwe": None, "cid": None
}
column_names = {"Comment", "Checker", "Classification", "CID"}
fake_node = {"col": column_names,
"filters": node_filters}

column_names = {"Comment", "Classification", "CID"}
fake_node = CoverityDefect()
fake_node["col"] = column_names
fake_node["filters"] = node_filters
with patch.object(CoverityDefectService, "get_defects") as mock_method:
sphinx_coverity_connector.get_filtered_defects(fake_node)
mock_method.assert_called_once_with(self.fake_stream, fake_node["filters"], column_names)
fake_node["chart_attribute"] = "Checker"
column_names.add("Checker")
sphinx_coverity_connector.get_filtered_defects(fake_node)
mock_method.assert_called_with(self.fake_stream, fake_node["filters"], column_names)

def test_failed_login(self):
"""Test a failed login by mocking the status code when validating the stream."""
coverity_conf_service = CoverityDefectService("scan.coverity.com/")
stream_url = f"{coverity_conf_service.api_endpoint}/streams/{self.fake_stream}"

Expand Down
1 change: 1 addition & 0 deletions warnings_config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ sphinx:
max: 0
exclude:
- 'WARNING: Connection failed: HTTPSConnectionPool\(host=.+, port=\d+\): Max retries exceeded with url: /api/v2/[\w/?=&\\]+ \(Caused by NameResolutionError\(\"<urllib3\.connection\.HTTPSConnection object at [0-9a-z]+>: Failed to resolve .+ \(\[Errno -2\] Name or service not known\)\"\)\)'
- 'WARNING: Connection failed: HTTPSConnectionPool\(host=.+, port=\d+\): Max retries exceeded with url: /api/v2/[\w/?=&\\]+ \(Caused by NameResolutionError\(\"<urllib3\.connection\.HTTPSConnection object at [0-9a-z]+>: Failed to resolve .+ \(\[Errno -3\] Temporary failure in name resolution\)\"\)\)'
- 'WARNING: CID \d+: Could not find item ID .+ in traceability collection.'
- 'WARNING: cannot cache unpickable configuration value: .traceability_attributes_sort. \(because it contains a function, class, or module object\)'
doxygen:
Expand Down

0 comments on commit 3106c30

Please sign in to comment.