Add mypy flags --warn-redundant-casts and --warn-unused-ignores (#…
haakonvt authored Sep 26, 2023
1 parent 159b75d commit 7d9535c
Showing 26 changed files with 35 additions and 42 deletions.
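For context on the two flags being enabled: `warn_redundant_casts` makes mypy flag `typing.cast(...)` calls whose target type is what mypy already infers, and `warn_unused_ignores` flags `# type: ignore` comments on lines that produce no error. Most of the diff below deletes exactly such casts and ignore comments. A minimal standalone illustration (not code from this repository) of what each flag catches:

```python
from typing import cast


def greet(name: str) -> str:
    # warn_redundant_casts: 'name' is already 'str', so the cast is a no-op.
    # mypy reports: Redundant cast to "str"  [redundant-cast]
    return cast(str, name)


def add(a: int, b: int) -> int:
    # warn_unused_ignores: this line type-checks cleanly, so the comment
    # suppresses nothing. mypy reports: Unused "type: ignore" comment
    return a + b  # type: ignore
```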
2 changes: 1 addition & 1 deletion CHANGELOG.md
@@ -918,7 +918,7 @@ It will also cache the token between runs.
### Changed
- Client configuration no longer respects any environment variables. There are other libraries better
suited for loading configuration from the environment (such as builtin `os` or `pydantic`). There have also
- been several reports of ennvar name clash issues in tools built on top the SDK. We therefore
+ been several reports of envvar name clash issues in tools built on top the SDK. We therefore
consider this something that should be handled by the application consuming the SDK. All configuration of
`cognite.client.CogniteClient` now happens using a `cognite.client.ClientConfig` object. Global configuration such as
`max_connection_pool_size` and other options which apply to all client instances are now configured through
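For reference, a minimal sketch of the configuration style this changelog entry describes, assuming the SDK surface of that era (`ClientConfig`, `CogniteClient`, `global_config`); the project, app name, and token values are placeholders:

```python
from cognite.client import ClientConfig, CogniteClient, global_config
from cognite.client.credentials import Token

# Options that apply to all client instances are set on the global config:
global_config.max_connection_pool_size = 50

# Per-client settings are passed explicitly -- nothing is read from env vars:
config = ClientConfig(
    client_name="my-app",          # placeholder application name
    project="my-project",          # placeholder CDF project
    credentials=Token("dummy-token"),
    base_url="https://api.cognitedata.com",
)
client = CogniteClient(config)
```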
2 changes: 1 addition & 1 deletion cognite/client/_api/annotations.py
@@ -67,7 +67,7 @@ def suggest(self, annotations: Annotation | Sequence[Annotation]) -> Annotation
list_cls=AnnotationList,
resource_cls=Annotation,
resource_path=self._RESOURCE_PATH + "/suggest",
- items=items, # type: ignore[arg-type]
+ items=items,
)

@staticmethod
2 changes: 1 addition & 1 deletion cognite/client/_api/data_modeling/containers.py
@@ -82,7 +82,7 @@ def __iter__(self) -> Iterator[Container]:
Returns:
Iterator[Container]: yields Containers one by one.
"""
- return cast(Iterator[Container], self())
+ return self()

@overload
def retrieve(self, ids: ContainerIdentifier) -> Container | None:
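This and the following `__iter__` hunks share one cause: `__call__` on these API classes is already annotated with the precise iterator type, so the `cast` is a no-op that `warn_redundant_casts` rejects. A simplified, self-contained sketch of the pattern (types reduced to the bare minimum):

```python
from typing import Iterator


class Container: ...


class ContainersAPI:
    def __call__(self) -> Iterator[Container]:
        # Already returns the precise type ...
        yield from ()

    def __iter__(self) -> Iterator[Container]:
        # ... so `cast(Iterator[Container], self())` here would trigger:
        # error: Redundant cast to "Iterator[Container]"  [redundant-cast]
        return self()
```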
2 changes: 1 addition & 1 deletion cognite/client/_api/data_modeling/data_models.py
@@ -86,7 +86,7 @@ def __iter__(self) -> Iterator[DataModel]:
Returns:
Iterator[DataModel]: yields DataModels one by one.
"""
- return cast(Iterator[DataModel], self())
+ return self()

@overload
def retrieve(
2 changes: 1 addition & 1 deletion cognite/client/_api/data_modeling/instances.py
@@ -267,7 +267,7 @@ def __iter__(self) -> Iterator[Node]:
Returns:
Iterator[Node]: yields Instances one by one.
"""
- return cast(Iterator[Node], self(None, "node"))
+ return self(None, "node")

def retrieve(
self,
2 changes: 1 addition & 1 deletion cognite/client/_api/data_modeling/spaces.py
@@ -61,7 +61,7 @@ def __iter__(self) -> Iterator[Space]:
Returns:
Iterator[Space]: yields Spaces one by one.
"""
- return cast(Iterator[Space], self())
+ return self()

@overload
def retrieve(self, space: str) -> Space | None: # type: ignore[misc]
2 changes: 1 addition & 1 deletion cognite/client/_api/datapoint_tasks.py
@@ -203,7 +203,7 @@ def _validate_id_or_xid(

elif isinstance(ts, dict):
ts_validated = self._validate_user_supplied_dict_keys(ts, arg_name)
- if not isinstance(identifier := ts_validated[arg_name], exp_type): # type: ignore [literal-required]
+ if not isinstance(identifier := ts_validated[arg_name], exp_type):
self._raise_on_wrong_ts_identifier_type(identifier, arg_name, exp_type)
# We merge 'defaults' and given ts-dict, ts-dict takes precedence:
ts_dct = {**self.defaults, **ts_validated}
8 changes: 4 additions & 4 deletions cognite/client/_api/datapoints.py
@@ -1138,10 +1138,10 @@ def retrieve_dataframe_in_tz(
duplicated = find_duplicates(identifiers.as_primitives())
raise ValueError(f"The following identifiers were not unique: {duplicated}")

- intervals = to_fixed_utc_intervals(start, end, granularity) # type: ignore [arg-type]
+ intervals = to_fixed_utc_intervals(start, end, granularity)

queries = [
- {**ident_dct, "aggregates": aggregates, **interval} # type: ignore [arg-type]
+ {**ident_dct, "aggregates": aggregates, **interval}
for ident_dct, interval in itertools.product(identifiers.as_dicts(), intervals)
]

@@ -1159,8 +1159,8 @@
)

if uniform_index:
- freq = to_pandas_freq(granularity, start) # type: ignore [arg-type]
- start, end = align_large_granularity(start, end, granularity) # type: ignore [arg-type]
+ freq = to_pandas_freq(granularity, start)
+ start, end = align_large_granularity(start, end, granularity)
return df.reindex(pandas_date_range_tz(start, end, freq, inclusive="left"))

return df
3 changes: 0 additions & 3 deletions cognite/client/_api/documents.py
@@ -4,8 +4,6 @@
from pathlib import Path
from typing import IO, TYPE_CHECKING, BinaryIO, Literal, cast, overload

- from requests import Response

from cognite.client._api_client import APIClient
from cognite.client._constants import DEFAULT_LIMIT_READ
from cognite.client.data_classes import filters
@@ -542,7 +540,6 @@ def retrieve_content_buffer(self, id: int, buffer: BinaryIO) -> None:
with self._do_request(
"GET", f"{self._RESOURCE_PATH}/{id}/content", stream=True, accept="text/plain"
) as response:
- response = cast(Response, response)
for chunk in response.iter_content(chunk_size=2**21):
if chunk: # filter out keep-alive new chunks
buffer.write(chunk)
5 changes: 0 additions & 5 deletions cognite/client/_api/files.py
@@ -6,7 +6,6 @@
from io import BufferedReader
from pathlib import Path
from typing import (
- TYPE_CHECKING,
Any,
BinaryIO,
Iterator,
@@ -36,9 +35,6 @@
from cognite.client.utils._identifier import Identifier, IdentifierSequence
from cognite.client.utils._validation import process_asset_subtree_ids, process_data_set_ids

- if TYPE_CHECKING:
-     from requests import Response


class FilesAPI(APIClient):
_RESOURCE_PATH = "/files"
@@ -762,7 +758,6 @@ def _download_file_to_path(self, download_link: str, path: Path, chunk_size: int
with self._http_client_with_retry.request(
"GET", download_link, stream=True, timeout=self._config.file_transfer_timeout
) as r:
r = cast("Response", r)
with path.open("wb") as f:
for chunk in r.iter_content(chunk_size=chunk_size):
if chunk: # filter out keep-alive new chunks
2 changes: 1 addition & 1 deletion cognite/client/_api/sequences.py
@@ -938,7 +938,7 @@ def insert(
rows_per_request = self._SEQ_POST_LIMIT_ROWS

row_objs = [{"rows": all_rows[i : i + rows_per_request]} for i in range(0, len(all_rows), rows_per_request)]
- tasks = [({**base_obj, **rows},) for rows in row_objs] # type: ignore
+ tasks = [({**base_obj, **rows},) for rows in row_objs]
summary = utils._concurrency.execute_tasks(self._insert_data, tasks, max_workers=self._config.max_workers)
summary.raise_compound_exception_if_failed_tasks()

2 changes: 1 addition & 1 deletion cognite/client/_api/synthetic_time_series.py
@@ -138,7 +138,7 @@ def _build_expression(
for k, v in variables.items():
if isinstance(v, TimeSeries):
v = v.external_id
- expression_with_ts = re.sub( # type: ignore
+ expression_with_ts = re.sub(
re.compile(rf"\b{k}\b"), f"ts{{externalId:'{v}'{aggregate_str}}}", expression_with_ts
)
return expression_with_ts, expression_str
2 changes: 1 addition & 1 deletion cognite/client/_api_client.py
@@ -838,7 +838,7 @@ def _create_multiple(

def unwrap_element(el: T) -> CogniteResource | T:
if isinstance(el, dict):
- return input_resource_cls._load(el, cognite_client=self._cognite_client) # type: ignore[union-attr]
+ return input_resource_cls._load(el, cognite_client=self._cognite_client)
else:
return el

2 changes: 1 addition & 1 deletion cognite/client/data_classes/assets.py
@@ -762,7 +762,7 @@ def _locate_cycles(self) -> tuple[int, list[list[str]]]:
elif parent in has_cycles or xid == parent:
has_cycles.add(xid)
else:
- self._cycle_search(cast(str, xid), parent, edges, no_cycles, has_cycles)
+ self._cycle_search(xid, parent, edges, no_cycles, has_cycles)

return len(has_cycles), find_all_cycles_with_elements(has_cycles, edges)

4 changes: 2 additions & 2 deletions cognite/client/data_classes/data_modeling/containers.py
@@ -3,7 +3,7 @@
import json
from abc import ABC, abstractmethod
from dataclasses import asdict, dataclass
- from typing import Any, Literal, cast
+ from typing import Any, Literal

from cognite.client.data_classes._base import (
CogniteFilter,
@@ -64,7 +64,7 @@ def load(cls, resource: dict | str) -> ContainerCore:
data["indexes"] = {k: Index.load(v) for k, v in data["indexes"].items()} or None
if "properties" in data:
data["properties"] = {k: ContainerProperty.load(v) for k, v in data["properties"].items()} or None
- return cast(ContainerCore, super().load(data))
+ return super().load(data)

def dump(self, camel_case: bool = False) -> dict[str, Any]:
output = super().dump(camel_case)
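The `load` hunks here and in the files below point the same way: assuming the shared base class types `load` generically over `cls` (a plausible reading of the diff, not verified against the base class source), the call already evaluates to the subclass type, so wrapping it in `cast` is redundant. A sketch of that typing pattern:

```python
from __future__ import annotations

from typing import Any, TypeVar

T = TypeVar("T", bound="CogniteResource")


class CogniteResource:
    @classmethod
    def load(cls: type[T], data: dict[str, Any]) -> T:
        # Generic over `cls`: ContainerCore.load(...) is typed as
        # ContainerCore, so cast(ContainerCore, super().load(data)) in a
        # subclass override trips [redundant-cast] under warn_redundant_casts.
        instance = cls()
        instance.__dict__.update(data)
        return instance
```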
3 changes: 1 addition & 2 deletions cognite/client/data_classes/data_modeling/data_models.py
@@ -79,8 +79,7 @@ def load(cls, resource: dict | str) -> DataModelApply:
data = json.loads(resource) if isinstance(resource, str) else resource
if "views" in data:
data["views"] = [cls._load_view(v) for v in data["views"]] or None

return cast(DataModelApply, super().load(data))
return super().load(data)

def dump(self, camel_case: bool = False) -> dict[str, Any]:
output = super().dump(camel_case)
2 changes: 1 addition & 1 deletion cognite/client/data_classes/data_modeling/ids.py
@@ -190,7 +190,7 @@ def _load_identifier(
def create_args(id_: Id) -> tuple[str, str, str | None, Literal["node", "edge"] | None]:
if isinstance(id_, tuple) and is_instance:
if len(id_) == 2:
- return id_[0], id_[1], None, id_type # type: ignore[misc, return-value]
+ return id_[0], id_[1], None, id_type # type: ignore[return-value]
raise ValueError("Instance given as a tuple must have two elements (space, externalId)")
if isinstance(id_, tuple):
return id_[0], id_[1], id_[2] if len(id_) == 3 else None, None # type: ignore[misc]
6 changes: 3 additions & 3 deletions cognite/client/data_classes/data_modeling/instances.py
@@ -520,7 +520,7 @@ def dump(self, camel_case: bool = False) -> dict[str, Any]:
@classmethod
def load(cls, data: dict | str) -> EdgeApply:
data = json.loads(data) if isinstance(data, str) else data
- instance = cast(EdgeApply, super().load(data))
+ instance = super().load(data)

instance.type = DirectRelationReference.load(data["type"])
instance.start_node = DirectRelationReference.load(data["startNode"])
@@ -529,7 +529,7 @@ def load(cls, data: dict | str) -> EdgeApply:


class Edge(Instance):
"""An Edge. This is the read version of the edge.
"""An Edge. This is the read version of the edge.
Args:
space (str): The workspace for the edge an unique identifier for the space.
@@ -607,7 +607,7 @@ def dump(self, camel_case: bool = False) -> dict[str, Any]:
@classmethod
def load(cls, data: dict | str) -> Edge:
data = json.loads(data) if isinstance(data, str) else data
- instance = cast(Edge, super().load(data))
+ instance = super().load(data)

instance.type = DirectRelationReference.load(data["type"])
instance.start_node = DirectRelationReference.load(data["startNode"])
2 changes: 1 addition & 1 deletion cognite/client/data_classes/data_modeling/views.py
@@ -52,7 +52,7 @@ def load(cls, resource: dict | str) -> ViewCore:
if "filter" in data:
data["filter"] = Filter.load(data["filter"])

- return cast(ViewCore, super().load(data))
+ return super().load(data)

def dump(self, camel_case: bool = False) -> dict[str, Any]:
output = super().dump(camel_case)
4 changes: 2 additions & 2 deletions cognite/client/data_classes/datapoints.py
@@ -725,7 +725,7 @@ def get( # type: ignore [override]
DatapointsArray | list[DatapointsArray] | None: The requested item(s)
"""
# TODO: Question, can we type annotate without specifying the function?
- return super().get(id, external_id) # type: ignore [return-value]
+ return super().get(id, external_id)

def __str__(self) -> str:
return json.dumps(self.dump(convert_timestamps=True), indent=4)
@@ -804,7 +804,7 @@ def get( # type: ignore [override]
Datapoints | list[Datapoints] | None: The requested item(s)
"""
# TODO: Question, can we type annotate without specifying the function?
- return super().get(id, external_id) # type: ignore [return-value]
+ return super().get(id, external_id)

def __str__(self) -> str:
item = self.dump()
4 changes: 2 additions & 2 deletions cognite/client/data_classes/sequences.py
@@ -432,7 +432,7 @@ def to_pandas(self, column_names: str = "columnExternalId") -> pandas.DataFrame:
for eid in self.column_external_ids
]
# TODO: Optimization required (None/nan):
- return pd.DataFrame( # type: ignore
+ return pd.DataFrame(
[[x if x is not None else math.nan for x in r] for r in self.values],
index=self.row_numbers,
columns=df_columns,
@@ -475,7 +475,7 @@ def to_pandas(self, column_names: str = "externalId|columnExternalId") -> pandas
pandas.DataFrame: The sequence data list as a pandas DataFrame.
"""
pd = utils._auxiliary.local_import("pandas")
- return pd.concat([seq_data.to_pandas(column_names=column_names) for seq_data in self.data], axis=1) # type: ignore
+ return pd.concat([seq_data.to_pandas(column_names=column_names) for seq_data in self.data], axis=1)


class SequenceProperty(EnumProperty):
2 changes: 1 addition & 1 deletion cognite/client/data_classes/transformations/__init__.py
@@ -262,7 +262,7 @@ def _try_get_or_create_nonce(
other_client = CogniteClient(config)
try:
session = other_client.iam.sessions.create(credentials)
- ret = sessions_cache[key] = NonceCredentials(cast(int, session.id), cast(str, session.nonce), project)
+ ret = sessions_cache[key] = NonceCredentials(session.id, session.nonce, project)
except CogniteAPIError as err:
# This is fine, we might be missing SessionsACL. The OIDC credentials will then be passed
# directly to the backend service. We do however not catch CogniteAuthError as that would
4 changes: 2 additions & 2 deletions cognite/client/testing.py
@@ -205,6 +205,6 @@ def monkeypatch_cognite_client() -> Iterator[CogniteClientMock]:
>>> assert "Something went wrong" == e.message
"""
cognite_client_mock = CogniteClientMock()
- CogniteClient.__new__ = lambda *args, **kwargs: cognite_client_mock # type: ignore[assignment]
+ CogniteClient.__new__ = lambda *args, **kwargs: cognite_client_mock # type: ignore[method-assign]
yield cognite_client_mock
- CogniteClient.__new__ = lambda cls, *args, **kwargs: object.__new__(cls) # type: ignore[assignment]
+ CogniteClient.__new__ = lambda cls, *args, **kwargs: object.__new__(cls) # type: ignore[method-assign]
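The switch from `[assignment]` to `[method-assign]` here (and in `_pyodide_helpers.py` below) matches mypy 1.x introducing a dedicated error code for assigning to a method; with `warn_unused_ignores`, an ignore pinned to the superseded code is itself reported as unused. A standalone illustration, not repository code:

```python
class Greeter:
    def greet(self) -> str:
        return "hello"


# mypy >= 1.0 reports this as [method-assign] rather than [assignment],
# so the specific ignore code below is the one that actually suppresses it:
Greeter.greet = lambda self: "patched"  # type: ignore[method-assign]
```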
2 changes: 1 addition & 1 deletion cognite/client/utils/_graph.py
@@ -23,7 +23,7 @@ def find_extended_cycle(slow: str, edges: dict, skip: set[str]) -> tuple[set[str
if slow in skip:
return all_elements, []

- all_elements.add(slow := edges[slow]) # type: ignore [arg-type, assignment]
+ all_elements.add(slow := edges[slow])
fast = edges[edges[fast]]

loop_elements = [loop_start := slow]
4 changes: 2 additions & 2 deletions cognite/client/utils/_pyodide_helpers.py
@@ -33,7 +33,7 @@ def patch_sdk_for_pyodide() -> None:

# - Use another HTTP adapter:
cc._http_client.HTTPClient._old__init__ = cc._http_client.HTTPClient.__init__ # type: ignore [attr-defined]
- cc._http_client.HTTPClient.__init__ = http_client__init__ # type: ignore [assignment]
+ cc._http_client.HTTPClient.__init__ = http_client__init__ # type: ignore [method-assign]

# - Inject these magic classes into the correct modules so that the user may import them normally:
cc.config.FusionNotebookConfig = FusionNotebookConfig # type: ignore [attr-defined]
@@ -64,7 +64,7 @@ def http_client__init__(
refresh_auth_header: Callable[[MutableMapping[str, Any]], None],
retry_tracker_factory: Callable[[HTTPClientConfig], _RetryTracker] = _RetryTracker,
) -> None:
- import pyodide_http # type: ignore [import]
+ import pyodide_http

self._old__init__(config, session, refresh_auth_header, retry_tracker_factory) # type: ignore [attr-defined]
self.session.mount("https://", pyodide_http._requests.PyodideHTTPAdapter())
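On the removed `import pyodide_http` ignore: an inline `# type: ignore[import]` becomes unused once mypy can resolve the module, or once the missing import is silenced in config instead. One way to do the latter — a hypothetical section mirroring the `[mypy-msal.*]` entry in `mypy.ini` below, not necessarily what this repository does — would be:

```ini
[mypy-pyodide_http.*]
ignore_missing_imports = true
```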
2 changes: 2 additions & 0 deletions mypy.ini
@@ -11,6 +11,8 @@ plugins = numpy.typing.mypy_plugin
no_implicit_reexport = true
exclude = _priority_tpe\.py$|cognite/client/_proto.*
no_implicit_optional = true
+ warn_redundant_casts = true
+ warn_unused_ignores = true

[mypy-msal.*]
ignore_missing_imports = true
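The same two flags can also be passed on the command line, and both are included in mypy's `--strict` bundle, e.g.:

```bash
mypy --warn-redundant-casts --warn-unused-ignores cognite/
```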
