1
0
Fork 0

Complete type annotations

This commit is contained in:
Maurizio Porrato 2023-08-18 11:44:43 +01:00
parent 6c109d1875
commit 0b4da7c5cf
14 changed files with 183 additions and 106 deletions

View File

@ -31,4 +31,19 @@ lint = [
"isort>=5.11.5",
"pylint>=2.13.9",
"types-PyYAML>=6.0.12.11",
"mypy>=1.5.1",
]
[tool.pylint.main]
ignore-patterns = ["^\\.#"]
[tool.pylint.basic]
no-docstring-rgx = "^(test)?_"
[tool.pylint."messages control"]
disable = ["raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "useless-suppression", "deprecated-pragma", "use-symbolic-message-instead", "missing-module-docstring"]
enable = ["c-extension-no-member"]
[tool.pylint.variables]
ignored-argument-names = "_.*|^ignored_|^unused_"

View File

@ -1 +1,3 @@
from .classes import Bundle, Operator, Repo
__all__ = ["Repo", "Operator", "Bundle"]

View File

@ -2,7 +2,7 @@ import importlib
import logging
from collections.abc import Callable, Iterable
from inspect import getmembers, isfunction
from typing import Optional, Union
from typing import Any, Optional, Union
from .. import Bundle, Operator, Repo
@ -10,6 +10,10 @@ log = logging.getLogger(__name__)
class CheckResult:
"""
Generic result from a static check
"""
severity: int = 0
kind: str = "unknown"
check: Optional[str]
@ -26,16 +30,16 @@ class CheckResult:
self.check = check
self.reason = reason
def __str__(self):
def __str__(self) -> str:
return f"{self.kind}: {self.check}({self.origin}): {self.reason}"
def __repr__(self):
def __repr__(self) -> str:
return f"{self.kind}({self.check}, {self.origin}, {self.reason})"
def __int__(self):
def __int__(self) -> int:
return self.severity
def __eq__(self, other):
def __eq__(self, other: Any) -> bool:
return (self.kind, self.reason, self.check, self.origin) == (
other.kind,
other.reason,
@ -43,23 +47,31 @@ class CheckResult:
other.origin,
)
def __ne__(self, other):
def __ne__(self, other: Any) -> bool:
return not self == other
def __lt__(self, other):
def __lt__(self, other: Any) -> bool:
return int(self) < int(other)
def __hash__(self):
def __hash__(self) -> int:
return hash((self.kind, self.reason, self.check, self.origin))
class Warn(CheckResult):
"""
A non-fatal problem
"""
# pylint: disable=too-few-public-methods
severity = 40
kind = "warning"
class Fail(CheckResult):
"""
A critical problem
"""
# pylint: disable=too-few-public-methods
severity = 90
kind = "failure"
@ -72,7 +84,10 @@ Check = Callable[[Union[Repo, Operator, Bundle]], Iterable[CheckResult]]
def get_checks(
suite_name: str = "operator_repo.checks",
) -> dict[str, list[Check]]:
result = {}
"""
Return all the discovered checks in the given suite, grouped by resource type
"""
result: dict[str, list[Check]] = {}
for module_name, _ in SUPPORTED_TYPES:
result[module_name] = []
try:
@ -94,6 +109,9 @@ def get_checks(
def run_check(
check: Check, target: Union[Repo, Operator, Bundle]
) -> Iterable[CheckResult]:
"""
Run a check against a resource yielding all the problems found
"""
log.debug("Running %s check on %s", check.__name__, target)
for result in check(target):
result.check = check.__name__
@ -105,6 +123,9 @@ def run_suite(
targets: Iterable[Union[Repo, Operator, Bundle]],
suite_name: str = "operator_repo.checks",
) -> Iterable[CheckResult]:
"""
Run all the checks in a suite against a collection of resources
"""
checks = get_checks(suite_name)
for target in targets:
for target_type_name, target_type in SUPPORTED_TYPES:

View File

@ -6,7 +6,9 @@ from . import CheckResult, Fail
def check_upgrade(operator: Operator) -> Iterator[CheckResult]:
"""Validate upgrade graphs for all channels"""
all_channels = operator.channels | {operator.default_channel} - {None}
all_channels: set[str] = set(operator.channels)
if operator.default_channel is not None:
all_channels.add(operator.default_channel)
for channel in sorted(all_channels):
try:
channel_bundles = operator.channel_bundles(channel)

View File

@ -69,6 +69,10 @@ class Bundle:
@cached_property
def csv_full_name(self) -> tuple[str, str]:
        """
        :return: A tuple containing operator name and bundle version
            extracted from the bundle's csv file
        """
try:
csv_full_name = self.csv["metadata"]["name"]
name, version = csv_full_name.split(".", 1)
@ -80,11 +84,17 @@ class Bundle:
@property
def csv_operator_name(self) -> str:
"""
:return: The operator name from the csv file
"""
name, _ = self.csv_full_name
return name
@property
def csv_operator_version(self) -> str:
"""
:return: The bundle version from the csv file
"""
_, version = self.csv_full_name
return version
@ -109,7 +119,7 @@ class Bundle:
@property
def operator(self) -> "Operator":
"""
:return: The operator the bundle belongs to
:return: The Operator object the bundle belongs to
"""
if self._parent is None:
self._parent = Operator(self._bundle_path.parent)
@ -168,13 +178,15 @@ class Bundle:
:return: Default channel for the bundle
"""
try:
return self.annotations[
"operators.operatorframework.io.bundle.channel.default.v1"
].strip()
return str(
self.annotations[
"operators.operatorframework.io.bundle.channel.default.v1"
]
).strip()
except KeyError:
return None
def __eq__(self, other: object) -> bool:
def __eq__(self, other: Any) -> bool:
if not isinstance(other, self.__class__):
return False
if self.csv_operator_name != other.csv_operator_name:
@ -191,10 +203,10 @@ class Bundle:
)
return self.csv_operator_version == other.csv_operator_version
def __ne__(self, other: object) -> bool:
def __ne__(self, other: Any) -> bool:
return not self == other
def __lt__(self, other: object) -> bool:
def __lt__(self, other: Any) -> bool:
if not isinstance(other, self.__class__):
raise TypeError(
f"< not supported between instances of '{self.__class__.__name__}'"
@ -214,7 +226,7 @@ class Bundle:
)
return self.csv_operator_version < other.csv_operator_version
def __hash__(self):
def __hash__(self) -> int:
return hash((self.operator_name, self.operator_version))
def __repr__(self) -> str:
@ -227,6 +239,8 @@ class Bundle:
class Operator:
"""An operator containing a collection of bundles"""
_bundle_cache: dict[str, Bundle]
def __init__(self, operator_path: Union[str, Path], repo: Optional["Repo"] = None):
log.debug("Loading operator at %s", operator_path)
self._operator_path = Path(operator_path).resolve()
@ -265,6 +279,9 @@ class Operator:
@property
def repo(self) -> "Repo":
"""
:return: The Repo object the operator belongs to
"""
if self._parent is None:
self._parent = Repo(self._operator_path.parent.parent)
return self._parent
@ -327,18 +344,14 @@ class Operator:
"""
# The default channel for an operator is defined as the default
# channel of the highest bundle version
version_channel_pairs = [
(
x.csv_operator_version,
x.default_channel,
)
for x in self.all_bundles()
if x.default_channel is not None
]
try:
version_channel_pairs = [
(Version.parse(x), y) for x, y in version_channel_pairs
version_channel_pairs: list[tuple[Union[str, Version], str]] = [
(
Version.parse(x.csv_operator_version),
x.default_channel,
)
for x in self.all_bundles()
if x.default_channel is not None
]
except ValueError:
log.warning(
@ -346,6 +359,14 @@ class Operator:
" using lexical order to determine default channel",
self,
)
version_channel_pairs = [
(
x.csv_operator_version,
x.default_channel,
)
for x in self.all_bundles()
if x.default_channel is not None
]
try:
return sorted(version_channel_pairs)[-1][1]
except IndexError:
@ -428,15 +449,15 @@ class Operator:
return self._replaces_graph(channel, all_bundles)
raise ValueError(f"{self}: unknown updateGraph value: {update_strategy}")
def __eq__(self, other: object) -> bool:
def __eq__(self, other: Any) -> bool:
if not isinstance(other, self.__class__):
return False
return self.operator_name == other.operator_name
def __ne__(self, other: object) -> bool:
def __ne__(self, other: Any) -> bool:
return not self == other
def __lt__(self, other: object) -> bool:
def __lt__(self, other: Any) -> bool:
if not isinstance(other, self.__class__):
raise TypeError(
f"Can't compare {self.__class__.__name__} to {other.__class__.__name__}"
@ -446,7 +467,7 @@ class Operator:
def __iter__(self) -> Iterator[Bundle]:
yield from self.all_bundles()
def __hash__(self):
def __hash__(self) -> int:
return hash((self.operator_name,))
def __repr__(self) -> str:
@ -456,6 +477,8 @@ class Operator:
class Repo:
"""A repository containing a collection of operators"""
_operator_cache: dict[str, Operator]
OPERATORS_DIR = "operators"
def __init__(self, repo_path: Union[str, Path]):
@ -539,7 +562,7 @@ class Repo:
def __iter__(self) -> Iterator[Operator]:
yield from self.all_operators()
def __eq__(self, other: object) -> bool:
def __eq__(self, other: Any) -> bool:
if not isinstance(other, self.__class__):
return False
return self._repo_path == other._repo_path

View File

@ -72,12 +72,13 @@ def show(target: Union[Repo, Operator, Bundle], recursive: bool = False) -> None
show_bundle(target, recursive, 2 * recursive)
def action_list(repo_path, *what: str, recursive: bool = False) -> None:
repo = Repo(repo_path)
def action_list(repo: Repo, *what: str, recursive: bool = False) -> None:
if what:
targets = (parse_target(repo, x) for x in what)
targets: Iterator[Union[Repo, Operator, Bundle]] = (
parse_target(repo, x) for x in what
)
else:
targets = [repo]
targets = iter([repo])
for target in targets:
show(target, recursive)
@ -93,16 +94,17 @@ def _walk(
yield from target.all_bundles()
def action_check(
repo_path: Path, suite: str, *what: str, recursive: bool = False
) -> None:
repo = Repo(repo_path)
def action_check(repo: Repo, suite: str, *what: str, recursive: bool = False) -> None:
if what:
targets = [parse_target(repo, x) for x in what]
targets: Iterator[Union[Repo, Operator, Bundle]] = (
parse_target(repo, x) for x in what
)
else:
targets = repo.all_operators()
if recursive:
all_targets = chain.from_iterable(_walk(x) for x in targets)
all_targets: Iterator[Union[Repo, Operator, Bundle]] = chain.from_iterable(
_walk(x) for x in targets
)
else:
all_targets = targets
for result in run_suite(all_targets, suite_name=suite):

View File

@ -4,21 +4,23 @@ from typing import Any, Optional, Union
import yaml
def merge(a: dict, b: dict, path: Optional[list[str]] = None) -> dict:
def merge(
dst: dict[Any, Any], src: dict[Any, Any], path: Optional[list[str]] = None
) -> dict[Any, Any]:
"""
Recursively merge two dictionaries, with values from the second dictionary (b)
overwriting corresponding values in the first dictionary (a). This function can
Recursively merge two dictionaries, with values from the second dictionary (src)
overwriting corresponding values in the first dictionary (dst). This function can
handle nested dictionaries.
Args:
a (dict): The base dictionary to merge into.
b (dict): The dictionary with values to merge into the base dictionary.
dst (dict): The base dictionary to merge into.
src (dict): The dictionary with values to merge into the base dictionary.
path (list of str, optional): A list representing the current path in the recursive merge.
This argument is used internally for handling nested dictionaries. Users typically
don't need to provide this argument. Defaults to None.
Returns:
dict: The merged dictionary (a) after incorporating values from the second dictionary (b).
dict: The merged dictionary (dst) after incorporating values from the second dictionary (src).
Example:
dict_a = {"key1": 42, "key2": {"subkey1": "value1"}}
@ -29,18 +31,18 @@ def merge(a: dict, b: dict, path: Optional[list[str]] = None) -> dict:
"""
if path is None:
path = []
for key in b:
if key in a:
if isinstance(a[key], dict) and isinstance(b[key], dict):
merge(a[key], b[key], path + [str(key)])
for key in src:
if key in dst:
if isinstance(dst[key], dict) and isinstance(src[key], dict):
merge(dst[key], src[key], path + [str(key)])
else:
a[key] = b[key]
dst[key] = src[key]
else:
a[key] = b[key]
return a
dst[key] = src[key]
return dst
def create_files(path: Union[str, Path], *contents: dict) -> None:
def create_files(path: Union[str, Path], *contents: dict[str, Any]) -> None:
"""
Create files and directories at the specified path based on the provided content.
@ -80,8 +82,10 @@ def create_files(path: Union[str, Path], *contents: dict) -> None:
full_path.mkdir(parents=True, exist_ok=True)
else:
full_path.parent.mkdir(parents=True, exist_ok=True)
if isinstance(content, (str, bytes)):
if isinstance(content, str):
full_path.write_text(content)
elif isinstance(content, bytes):
full_path.write_bytes(content)
else:
full_path.write_text(yaml.safe_dump(content))
@ -89,10 +93,10 @@ def create_files(path: Union[str, Path], *contents: dict) -> None:
def bundle_files(
operator_name: str,
bundle_version: str,
annotations: dict = None,
csv: dict = None,
other_files: dict = None,
) -> dict:
annotations: Optional[dict[str, Any]] = None,
csv: Optional[dict[str, Any]] = None,
other_files: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
"""
Create a bundle of files and metadata for an Operator package.
@ -155,11 +159,11 @@ def bundle_files(
)
def make_nested_dict(items: dict[str, Any]) -> dict:
def make_nested_dict(items: dict[str, Any]) -> dict[str, Any]:
    """
    make_nested_dict({"foo.bar": "baz"}) -> {"foo": {"bar": "baz"}}
    """
result = {}
result: dict[str, Any] = {}
for path, value in items.items():
current = result
keys = path.split(".")

View File

@ -1,6 +1,6 @@
import re
from pathlib import Path
from typing import Union
from typing import Any, Union
import pytest
@ -10,7 +10,7 @@ from tests import bundle_files, create_files, make_nested_dict
@pytest.mark.parametrize(
"bundle, extra_files, expected_results",
"base_bundle_files, extra_files, expected_results",
[
(
bundle_files("hello", "0.0.1"),
@ -37,13 +37,16 @@ from tests import bundle_files, create_files, make_nested_dict
},
),
],
False,
["Names ok", "Wrong annotations.yaml", "Empty annotations.yaml"],
indirect=False,
ids=["Names ok", "Wrong annotations.yaml", "Empty annotations.yaml"],
)
def test_operator_name(
tmp_path: Path, bundle: dict, extra_files: dict, expected_results: set[str]
tmp_path: Path,
base_bundle_files: dict[str, Any],
extra_files: dict[str, Any],
expected_results: set[str],
) -> None:
create_files(tmp_path, bundle, extra_files)
create_files(tmp_path, base_bundle_files, extra_files)
repo = Repo(tmp_path)
operator = next(repo.all_operators())
bundle = next(operator.all_bundles())
@ -51,7 +54,7 @@ def test_operator_name(
@pytest.mark.parametrize(
"bundle, extra_files, expected_results",
"base_bundle_files, extra_files, expected_results",
[
(
bundle_files("hello", "0.0.1"),
@ -122,11 +125,11 @@ def test_operator_name(
(
bundle_files("hello", "0.0.1"),
{"operators/hello/0.0.1/manifests/hello.clusterserviceversion.yaml": ""},
re.compile("Invalid CSV contents "),
"Invalid CSV contents ",
),
],
False,
[
indirect=False,
ids=[
"Missing containerImage",
"Missing deployments",
"Matching images",
@ -136,25 +139,25 @@ def test_operator_name(
)
def test_image(
tmp_path: Path,
bundle: dict,
extra_files: dict,
expected_results: Union[set[str], re.Pattern],
base_bundle_files: dict[str, Any],
extra_files: dict[str, Any],
expected_results: Union[set[str], str],
) -> None:
create_files(tmp_path, bundle, extra_files)
create_files(tmp_path, base_bundle_files, extra_files)
repo = Repo(tmp_path)
operator = next(repo.all_operators())
bundle = next(operator.all_bundles())
reasons = {x.reason for x in check_image(bundle)}
if isinstance(expected_results, re.Pattern):
if isinstance(expected_results, str):
assert len(reasons) == 1
reason = reasons.pop()
assert expected_results.match(reason)
assert expected_results in reason
else:
assert reasons == expected_results
@pytest.mark.parametrize(
"bundle, extra_files, expected_results",
"base_bundle_files, extra_files, expected_results",
[
(
bundle_files("hello", "0.0.1"),
@ -170,13 +173,16 @@ def test_image(
},
),
],
False,
["All versions ok", "Both versions invalid"],
indirect=False,
ids=["All versions ok", "Both versions invalid"],
)
def test_semver(
tmp_path: Path, bundle: dict, extra_files: dict, expected_results: set[str]
tmp_path: Path,
base_bundle_files: dict[str, Any],
extra_files: dict[str, Any],
expected_results: set[str],
) -> None:
create_files(tmp_path, bundle, extra_files)
create_files(tmp_path, base_bundle_files, extra_files)
repo = Repo(tmp_path)
operator = next(repo.all_operators())
bundle = next(operator.all_bundles())

View File

@ -1,11 +1,7 @@
from pathlib import Path
from unittest.mock import MagicMock, call, patch
import pytest
from operator_repo import Bundle, Repo
from operator_repo import Bundle
from operator_repo.checks import Fail, Warn, get_checks, run_check, run_suite
from tests import bundle_files, create_files
def test_check_result() -> None:
@ -26,11 +22,10 @@ def test_check_result() -> None:
@patch("importlib.import_module")
def test_get_checks(mock_import_module: MagicMock) -> None:
fake_module = MagicMock()
def check_fake(x):
def check_fake(_something): # type: ignore
pass
fake_module = MagicMock()
fake_module.check_fake = check_fake
fake_module.non_check_bar = lambda x: None
mock_import_module.return_value = fake_module
@ -56,7 +51,7 @@ def test_get_checks_missing_modules(mock_import_module: MagicMock) -> None:
def test_run_check(mock_bundle: Bundle) -> None:
def check_fake(x):
def check_fake(_something): # type: ignore
yield Warn("foo")
assert list(run_check(check_fake, mock_bundle)) == [
@ -66,7 +61,7 @@ def test_run_check(mock_bundle: Bundle) -> None:
@patch("operator_repo.checks.get_checks")
def test_run_suite(mock_get_checks: MagicMock, mock_bundle: Bundle) -> None:
def check_fake(x):
def check_fake(_something): # type: ignore
yield Warn("foo")
mock_get_checks.return_value = {"bundle": [check_fake], "operator": []}

View File

@ -1,4 +1,5 @@
from pathlib import Path
from typing import Any
import pytest
@ -42,8 +43,8 @@ from tests import bundle_files, create_files
{"Bundle(hello/0.0.2) has invalid 'replaces' field: 'rubbish'"},
),
],
False,
[
indirect=False,
ids=[
"Single bundle",
"Two bundles, no replaces",
"Two bundles",
@ -51,7 +52,10 @@ from tests import bundle_files, create_files
],
)
def test_upgrade(
tmp_path: Path, bundles: list[dict], operator_name: str, expected_results: set[str]
tmp_path: Path,
bundles: list[dict[str, Any]],
operator_name: str,
expected_results: set[str],
) -> None:
create_files(tmp_path, *bundles)
repo = Repo(tmp_path)

View File

@ -8,6 +8,10 @@ from tests import bundle_files, create_files
@pytest.fixture
def mock_bundle(tmp_path: Path) -> Bundle:
"""
Create a dummy file structure for a bundle and return the corresponding
Bundle object
"""
create_files(tmp_path, bundle_files("hello", "0.0.1"))
repo = Repo(tmp_path)
return repo.operator("hello").bundle("0.0.1")

View File

@ -2,7 +2,7 @@ from pathlib import Path
import pytest
from operator_repo import Bundle, Operator, Repo
from operator_repo import Bundle, Repo
from operator_repo.exceptions import InvalidBundleException, InvalidOperatorException
from tests import bundle_files, create_files
@ -23,9 +23,6 @@ def test_bundle(tmp_path: Path) -> None:
== "hello"
)
assert bundle.dependencies == []
assert bundle != "foo"
with pytest.raises(TypeError):
_ = bundle < "foo"
def test_bundle_compare(tmp_path: Path) -> None:
@ -48,6 +45,9 @@ def test_bundle_compare(tmp_path: Path) -> None:
assert hello_bundle_1 < hello_bundle_2
assert hello_bundle_1 != world_bundle_1
assert hello_bundle_1 < world_bundle_1
assert operator_hello.bundle("0.0.1") != operator_hello
with pytest.raises(TypeError):
_ = operator_hello.bundle("0.0.1") < operator_hello
def test_bundle_non_semver(tmp_path: Path) -> None:

View File

@ -40,5 +40,3 @@ def test_repo_one_bundle(tmp_path: Path) -> None:
assert set(repo) == {repo.operator("hello")}
assert repo.has("hello")
assert repo != "foo"
with pytest.raises(TypeError):
_ = repo < "foo"

View File

@ -1,5 +1,5 @@
[tox]
envlist = py{39,310,311}, pypy39
envlist = py{39,310,311,py39}
isolated_build = True ; This is required for a pyproject.toml based project.
[testenv]
@ -7,3 +7,4 @@ groups = ; Dependency groups in pyproject.toml
dev
commands =
pytest --cov-report term-missing --cov=operator_repo -v tests/
mypy --strict src tests