diff --git a/kopf/_cogs/configs/configuration.py b/kopf/_cogs/configs/configuration.py index bee55f90..5d1cda62 100644 --- a/kopf/_cogs/configs/configuration.py +++ b/kopf/_cogs/configs/configuration.py @@ -26,15 +26,16 @@ used interchangeably -- but so that it is understandable what is meant. """ import concurrent.futures -import dataclasses import logging from typing import Iterable, Optional, Union +import attrs + from kopf._cogs.configs import diffbase, progress from kopf._cogs.structs import reviews -@dataclasses.dataclass +@attrs.define class ProcessSettings: """ Settings for Kopf's OS processes: e.g. when started via CLI as `kopf run`. @@ -59,7 +60,7 @@ class ProcessSettings: """ -@dataclasses.dataclass +@attrs.define class PostingSettings: enabled: bool = True @@ -81,7 +82,7 @@ class PostingSettings: """ -@dataclasses.dataclass +@attrs.define class PeeringSettings: name: str = 'default' @@ -162,7 +163,7 @@ def namespaced(self, value: bool) -> None: self.clusterwide = not value -@dataclasses.dataclass +@attrs.define class WatchingSettings: server_timeout: Optional[float] = None @@ -187,7 +188,7 @@ class WatchingSettings: """ -@dataclasses.dataclass +@attrs.define class BatchingSettings: """ Settings for how raw events are batched and processed. @@ -224,7 +225,7 @@ class BatchingSettings: """ -@dataclasses.dataclass +@attrs.define class ScanningSettings: """ Settings for dynamic runtime observation of the cluster's setup. @@ -249,7 +250,7 @@ class ScanningSettings: """ -@dataclasses.dataclass +@attrs.define class AdmissionSettings: server: Optional[reviews.WebhookServerProtocol] = None @@ -290,14 +291,13 @@ class AdmissionSettings: """ -@dataclasses.dataclass +@attrs.define class ExecutionSettings: """ Settings for synchronous handlers execution (e.g. thread-/process-pools). 
""" - executor: concurrent.futures.Executor = dataclasses.field( - default_factory=concurrent.futures.ThreadPoolExecutor) + executor: concurrent.futures.Executor = attrs.Factory(concurrent.futures.ThreadPoolExecutor) """ The executor to be used for synchronous handler invocation. @@ -328,7 +328,7 @@ def max_workers(self, value: int) -> None: raise TypeError("Current executor does not support `max_workers`.") -@dataclasses.dataclass +@attrs.define class NetworkingSettings: request_timeout: Optional[float] = 5 * 60 # == aiohttp.client.DEFAULT_TIMEOUT @@ -353,7 +353,7 @@ class NetworkingSettings: """ -@dataclasses.dataclass +@attrs.define class PersistenceSettings: finalizer: str = 'kopf.zalando.org/KopfFinalizerMarker' @@ -362,20 +362,18 @@ class PersistenceSettings: from being deleted without framework's/operator's permission. """ - progress_storage: progress.ProgressStorage = dataclasses.field( - default_factory=progress.SmartProgressStorage) + progress_storage: progress.ProgressStorage = attrs.Factory(progress.SmartProgressStorage) """ How to persist the handlers' state between multiple handling cycles. """ - diffbase_storage: diffbase.DiffBaseStorage = dataclasses.field( - default_factory=diffbase.AnnotationsDiffBaseStorage) + diffbase_storage: diffbase.DiffBaseStorage = attrs.Factory(diffbase.AnnotationsDiffBaseStorage) """ How the resource's essence (non-technical, contentful fields) are stored. """ -@dataclasses.dataclass +@attrs.define class BackgroundSettings: """ Settings for background routines in general, daemons & timers specifically. 
@@ -434,16 +432,16 @@ class BackgroundSettings: """ -@dataclasses.dataclass +@attrs.define class OperatorSettings: - process: ProcessSettings = dataclasses.field(default_factory=ProcessSettings) - posting: PostingSettings = dataclasses.field(default_factory=PostingSettings) - peering: PeeringSettings = dataclasses.field(default_factory=PeeringSettings) - watching: WatchingSettings = dataclasses.field(default_factory=WatchingSettings) - batching: BatchingSettings = dataclasses.field(default_factory=BatchingSettings) - scanning: ScanningSettings = dataclasses.field(default_factory=ScanningSettings) - admission: AdmissionSettings =dataclasses.field(default_factory=AdmissionSettings) - execution: ExecutionSettings = dataclasses.field(default_factory=ExecutionSettings) - background: BackgroundSettings = dataclasses.field(default_factory=BackgroundSettings) - networking: NetworkingSettings = dataclasses.field(default_factory=NetworkingSettings) - persistence: PersistenceSettings = dataclasses.field(default_factory=PersistenceSettings) + process: ProcessSettings = attrs.Factory(ProcessSettings) + posting: PostingSettings = attrs.Factory(PostingSettings) + peering: PeeringSettings = attrs.Factory(PeeringSettings) + watching: WatchingSettings = attrs.Factory(WatchingSettings) + batching: BatchingSettings = attrs.Factory(BatchingSettings) + scanning: ScanningSettings = attrs.Factory(ScanningSettings) + admission: AdmissionSettings = attrs.Factory(AdmissionSettings) + execution: ExecutionSettings = attrs.Factory(ExecutionSettings) + background: BackgroundSettings = attrs.Factory(BackgroundSettings) + networking: NetworkingSettings = attrs.Factory(NetworkingSettings) + persistence: PersistenceSettings = attrs.Factory(PersistenceSettings) diff --git a/kopf/_cogs/structs/credentials.py b/kopf/_cogs/structs/credentials.py index 9a40bb3f..ffd22388 100644 --- a/kopf/_cogs/structs/credentials.py +++ b/kopf/_cogs/structs/credentials.py @@ -25,12 +25,13 @@ """ import asyncio import 
collections -import dataclasses import datetime import random from typing import AsyncIterable, AsyncIterator, Callable, Dict, List, \ Mapping, NewType, Optional, Tuple, TypeVar, cast +import attrs + from kopf._cogs.aiokits import aiotoggles @@ -42,7 +43,7 @@ class AccessError(Exception): """ Raised when the operator cannot access the cluster API. """ -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class ConnectionInfo: """ A single endpoint with specific credentials and connection flags to use. @@ -70,7 +71,7 @@ class ConnectionInfo: VaultKey = NewType('VaultKey', str) -@dataclasses.dataclass +@attrs.define class VaultItem: """ The actual item stored in the vault. It is never exposed externally. diff --git a/kopf/_cogs/structs/references.py b/kopf/_cogs/structs/references.py index 3782754f..e01317ee 100644 --- a/kopf/_cogs/structs/references.py +++ b/kopf/_cogs/structs/references.py @@ -1,5 +1,4 @@ import asyncio -import dataclasses import enum import fnmatch import re @@ -7,6 +6,8 @@ from typing import Collection, FrozenSet, Iterable, Iterator, List, Mapping, \ MutableMapping, NewType, Optional, Pattern, Set, Union +import attrs + # A namespace specification with globs, negations, and some minimal syntax; see `match_namespace()`. # Regexps are also supported if pre-compiled from the code, not from the CLI options as raw strings. NamespacePattern = Union[str, Pattern] @@ -100,7 +101,7 @@ def match_namespace(name: NamespaceName, pattern: NamespacePattern) -> bool: K8S_VERSION_PATTERN = re.compile(r'^v\d+(?:(?:alpha|beta)\d+)?$') -@dataclasses.dataclass(frozen=True, eq=False, repr=False) +@attrs.define(frozen=True) class Resource: """ A reference to a very specific custom or built-in resource kind. @@ -250,7 +251,7 @@ class Marker(enum.Enum): EVERYTHING = Marker.EVERYTHING -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True, init=False) class Selector: """ A resource specification that can match several resource kinds. 
@@ -265,15 +266,8 @@ class Selector: resource kinds. Even if those specifications look very concrete and allow no variations, they still remain specifications. """ - - arg1: dataclasses.InitVar[Union[None, str, Marker]] = None - arg2: dataclasses.InitVar[Union[None, str, Marker]] = None - arg3: dataclasses.InitVar[Union[None, str, Marker]] = None - argN: dataclasses.InitVar[None] = None # a runtime guard against too many positional arguments - group: Optional[str] = None version: Optional[str] = None - kind: Optional[str] = None plural: Optional[str] = None singular: Optional[str] = None @@ -281,45 +275,50 @@ class Selector: category: Optional[str] = None any_name: Optional[Union[str, Marker]] = None - def __post_init__( + def __init__( self, - arg1: Union[None, str, Marker], - arg2: Union[None, str, Marker], - arg3: Union[None, str, Marker], - argN: None, # a runtime guard against too many positional arguments + arg1: Union[None, str, Marker] = None, + arg2: Union[None, str, Marker] = None, + arg3: Union[None, str, Marker] = None, + *, + group: Optional[str] = None, + version: Optional[str] = None, + kind: Optional[str] = None, + plural: Optional[str] = None, + singular: Optional[str] = None, + shortcut: Optional[str] = None, + category: Optional[str] = None, + any_name: Optional[Union[str, Marker]] = None, ) -> None: + super().__init__() - # Since the class is frozen & read-only, post-creation field adjustment is done via a hack. - # This is the same hack as used in the frozen dataclasses to initialise their fields. - if argN is not None: - raise TypeError("Too many positional arguments. 
Max 3 positional args are accepted.") + if arg3 is not None and not isinstance(arg1, Marker) and not isinstance(arg2, Marker): + group, version, any_name = arg1, arg2, arg3 elif arg3 is not None: - object.__setattr__(self, 'group', arg1) - object.__setattr__(self, 'version', arg2) - object.__setattr__(self, 'any_name', arg3) + raise TypeError("Only the last positional argument can be an everything-marker.") elif arg2 is not None and isinstance(arg1, str) and '/' in arg1: - object.__setattr__(self, 'group', arg1.rsplit('/', 1)[0]) - object.__setattr__(self, 'version', arg1.rsplit('/')[-1]) - object.__setattr__(self, 'any_name', arg2) - elif arg2 is not None and arg1 == 'v1': - object.__setattr__(self, 'group', '') - object.__setattr__(self, 'version', arg1) - object.__setattr__(self, 'any_name', arg2) - elif arg2 is not None: - object.__setattr__(self, 'group', arg1) - object.__setattr__(self, 'any_name', arg2) + group, version = arg1.rsplit('/', 1) + any_name = arg2 + elif arg2 is not None and isinstance(arg1, str) and arg1 == 'v1': + group, version, any_name = '', arg1, arg2 + elif arg2 is not None and not isinstance(arg1, Marker): + group, any_name = arg1, arg2 elif arg1 is not None and isinstance(arg1, Marker): - object.__setattr__(self, 'any_name', arg1) + any_name = arg1 elif arg1 is not None and '.' in arg1 and K8S_VERSION_PATTERN.match(arg1.split('.')[1]): if len(arg1.split('.')) >= 3: - object.__setattr__(self, 'group', arg1.split('.', 2)[2]) - object.__setattr__(self, 'version', arg1.split('.')[1]) - object.__setattr__(self, 'any_name', arg1.split('.')[0]) + any_name, version, group = arg1.split('.', 2) + else: + any_name, version = arg1.split('.') elif arg1 is not None and '.' 
in arg1: - object.__setattr__(self, 'group', arg1.split('.', 1)[1]) - object.__setattr__(self, 'any_name', arg1.split('.')[0]) + any_name, group = arg1.split('.', 1) elif arg1 is not None: - object.__setattr__(self, 'any_name', arg1) + any_name = arg1 + + self.__attrs_init__( + group=group, version=version, kind=kind, plural=plural, singular=singular, + shortcut=shortcut, category=category, any_name=any_name + ) # Verify that explicit & interpreted arguments have produced an unambiguous specification. names = [self.kind, self.plural, self.singular, self.shortcut, self.category, self.any_name] @@ -336,8 +335,7 @@ def __post_init__( raise TypeError("Names must not be empty strings; either None or specific strings.") def __repr__(self) -> str: - kwargs = {f.name: getattr(self, f.name) for f in dataclasses.fields(self)} - kwtext = ', '.join([f'{key!s}={val!r}' for key, val in kwargs.items() if val is not None]) + kwtext = ', '.join([f'{k!s}={v!r}' for k, v in attrs.asdict(self).items() if v is not None]) clsname = self.__class__.__name__ return f'{clsname}({kwtext})' @@ -473,7 +471,7 @@ async def wait_for( return self[selector] -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class Insights: """ Actual resources & namespaces served by the operator. @@ -483,15 +481,15 @@ class Insights: # - **Indexed** resources block the operator startup until all objects are initially indexed. # - **Watched** resources spawn the watch-streams; the set excludes all webhook-only resources. # - **Webhook** resources are served via webhooks; the set excludes all watch-only resources. 
- webhook_resources: Set[Resource] = dataclasses.field(default_factory=set) - indexed_resources: Set[Resource] = dataclasses.field(default_factory=set) - watched_resources: Set[Resource] = dataclasses.field(default_factory=set) - namespaces: Set[Namespace] = dataclasses.field(default_factory=set) - backbone: Backbone = dataclasses.field(default_factory=Backbone) + webhook_resources: Set[Resource] = attrs.field(factory=set) + indexed_resources: Set[Resource] = attrs.field(factory=set) + watched_resources: Set[Resource] = attrs.field(factory=set) + namespaces: Set[Namespace] = attrs.field(factory=set) + backbone: Backbone = attrs.field(factory=Backbone) # Signalled when anything changes in the insights. - revised: asyncio.Condition = dataclasses.field(default_factory=asyncio.Condition) + revised: asyncio.Condition = attrs.field(factory=asyncio.Condition) # The flags that are set after the initial listing is finished. Not cleared afterwards. - ready_namespaces: asyncio.Event = dataclasses.field(default_factory=asyncio.Event) - ready_resources: asyncio.Event = dataclasses.field(default_factory=asyncio.Event) + ready_namespaces: asyncio.Event = attrs.field(factory=asyncio.Event) + ready_resources: asyncio.Event = attrs.field(factory=asyncio.Event) diff --git a/kopf/_core/actions/execution.py b/kopf/_core/actions/execution.py index 37573f69..330123d4 100644 --- a/kopf/_core/actions/execution.py +++ b/kopf/_core/actions/execution.py @@ -9,13 +9,13 @@ """ import asyncio import contextlib -import dataclasses import datetime import enum from contextvars import ContextVar from typing import Any, AsyncContextManager, AsyncIterator, Callable, Collection, Iterable, \ Mapping, MutableMapping, NewType, Optional, Sequence, Set, TypeVar +import attrs from typing_extensions import Protocol from kopf._cogs.configs import configuration @@ -66,7 +66,7 @@ class ErrorsMode(enum.Enum): Result = NewType('Result', object) -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class 
Outcome: """ An in-memory outcome of one single invocation of one single handler. @@ -86,7 +86,7 @@ class Outcome: subrefs: Collection[ids.HandlerId] = () -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True, kw_only=True) class HandlerState: """ A persisted state of a single handler, as stored on the resource's status. @@ -130,21 +130,20 @@ class State(Mapping[ids.HandlerId, HandlerState]): pass -@dataclasses.dataclass +@attrs.define class Cause(invocation.Kwargable): """ Base non-specific cause as used in the framework's reactor. """ logger: typedefs.Logger @property def _kwargs(self) -> Mapping[str, Any]: - # Similar to `dataclasses.asdict()`, but not recursive for other dataclasses. - return {field.name: getattr(self, field.name) for field in dataclasses.fields(self)} + return attrs.asdict(self, recurse=False) CauseT = TypeVar('CauseT', bound=Cause) -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class Handler: """ A handler is a function bound with its behavioral constraints. """ id: ids.HandlerId diff --git a/kopf/_core/actions/invocation.py b/kopf/_core/actions/invocation.py index 3849a0fb..66a2fccf 100644 --- a/kopf/_core/actions/invocation.py +++ b/kopf/_core/actions/invocation.py @@ -37,6 +37,7 @@ class Kwargable: function invocation module on the specialised causation logic & structures. For this reason, the `Cause` & `Kwargable` classes are split. 
""" + __slots__ = () @property def _kwargs(self) -> Mapping[str, Any]: diff --git a/kopf/_core/actions/progression.py b/kopf/_core/actions/progression.py index 65367ee5..158f9128 100644 --- a/kopf/_core/actions/progression.py +++ b/kopf/_core/actions/progression.py @@ -13,17 +13,18 @@ import collections.abc import copy -import dataclasses import datetime from typing import Any, Collection, Dict, Iterable, Iterator, \ Mapping, NamedTuple, Optional, overload +import attrs + from kopf._cogs.configs import progress from kopf._cogs.structs import bodies, ids, patches from kopf._core.actions import execution -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True, kw_only=True) class HandlerState(execution.HandlerState): """ A persisted state of a single handler, as stored on the resource's status. @@ -71,7 +72,7 @@ def from_storage(cls, __d: progress.ProgressRecord) -> "HandlerState": failure=__d.get('failure') or False, message=__d.get('message'), subrefs=__d.get('subrefs') or (), - _origin=__d, + origin=__d, ) def for_storage(self) -> progress.ProgressRecord: @@ -92,13 +93,13 @@ def as_in_storage(self) -> Mapping[str, Any]: return {key: val for key, val in self.for_storage().items() if val is not None} def as_active(self) -> "HandlerState": - return dataclasses.replace(self, active=True) + return attrs.evolve(self, active=True) def with_purpose( self, purpose: Optional[str], ) -> "HandlerState": - return dataclasses.replace(self, purpose=purpose) + return attrs.evolve(self, purpose=purpose) def with_outcome( self, @@ -117,7 +118,7 @@ def with_outcome( retries=(self.retries if self.retries is not None else 0) + 1, message=None if outcome.exception is None else str(outcome.exception), subrefs=list(sorted(set(self.subrefs) | set(outcome.subrefs))), # deduplicate - _origin=self._origin, + origin=self._origin, ) diff --git a/kopf/_core/actions/throttlers.py b/kopf/_core/actions/throttlers.py index c591b14b..eee85e0a 100644 --- a/kopf/_core/actions/throttlers.py +++ 
b/kopf/_core/actions/throttlers.py @@ -1,14 +1,15 @@ import asyncio import contextlib -import dataclasses import time from typing import AsyncGenerator, Iterable, Iterator, Optional, Tuple, Type, Union +import attrs + from kopf._cogs.aiokits import aiotime from kopf._cogs.helpers import typedefs -@dataclasses.dataclass(frozen=False) +@attrs.define class Throttler: """ A state of throttling for one specific purpose (there can be a few). """ source_of_delays: Optional[Iterator[float]] = None diff --git a/kopf/_core/engines/daemons.py b/kopf/_core/engines/daemons.py index 31691701..2f69a793 100644 --- a/kopf/_core/engines/daemons.py +++ b/kopf/_core/engines/daemons.py @@ -22,12 +22,13 @@ """ import abc import asyncio -import dataclasses import time import warnings from typing import Collection, Dict, Iterable, List, Mapping, \ MutableMapping, Optional, Sequence, Set +import attrs + from kopf._cogs.aiokits import aiotasks, aiotime, aiotoggles from kopf._cogs.configs import configuration from kopf._cogs.helpers import typedefs @@ -36,7 +37,7 @@ from kopf._core.intents import causes, handlers as handlers_, stoppers -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class Daemon: task: aiotasks.Task # a guarding task of the daemon. logger: typedefs.Logger @@ -44,13 +45,13 @@ class Daemon: stopper: stoppers.DaemonStopper # a signaller for the termination and its reason. -@dataclasses.dataclass(frozen=False) +@attrs.define class DaemonsMemory: # For background and timed threads/tasks (invoked with the kwargs of the last-seen body). 
live_fresh_body: Optional[bodies.Body] = None - idle_reset_time: float = dataclasses.field(default_factory=time.monotonic) - forever_stopped: Set[ids.HandlerId] = dataclasses.field(default_factory=set) - running_daemons: Dict[ids.HandlerId, Daemon] = dataclasses.field(default_factory=dict) + idle_reset_time: float = attrs.field(factory=time.monotonic) + forever_stopped: Set[ids.HandlerId] = attrs.field(factory=set) + running_daemons: Dict[ids.HandlerId, Daemon] = attrs.field(factory=dict) class DaemonsMemoriesIterator(metaclass=abc.ABCMeta): diff --git a/kopf/_core/engines/indexing.py b/kopf/_core/engines/indexing.py index c89f5fb2..9075ffd5 100644 --- a/kopf/_core/engines/indexing.py +++ b/kopf/_core/engines/indexing.py @@ -1,7 +1,8 @@ import collections.abc -import dataclasses from typing import Any, Dict, Generic, Iterable, Iterator, Mapping, Optional, Set, Tuple, TypeVar +import attrs + from kopf._cogs.configs import configuration from kopf._cogs.helpers import typedefs from kopf._cogs.structs import bodies, ephemera, ids, patches, references @@ -222,7 +223,7 @@ def make_key(self, body: bodies.Body) -> Key: so its structure and type can be safely changed any time. However, the key must be as lightweight as possible: - no dataclasses or namedtuples, only builtins. + no dataclasses or namedtuples or slotted classes, only the builtins. The name and namespace do not add value on top of the uid's uniqueness. They are here for debugging and for those rare objects @@ -270,7 +271,7 @@ def __contains__(self, id: object) -> bool: return id in self.__indexers -@dataclasses.dataclass(frozen=False) +@attrs.define class IndexingMemory: # For indexing errors backoffs/retries/timeouts. It is None when successfully indexed. 
indexing_state: Optional[progression.State] = None diff --git a/kopf/_core/intents/causes.py b/kopf/_core/intents/causes.py index 9a2c5e18..c1bba107 100644 --- a/kopf/_core/intents/causes.py +++ b/kopf/_core/intents/causes.py @@ -19,10 +19,11 @@ not when it is actually deleted (as the events notify): so that the handlers could execute on the yet-existing object (and its children, if created). """ -import dataclasses import enum from typing import Any, List, Mapping, Optional +import attrs + from kopf._cogs.configs import configuration from kopf._cogs.structs import bodies, diffs, ephemera, finalizers, \ ids, patches, references, reviews @@ -84,7 +85,7 @@ def __str__(self) -> str: } -@dataclasses.dataclass +@attrs.define # unfrozen for tests: they change the fields sometimes. TODO: make it frozen. class BaseCause(execution.Cause): """ Base non-specific cause as used in the framework's reactor in most cases. @@ -115,13 +116,13 @@ def _super_kwargs(self) -> Mapping[str, Any]: return self.indices -@dataclasses.dataclass +@attrs.define class ActivityCause(BaseCause): activity: Activity settings: configuration.OperatorSettings -@dataclasses.dataclass +@attrs.define class ResourceCause(BaseCause): resource: references.Resource patch: patches.Patch @@ -142,7 +143,7 @@ def _kwargs(self) -> Mapping[str, Any]: ) -@dataclasses.dataclass +@attrs.define class WebhookCause(ResourceCause): dryrun: bool reason: Optional[WebhookType] # None means "all" or expects the webhook id @@ -165,7 +166,7 @@ def _kwargs(self) -> Mapping[str, Any]: return kwargs -@dataclasses.dataclass +@attrs.define class IndexingCause(ResourceCause): """ The raw event received from the API. @@ -173,7 +174,7 @@ class IndexingCause(ResourceCause): pass -@dataclasses.dataclass +@attrs.define class WatchingCause(ResourceCause): """ The raw event received from the API. 
@@ -184,7 +185,7 @@ class WatchingCause(ResourceCause): event: bodies.RawEvent -@dataclasses.dataclass +@attrs.define class SpawningCause(ResourceCause): """ An internal daemon is spawning: tasks, threads, timers. @@ -201,7 +202,7 @@ def _kwargs(self) -> Mapping[str, Any]: return kwargs -@dataclasses.dataclass +@attrs.define class ChangingCause(ResourceCause): """ The cause is what has caused the whole reaction as a chain of handlers. @@ -227,7 +228,7 @@ def deleted(self) -> bool: return finalizers.is_deletion_ongoing(self.body) -@dataclasses.dataclass +@attrs.define class DaemonCause(ResourceCause): """ An exceptional case of a container for daemon invocation kwargs. diff --git a/kopf/_core/intents/handlers.py b/kopf/_core/intents/handlers.py index 59cbe388..63e769e7 100644 --- a/kopf/_core/intents/handlers.py +++ b/kopf/_core/intents/handlers.py @@ -1,12 +1,13 @@ -import dataclasses from typing import Optional, cast +import attrs + from kopf._cogs.structs import dicts, diffs, references from kopf._core.actions import execution from kopf._core.intents import callbacks, causes, filters -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class ActivityHandler(execution.Handler): fn: callbacks.ActivityFn # typing clarification activity: Optional[causes.Activity] @@ -16,7 +17,7 @@ def __str__(self) -> str: return f"Activity {self.id!r}" -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class ResourceHandler(execution.Handler): selector: Optional[references.Selector] # None is used only in sub-handlers labels: Optional[filters.MetaFilter] @@ -30,13 +31,13 @@ def adjust_cause(self, cause: execution.CauseT) -> execution.CauseT: old = dicts.resolve(cause.old, self.field, None) new = dicts.resolve(cause.new, self.field, None) diff = diffs.reduce(cause.diff, self.field) - new_cause = dataclasses.replace(cause, old=old, new=new, diff=diff) + new_cause = attrs.evolve(cause, old=old, new=new, diff=diff) return cast(execution.CauseT, new_cause) # 
TODO: mypy bug? else: return cause -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class WebhookHandler(ResourceHandler): fn: callbacks.WebhookFn # typing clarification reason: causes.WebhookType @@ -50,7 +51,7 @@ def __str__(self) -> str: return f"Webhook {self.id!r}" -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class IndexingHandler(ResourceHandler): fn: callbacks.IndexingFn # typing clarification @@ -58,12 +59,12 @@ def __str__(self) -> str: return f"Indexer {self.id!r}" -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class WatchingHandler(ResourceHandler): fn: callbacks.WatchingFn # typing clarification -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class ChangingHandler(ResourceHandler): fn: callbacks.ChangingFn # typing clarification reason: Optional[causes.Reason] @@ -75,13 +76,13 @@ class ChangingHandler(ResourceHandler): new: Optional[filters.ValueFilter] -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class SpawningHandler(ResourceHandler): requires_finalizer: Optional[bool] initial_delay: Optional[float] -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class DaemonHandler(SpawningHandler): fn: callbacks.DaemonFn # typing clarification cancellation_backoff: Optional[float] # how long to wait before actual cancellation. 
@@ -92,7 +93,7 @@ def __str__(self) -> str: return f"Daemon {self.id!r}" -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class TimerHandler(SpawningHandler): fn: callbacks.TimerFn # typing clarification sharp: Optional[bool] diff --git a/kopf/_core/intents/registries.py b/kopf/_core/intents/registries.py index df0efb19..b9d929a9 100644 --- a/kopf/_core/intents/registries.py +++ b/kopf/_core/intents/registries.py @@ -276,7 +276,7 @@ def __init__(self) -> None: activity=causes.Activity.AUTHENTICATION, errors=execution.ErrorsMode.IGNORED, param=None, timeout=None, retries=None, backoff=None, - _fallback=True, + fallback=True, )) if piggybacking.has_client(): self._activities.append(handlers.ActivityHandler( @@ -285,7 +285,7 @@ def __init__(self) -> None: activity=causes.Activity.AUTHENTICATION, errors=execution.ErrorsMode.IGNORED, param=None, timeout=None, retries=None, backoff=None, - _fallback=True, + fallback=True, )) # As a last resort, fall back to rudimentary logins if no advanced ones are available. @@ -297,7 +297,7 @@ def __init__(self) -> None: activity=causes.Activity.AUTHENTICATION, errors=execution.ErrorsMode.IGNORED, param=None, timeout=None, retries=None, backoff=None, - _fallback=True, + fallback=True, )) if not thirdparties_present and piggybacking.has_service_account(): self._activities.append(handlers.ActivityHandler( @@ -306,7 +306,7 @@ def __init__(self) -> None: activity=causes.Activity.AUTHENTICATION, errors=execution.ErrorsMode.IGNORED, param=None, timeout=None, retries=None, backoff=None, - _fallback=True, + fallback=True, )) diff --git a/kopf/_core/reactor/inventory.py b/kopf/_core/reactor/inventory.py index a17c4394..9789642d 100644 --- a/kopf/_core/reactor/inventory.py +++ b/kopf/_core/reactor/inventory.py @@ -18,21 +18,22 @@ must be kept together with their owning modules rather than mirrored in structs. 
""" import copy -import dataclasses from typing import Iterator, MutableMapping, Optional +import attrs + from kopf._cogs.structs import bodies, ephemera from kopf._core.actions import throttlers from kopf._core.engines import admission, daemons, indexing -@dataclasses.dataclass(frozen=False) +@attrs.define class ResourceMemory: """ A system memo about a single resource/object. Usually stored in `Memories`. """ - memo: ephemera.AnyMemo = dataclasses.field(default_factory=lambda: ephemera.AnyMemo(ephemera.Memo())) - error_throttler: throttlers.Throttler = dataclasses.field(default_factory=throttlers.Throttler) - indexing_memory: indexing.IndexingMemory = dataclasses.field(default_factory=indexing.IndexingMemory) - daemons_memory: daemons.DaemonsMemory = dataclasses.field(default_factory=daemons.DaemonsMemory) + memo: ephemera.AnyMemo = attrs.field(factory=lambda: ephemera.AnyMemo(ephemera.Memo())) + error_throttler: throttlers.Throttler = attrs.field(factory=throttlers.Throttler) + indexing_memory: indexing.IndexingMemory = attrs.field(factory=indexing.IndexingMemory) + daemons_memory: daemons.DaemonsMemory = attrs.field(factory=daemons.DaemonsMemory) # For resuming handlers tracking and deciding on should they be called or not. noticed_by_listing: bool = False diff --git a/kopf/_core/reactor/orchestration.py b/kopf/_core/reactor/orchestration.py index 1931a202..a32c5ab8 100644 --- a/kopf/_core/reactor/orchestration.py +++ b/kopf/_core/reactor/orchestration.py @@ -23,12 +23,13 @@ are started and stopped separately, not as part of the the orchestration. 
""" import asyncio -import dataclasses import functools import itertools import logging from typing import Any, Collection, Container, Dict, Iterable, MutableMapping, NamedTuple, Optional +import attrs + from kopf._cogs.aiokits import aiotasks, aiotoggles from kopf._cogs.configs import configuration from kopf._cogs.structs import references @@ -43,7 +44,7 @@ class EnsembleKey(NamedTuple): namespace: references.Namespace -@dataclasses.dataclass +@attrs.define(frozen=True) class Ensemble: # Global synchronisation point on the cache pre-populating stage and overall cache readiness. @@ -54,12 +55,12 @@ class Ensemble: # Multidimentional pausing: for every namespace, and a few for the whole cluster (for CRDs). operator_paused: aiotoggles.ToggleSet peering_missing: aiotoggles.Toggle - conflicts_found: Dict[EnsembleKey, aiotoggles.Toggle] = dataclasses.field(default_factory=dict) + conflicts_found: Dict[EnsembleKey, aiotoggles.Toggle] = attrs.field(factory=dict) # Multidimensional tasks -- one for every combination of relevant dimensions. 
- watcher_tasks: Dict[EnsembleKey, aiotasks.Task] = dataclasses.field(default_factory=dict) - peering_tasks: Dict[EnsembleKey, aiotasks.Task] = dataclasses.field(default_factory=dict) - pinging_tasks: Dict[EnsembleKey, aiotasks.Task] = dataclasses.field(default_factory=dict) + watcher_tasks: Dict[EnsembleKey, aiotasks.Task] = attrs.field(factory=dict) + peering_tasks: Dict[EnsembleKey, aiotasks.Task] = attrs.field(factory=dict) + pinging_tasks: Dict[EnsembleKey, aiotasks.Task] = attrs.field(factory=dict) def get_keys(self) -> Collection[EnsembleKey]: return (frozenset(self.watcher_tasks) | diff --git a/kopf/cli.py b/kopf/cli.py index 173d3509..0dc11995 100644 --- a/kopf/cli.py +++ b/kopf/cli.py @@ -1,9 +1,9 @@ import asyncio -import dataclasses import functools import os from typing import Any, Callable, Collection, List, Optional +import attrs import click from kopf._cogs.aiokits import aioadapters @@ -16,7 +16,7 @@ from kopf._core.reactor import running -@dataclasses.dataclass() +@attrs.define class CLIControls: """ `KopfRunner` controls, which are impossible to pass via CLI. 
""" ready_flag: Optional[aioadapters.Flag] = None diff --git a/setup.py b/setup.py index 4db2ee12..64d6246b 100644 --- a/setup.py +++ b/setup.py @@ -64,6 +64,7 @@ 'click', # 0.60 MB 'aiohttp<4.0.0', # 7.80 MB 'pyyaml', # 0.90 MB + 'attrs>=20.1.0', # 1.00 MB, comes from the dependencies anyway ], extras_require={ 'full-auth': [ diff --git a/tests/admission/conftest.py b/tests/admission/conftest.py index 181081fe..2a7f7fe7 100644 --- a/tests/admission/conftest.py +++ b/tests/admission/conftest.py @@ -1,8 +1,8 @@ import asyncio -import dataclasses import gc import warnings +import attrs import pyngrok.conf import pyngrok.ngrok import pytest @@ -99,7 +99,7 @@ def adm_request(resource, namespace): )) -@dataclasses.dataclass(frozen=True) +@attrs.define(frozen=True) class Responder: fn: WebhookFn fut: asyncio.Future # asyncio.Future[Response] diff --git a/tests/causation/test_kwargs.py b/tests/causation/test_kwargs.py index 6ee55c85..61a27cb1 100644 --- a/tests/causation/test_kwargs.py +++ b/tests/causation/test_kwargs.py @@ -1,7 +1,7 @@ -import dataclasses import logging from typing import Type +import attrs import pytest from mock import Mock @@ -26,7 +26,7 @@ ALL_FIELDS = { field.name for cause_cls in ALL_CAUSES - for field in dataclasses.fields(cause_cls) + for field in attrs.fields(cause_cls) } | {'stopped', 'body', 'spec', 'meta', 'status', 'name', 'namespace', 'labels', 'annotations'} @@ -38,7 +38,7 @@ def test_indices_overwrite_kwargs(cls: Type[BaseCause], name, attr): indexers['index1'] = OperatorIndexer() indexers['index2'] = OperatorIndexer() indexers[name] = OperatorIndexer() - mocks = {field.name: Mock() for field in dataclasses.fields(cls)} + mocks = {field.name: Mock() for field in attrs.fields(cls)} mocks['indices'] = indexers.indices cause = cls(**mocks) kwargs = getattr(cause, attr) # cause.kwargs / cause.sync_kwargs / cause.async_kwargs diff --git a/tests/conftest.py b/tests/conftest.py index ed11638c..65884fc3 100644 --- a/tests/conftest.py +++ 
b/tests/conftest.py @@ -1,5 +1,4 @@ import asyncio -import dataclasses import importlib import io import json @@ -10,6 +9,7 @@ from typing import Set import aiohttp.web +import attrs import pytest from mock import AsyncMock, Mock @@ -189,7 +189,7 @@ def registry(registry_factory): # -@dataclasses.dataclass(frozen=True, eq=False) +@attrs.define(frozen=True, eq=False) class K8sMocks: get: Mock post: Mock @@ -353,7 +353,7 @@ def hostname(): return 'fake-host' -@dataclasses.dataclass(frozen=True, eq=False, order=False) +@attrs.define(frozen=True, eq=False) class LoginMocks: pykube_in_cluster: Mock = None pykube_from_file: Mock = None diff --git a/tests/handling/conftest.py b/tests/handling/conftest.py index 72d0fdc4..3369edf2 100644 --- a/tests/handling/conftest.py +++ b/tests/handling/conftest.py @@ -33,9 +33,9 @@ all possible cases properly. In the top-level event handling, we assume they do, and only check for the upper-level behaviour, not all of the input combinations. """ -import dataclasses from typing import Callable +import attrs import pytest from mock import Mock @@ -48,7 +48,7 @@ def _auto_mocked(k8s_mocked): pass -@dataclasses.dataclass(frozen=True, eq=False, order=False) +@attrs.define(frozen=True, eq=False, order=False) class HandlersContainer: index_mock: Mock event_mock: Mock diff --git a/tests/k8s/test_patching.py b/tests/k8s/test_patching.py index a15258f3..1d247a68 100644 --- a/tests/k8s/test_patching.py +++ b/tests/k8s/test_patching.py @@ -1,6 +1,5 @@ -import dataclasses - import aiohttp.web +import attrs import pytest from kopf._cogs.clients.errors import APIError @@ -33,7 +32,7 @@ async def test_without_subresources( async def test_status_as_subresource_with_combined_payload( resp_mocker, aresponses, hostname, settings, resource, namespace, logger): - resource = dataclasses.replace(resource, subresources=['status']) + resource = attrs.evolve(resource, subresources=['status']) # Simulate Kopf's initial state and intention.
patch = Patch({'spec': {'x': 'y'}, 'status': {'s': 't'}}) @@ -77,7 +76,7 @@ async def test_status_as_subresource_with_combined_payload( async def test_status_as_subresource_with_object_fields_only( resp_mocker, aresponses, hostname, settings, resource, namespace, logger): - resource = dataclasses.replace(resource, subresources=['status']) + resource = attrs.evolve(resource, subresources=['status']) # Simulate Kopf's initial state and intention. patch = Patch({'spec': {'x': 'y'}}) @@ -118,7 +117,7 @@ async def test_status_as_subresource_with_object_fields_only( async def test_status_as_subresource_with_status_fields_only( resp_mocker, aresponses, hostname, settings, resource, namespace, logger): - resource = dataclasses.replace(resource, subresources=['status']) + resource = attrs.evolve(resource, subresources=['status']) # Simulate Kopf's initial state and intention. patch = Patch({'status': {'s': 't'}}) diff --git a/tests/references/test_selector_parsing.py b/tests/references/test_selector_parsing.py index 159bcf99..673669e0 100644 --- a/tests/references/test_selector_parsing.py +++ b/tests/references/test_selector_parsing.py @@ -119,9 +119,8 @@ def test_three_args(group, version, name): def test_too_many_args(): - with pytest.raises(TypeError) as err: + with pytest.raises(TypeError): Selector('group1', 'version1', 'name1', 'etc') - assert "Too many positional arguments" in str(err.value) def test_kwarg_group(): diff --git a/tests/registries/test_matching_of_callbacks.py b/tests/registries/test_matching_of_callbacks.py index 4aaff10c..2549cf0a 100644 --- a/tests/registries/test_matching_of_callbacks.py +++ b/tests/registries/test_matching_of_callbacks.py @@ -1,5 +1,4 @@ -import dataclasses - +import attrs import pytest from mock import Mock @@ -35,9 +34,9 @@ def handler(request, callback, selector): fn=some_fn, id='a', param=None, errors=None, timeout=None, retries=None, backoff=None, ) if request.param in ['annotations', 'labels']: - handler = 
dataclasses.replace(handler, **{request.param: {'known': callback}}) + handler = attrs.evolve(handler, **{request.param: {'known': callback}}) else: - handler = dataclasses.replace(handler, **{request.param: callback}) + handler = attrs.evolve(handler, **{request.param: callback}) return handler @@ -69,7 +68,7 @@ def test_callback_is_called_with_matching_resource( def test_callback_is_not_called_with_mismatching_resource( match_fn, callback, handler, cause, ): - cause = dataclasses.replace(cause, resource=Resource(group='x', version='y', plural='z')) + cause = attrs.evolve(cause, resource=Resource(group='x', version='y', plural='z')) result = match_fn(handler=handler, cause=cause) assert not result assert not callback.called