diff --git a/CHANGELOG.md b/CHANGELOG.md index d4a462a69..82d47df03 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,10 @@ Changes are grouped as follows - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. +## [0.91.3] - 2024-03-21 +### Added +* Regenerated the SDK with the latest version of `pygen` to get the GraphQL feature for querying and parsing responses from CDF + ## [0.91.2] - 2024-03-15 ### Changed * Changed property in `MultiScenarioMatrixRaw` for `shopResults` to `SHOPResultPriceProd` and added the `MultiScenarioMatrixRaw` to the `TotalBidCalculation` data model. diff --git a/cognite/powerops/_version.py b/cognite/powerops/_version.py index 4d050cb33..8e77cc68d 100644 --- a/cognite/powerops/_version.py +++ b/cognite/powerops/_version.py @@ -1 +1 @@ -__version__ = "0.91.2" +__version__ = "0.91.3" diff --git a/cognite/powerops/client/_generated/afrr_bid/_api/_core.py b/cognite/powerops/client/_generated/afrr_bid/_api/_core.py index 680554875..3c71c8747 100644 --- a/cognite/powerops/client/_generated/afrr_bid/_api/_core.py +++ b/cognite/powerops/client/_generated/afrr_bid/_api/_core.py @@ -21,6 +21,7 @@ DomainModelCore, DomainModelWrite, DomainRelationWrite, + GraphQLList, ResourcesWriteResult, T_DomainModel, T_DomainModelWrite, @@ -32,6 +33,8 @@ DomainModelCore, DomainRelation, ) +from cognite.powerops.client._generated.afrr_bid import data_classes + DEFAULT_LIMIT_READ = 25 DEFAULT_QUERY_LIMIT = 3 @@ -751,3 +754,41 @@ def _create_edge_filter( if filter: filters.append(filter) return dm.filters.And(*filters) + + +class GraphQLQueryResponse: + def __init__(self, data_model_id: dm.DataModelId): + self._output = GraphQLList([]) + self._data_class_by_type = _GRAPHQL_DATA_CLASS_BY_DATA_MODEL_BY_TYPE[data_model_id] + + def parse(self, response: dict[str, Any]) -> GraphQLList: + if "errors" in response: + raise RuntimeError(response["errors"]) + _, data = list(response.items())[0] + self._parse_item(data) + return self._output + + def _parse_item(self, data: dict[str, Any]) -> None: + if "items" in data: + for item in data["items"]: + self._parse_item(item) + elif "__typename" in data: + try: + item = self._data_class_by_type[data["__typename"]].model_validate(data) + except KeyError: + raise ValueError(f"Could not find class for type {data['__typename']}") + else: + self._output.append(item) + else: + raise RuntimeError("Missing '__typename' in GraphQL response. 
Cannot determine the type of the response.") + + +_GRAPHQL_DATA_CLASS_BY_DATA_MODEL_BY_TYPE = { + dm.DataModelId("power-ops-afrr-bid", "AFRRBid", "1"): { + "BidDocument": data_classes.BidDocumentGraphQL, + "BidRow": data_classes.BidRowGraphQL, + "PriceArea": data_classes.PriceAreaGraphQL, + "BidMethod": data_classes.BidMethodGraphQL, + "Alert": data_classes.AlertGraphQL, + }, +} diff --git a/cognite/powerops/client/_generated/afrr_bid/_api_client.py b/cognite/powerops/client/_generated/afrr_bid/_api_client.py index 4d2e4e044..1264657bb 100644 --- a/cognite/powerops/client/_generated/afrr_bid/_api_client.py +++ b/cognite/powerops/client/_generated/afrr_bid/_api_client.py @@ -2,7 +2,7 @@ import warnings from pathlib import Path -from typing import Sequence +from typing import Any, Sequence from cognite.client import ClientConfig, CogniteClient, data_modeling as dm from cognite.client.data_classes import TimeSeriesList @@ -13,8 +13,8 @@ from ._api.bid_method import BidMethodAPI from ._api.bid_row import BidRowAPI from ._api.price_area import PriceAreaAPI -from ._api._core import SequenceNotStr -from .data_classes._core import DEFAULT_INSTANCE_SPACE +from ._api._core import SequenceNotStr, GraphQLQueryResponse +from .data_classes._core import DEFAULT_INSTANCE_SPACE, GraphQLList from . import data_classes @@ -23,9 +23,9 @@ class AFRRBidAPI: AFRRBidAPI Generated with: - pygen = 0.99.11 - cognite-sdk = 7.26.0 - pydantic = 2.6.3 + pygen = 0.99.14 + cognite-sdk = 7.26.2 + pydantic = 2.6.4 Data Model: space: power-ops-afrr-bid @@ -41,7 +41,7 @@ def __init__(self, config_or_client: CogniteClient | ClientConfig): else: raise ValueError(f"Expected CogniteClient or ClientConfig, got {type(config_or_client)}") # The client name is used for aggregated logging of Pygen Usage - client.config.client_name = "CognitePygen:0.99.11" + client.config.client_name = "CognitePygen:0.99.14" view_by_read_class = { data_classes.Alert: dm.ViewId("power-ops-shared", "Alert", "1"), @@ -64,6 +64,7 @@ def upsert( items: data_classes.DomainModelWrite | Sequence[data_classes.DomainModelWrite], replace: bool = False, write_none: bool = False, + allow_version_increase: bool = False, ) -> data_classes.ResourcesWriteResult: """Add or update (upsert) items. @@ -73,17 +74,27 @@ def upsert( Or should we merge in new values for properties together with the existing values (false)? Note: This setting applies for all nodes or edges specified in the ingestion call. write_none (bool): This method will, by default, skip properties that are set to None. However, if you want to set properties to None, you can set this parameter to True. Note this only applies to properties that are nullable. + allow_version_increase (bool): If set to true, the version of the instance will be increased if the instance already exists. + If you get an error: 'A version conflict caused the ingest to fail', you can set this to true to allow + the version to increase. Returns: Created instance(s), i.e., nodes, edges, and time series. 
""" if isinstance(items, data_classes.DomainModelWrite): - instances = items.to_instances_write(self._view_by_read_class, write_none) + instances = items.to_instances_write(self._view_by_read_class, write_none, allow_version_increase) else: instances = data_classes.ResourcesWrite() cache: set[tuple[str, str]] = set() for item in items: - instances.extend(item._to_instances_write(cache, self._view_by_read_class, write_none)) + instances.extend( + item._to_instances_write( + cache, + self._view_by_read_class, + write_none, + allow_version_increase, + ) + ) result = self._client.data_modeling.instances.apply( nodes=instances.nodes, edges=instances.edges, @@ -151,6 +162,17 @@ def delete( nodes=[(space, id) for id in external_id], ) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQl query against the AFRRBid data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. + """ + data_model_id = dm.DataModelId("power-ops-afrr-bid", "AFRRBid", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + @classmethod def azure_project( cls, tenant_id: str, client_id: str, client_secret: str, cdf_cluster: str, project: str diff --git a/cognite/powerops/client/_generated/afrr_bid/data_classes/__init__.py b/cognite/powerops/client/_generated/afrr_bid/data_classes/__init__.py index 7f0f44b50..642eb1a84 100644 --- a/cognite/powerops/client/_generated/afrr_bid/data_classes/__init__.py +++ b/cognite/powerops/client/_generated/afrr_bid/data_classes/__init__.py @@ -1,11 +1,14 @@ from ._core import ( DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, DomainModelWrite, DomainModelList, DomainRelationWrite, + GraphQLCore, + GraphQLList, ResourcesWrite, ResourcesWriteResult, ) @@ -14,6 +17,7 @@ AlertApply, AlertApplyList, AlertFields, + AlertGraphQL, AlertList, AlertTextFields, AlertWrite, @@ -24,6 +28,7 @@ BidDocumentApply, BidDocumentApplyList, BidDocumentFields, + BidDocumentGraphQL, BidDocumentList, BidDocumentTextFields, BidDocumentWrite, @@ -34,6 +39,7 @@ BidMethodApply, BidMethodApplyList, BidMethodFields, + BidMethodGraphQL, BidMethodList, BidMethodTextFields, BidMethodWrite, @@ -44,22 +50,27 @@ BidRowApply, BidRowApplyList, BidRowFields, + BidRowGraphQL, BidRowList, BidRowTextFields, BidRowWrite, BidRowWriteList, ) -from ._price_area import PriceArea, PriceAreaFields, PriceAreaList, PriceAreaTextFields +from ._price_area import PriceArea, PriceAreaFields, PriceAreaGraphQL, PriceAreaList, PriceAreaTextFields BidDocument.model_rebuild() +BidDocumentGraphQL.model_rebuild() BidDocumentWrite.model_rebuild() BidDocumentApply.model_rebuild() BidRow.model_rebuild() +BidRowGraphQL.model_rebuild() BidRowWrite.model_rebuild() BidRowApply.model_rebuild() + __all__ = [ "DataRecord", + "DataRecordGraphQL", "DataRecordWrite", "ResourcesWrite", "DomainModel", @@ -67,8 +78,11 @@ "DomainModelWrite", "DomainModelList", "DomainRelationWrite", + "GraphQLCore", + "GraphQLList", "ResourcesWriteResult", "Alert", + "AlertGraphQL", "AlertWrite", "AlertApply", "AlertList", @@ -77,6 +91,7 @@ "AlertFields", "AlertTextFields", "BidDocument", + "BidDocumentGraphQL", "BidDocumentWrite", "BidDocumentApply", "BidDocumentList", @@ -85,6 +100,7 @@ "BidDocumentFields", "BidDocumentTextFields", "BidMethod", + "BidMethodGraphQL", "BidMethodWrite", "BidMethodApply", 
"BidMethodList", @@ -93,6 +109,7 @@ "BidMethodFields", "BidMethodTextFields", "BidRow", + "BidRowGraphQL", "BidRowWrite", "BidRowApply", "BidRowList", @@ -101,6 +118,7 @@ "BidRowFields", "BidRowTextFields", "PriceArea", + "PriceAreaGraphQL", "PriceAreaList", "PriceAreaFields", "PriceAreaTextFields", diff --git a/cognite/powerops/client/_generated/afrr_bid/data_classes/_alert.py b/cognite/powerops/client/_generated/afrr_bid/data_classes/_alert.py index 1adf4be04..7317c06ff 100644 --- a/cognite/powerops/client/_generated/afrr_bid/data_classes/_alert.py +++ b/cognite/powerops/client/_generated/afrr_bid/data_classes/_alert.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,6 +19,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -49,6 +53,86 @@ } +class AlertGraphQL(GraphQLCore): + """This represents the reading version of alert, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the alert. + data_record: The data record of the alert node. + time: Timestamp that the alert occurred (within the workflow) + title: Summary description of the alert + description: Detailed description of the alert + severity: CRITICAL (calculation could not completed) WARNING (calculation completed, with major issue) INFO (calculation completed, with minor issues) + alert_type: Classification of the alert (not in current alerting implementation) + status_code: Unique status code for the alert. May be used by the frontend to avoid use of hardcoded description (i.e. like a translation) + event_ids: An array of associated alert CDF Events (e.g. 
SHOP Run events) + calculation_run: The identifier of the parent Bid Calculation (required so that alerts can be created before the BidDocument) + """ + + view_id = dm.ViewId("power-ops-shared", "Alert", "1") + time: Optional[datetime.datetime] = None + title: Optional[str] = None + description: Optional[str] = None + severity: Optional[str] = None + alert_type: Optional[str] = Field(None, alias="alertType") + status_code: Optional[int] = Field(None, alias="statusCode") + event_ids: Optional[list[int]] = Field(None, alias="eventIds") + calculation_run: Optional[str] = Field(None, alias="calculationRun") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> Alert: + """Convert this GraphQL format of alert to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Alert( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + time=self.time, + title=self.title, + description=self.description, + severity=self.severity, + alert_type=self.alert_type, + status_code=self.status_code, + event_ids=self.event_ids, + calculation_run=self.calculation_run, + ) + + def as_write(self) -> AlertWrite: + """Convert this GraphQL format of alert to the writing format.""" + return AlertWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + time=self.time, + title=self.title, + description=self.description, + severity=self.severity, + alert_type=self.alert_type, + status_code=self.status_code, + event_ids=self.event_ids, + calculation_run=self.calculation_run, + ) + + class Alert(DomainModel): """This represents the reading version of alert. 
@@ -140,6 +224,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -177,7 +262,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_document.py b/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_document.py index 05e26d904..8c71eca25 100644 --- a/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_document.py +++ b/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_document.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,13 +19,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_row import BidRow, BidRowWrite - from ._price_area import PriceArea + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_row import BidRow, BidRowGraphQL, BidRowWrite + from ._price_area import PriceArea, PriceAreaGraphQL __all__ = [ @@ -49,6 +53,94 @@ } +class BidDocumentGraphQL(GraphQLCore): + """This represents the reading version of bid document, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid document. + data_record: The data record of the bid document node. + name: Unique name for a given instance of a Bid Document. A combination of name, priceArea, date and startCalculation. + delivery_date: The date of the Bid. + start_calculation: Timestamp of when the Bid calculation workflow started. + end_calculation: Timestamp of when the Bid calculation workflow completed. + is_complete: Indicates that the Bid calculation workflow has completed (although it has not necessarily succeeded). + alerts: An array of calculation level Alerts. + price_area: The price area field. + bids: An array of BidRows containing the Bid data. 
+ """ + + view_id = dm.ViewId("power-ops-afrr-bid", "BidDocument", "1") + name: Optional[str] = None + delivery_date: Optional[datetime.date] = Field(None, alias="deliveryDate") + start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") + end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") + is_complete: Optional[bool] = Field(None, alias="isComplete") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + price_area: Optional[PriceAreaGraphQL] = Field(None, repr=False, alias="priceArea") + bids: Optional[list[BidRowGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "price_area", "bids", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidDocument: + """Convert this GraphQL format of bid document to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidDocument( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + delivery_date=self.delivery_date, + start_calculation=self.start_calculation, + end_calculation=self.end_calculation, + is_complete=self.is_complete, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + price_area=self.price_area.as_read() if isinstance(self.price_area, GraphQLCore) else self.price_area, + bids=[bid.as_read() if isinstance(bid, GraphQLCore) else bid for bid in self.bids or []], + ) + + def as_write(self) -> BidDocumentWrite: + """Convert this GraphQL format of bid document to the writing format.""" + return BidDocumentWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + delivery_date=self.delivery_date, + start_calculation=self.start_calculation, + end_calculation=self.end_calculation, + is_complete=self.is_complete, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + price_area=self.price_area.as_write() if isinstance(self.price_area, DomainModel) else self.price_area, + bids=[bid.as_write() if isinstance(bid, DomainModel) else bid for bid in self.bids or []], + ) + + class BidDocument(DomainModel): """This represents the reading version of bid document. 
@@ -77,9 +169,9 @@ class BidDocument(DomainModel): start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") is_complete: Optional[bool] = Field(None, alias="isComplete") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) price_area: Union[PriceArea, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") - bids: Union[list[BidRow], list[str], None] = Field(default=None, repr=False) + bids: Union[list[BidRow], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidDocumentWrite: """Convert this read version of bid document to the writing version.""" @@ -135,15 +227,16 @@ class BidDocumentWrite(DomainModelWrite): start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") is_complete: Optional[bool] = Field(None, alias="isComplete") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) price_area: Union[str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") - bids: Union[list[BidRowWrite], list[str], None] = Field(default=None, repr=False) + bids: Union[list[BidRowWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -182,7 +275,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -197,14 +290,26 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("power-ops-types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) edge_type = dm.DirectRelationReference("power-ops-types", "partialBid") for bid in self.bids or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=bid, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=bid, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_method.py b/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_method.py index 7d517e1d6..49a92ccb0 100644 --- a/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_method.py +++ b/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_method.py @@ 
-4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -38,6 +42,58 @@ } +class BidMethodGraphQL(GraphQLCore): + """This represents the reading version of bid method, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid method. + data_record: The data record of the bid method node. + name: Name for the BidMethod + """ + + view_id = dm.ViewId("power-ops-afrr-bid", "BidMethod", "1") + name: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> BidMethod: + """Convert this GraphQL format of bid method to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMethod( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + ) + + def as_write(self) -> BidMethodWrite: + """Convert this GraphQL format of bid method to the writing format.""" + return BidMethodWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + ) + + class BidMethod(DomainModel): """This represents the reading version of bid method. 
@@ -94,6 +150,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -110,7 +167,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_row.py b/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_row.py index c8cf9ba6d..010390d6a 100644 --- a/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_row.py +++ b/cognite/powerops/client/_generated/afrr_bid/data_classes/_bid_row.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_method import BidMethod, BidMethodWrite - from ._bid_row import BidRow, BidRowWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_method import BidMethod, BidMethodGraphQL, BidMethodWrite + from ._bid_row import BidRow, BidRowGraphQL, BidRowWrite __all__ = [ @@ -62,6 +66,110 @@ } +class BidRowGraphQL(GraphQLCore): + """This represents the reading version of bid row, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid row. + data_record: The data record of the bid row node. + price: Price in EUR/MW/h, rounded to nearest price step (0.1?) + quantity_per_hour: The capacity offered, per hour, in MW, rounded to nearest step size (5?) + product: The product field. + is_divisible: The is divisible field. + min_quantity: Min quantity, per hour. Only relevant for divisible Bids. The minimum capacity that must be accepted; this must be lower than capacityPerHour and is rounded to the nearest step (5 MW?). + is_block: Indication if the row is part of a Block bid. If true: quantityPerHour must have the same value for consecutive hours (and no breaks). Block bids must be accepted for all hours or none. + exclusive_group_id: Other bids with the same ID are part of an exclusive group - only one of them can be accepted, and they must have the same direction (product). Not allowed for block bids. + linked_bid: The linked bid must have the opposite direction (link means that both or none must be accepted). Should be bi-directional. + asset_type: The asset type field. + asset_id: The asset id field. + method: The method field. + alerts: An array of associated alerts. 
+ """ + + view_id = dm.ViewId("power-ops-afrr-bid", "BidRow", "1") + price: Optional[float] = None + quantity_per_hour: Optional[list[float]] = Field(None, alias="quantityPerHour") + product: Optional[str] = None + is_divisible: Optional[bool] = Field(None, alias="isDivisible") + min_quantity: Optional[list[float]] = Field(None, alias="minQuantity") + is_block: Optional[bool] = Field(None, alias="isBlock") + exclusive_group_id: Optional[str] = Field(None, alias="exclusiveGroupId") + linked_bid: Optional[BidRowGraphQL] = Field(None, repr=False, alias="linkedBid") + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + method: Optional[BidMethodGraphQL] = Field(None, repr=False) + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("linked_bid", "method", "alerts", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidRow: + """Convert this GraphQL format of bid row to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidRow( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + price=self.price, + quantity_per_hour=self.quantity_per_hour, + product=self.product, + is_divisible=self.is_divisible, + min_quantity=self.min_quantity, + is_block=self.is_block, + exclusive_group_id=self.exclusive_group_id, + linked_bid=self.linked_bid.as_read() if isinstance(self.linked_bid, GraphQLCore) else self.linked_bid, + asset_type=self.asset_type, + asset_id=self.asset_id, + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + ) + + def as_write(self) -> BidRowWrite: + """Convert this GraphQL format of bid row to the writing format.""" + return BidRowWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + price=self.price, + quantity_per_hour=self.quantity_per_hour, + product=self.product, + is_divisible=self.is_divisible, + min_quantity=self.min_quantity, + is_block=self.is_block, + exclusive_group_id=self.exclusive_group_id, + linked_bid=self.linked_bid.as_write() if isinstance(self.linked_bid, DomainModel) else self.linked_bid, + asset_type=self.asset_type, + asset_id=self.asset_id, + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + ) + + class BidRow(DomainModel): """This represents the reading version of bid row. 
@@ -98,7 +206,7 @@ class BidRow(DomainModel): asset_type: Optional[str] = Field(None, alias="assetType") asset_id: Optional[str] = Field(None, alias="assetId") method: Union[BidMethod, str, dm.NodeId, None] = Field(None, repr=False) - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidRowWrite: """Convert this read version of bid row to the writing version.""" @@ -166,13 +274,14 @@ class BidRowWrite(DomainModelWrite): asset_type: Optional[str] = Field(None, alias="assetType") asset_id: Optional[str] = Field(None, alias="assetId") method: Union[BidMethodWrite, str, dm.NodeId, None] = Field(None, repr=False) - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -225,7 +334,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -240,7 +349,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("power-ops-types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/afrr_bid/data_classes/_core.py b/cognite/powerops/client/_generated/afrr_bid/data_classes/_core.py index a0a580fc0..d9f0159d6 100644 --- a/cognite/powerops/client/_generated/afrr_bid/data_classes/_core.py +++ b/cognite/powerops/client/_generated/afrr_bid/data_classes/_core.py @@ -2,7 +2,7 @@ import datetime import warnings -from abc import abstractmethod +from abc import abstractmethod, ABC from collections import UserList from collections.abc import Collection, Mapping from dataclasses import dataclass, field @@ -61,9 +61,9 @@ def extend(self, other: ResourcesWrite) -> None: @dataclass class ResourcesWriteResult: - nodes: dm.NodeApplyResultList - edges: dm.EdgeApplyResultList - time_series: TimeSeriesList + nodes: dm.NodeApplyResultList = field(default_factory=lambda: dm.NodeApplyResultList([])) + edges: dm.EdgeApplyResultList = field(default_factory=lambda: dm.EdgeApplyResultList([])) + time_series: TimeSeriesList = field(default_factory=lambda: TimeSeriesList([])) # Arbitrary types are allowed to be able to use the TimeSeries class @@ -76,6 +76,66 @@ def _repr_html_(self) -> str: return self.to_pandas().to_frame("value")._repr_html_() # type: ignore[operator] +class DataRecordGraphQL(Core): + last_updated_time: Optional[datetime.datetime] = Field(None, alias="lastUpdatedTime") + created_time: Optional[datetime.datetime] = Field(None, alias="createdTime") + + +class GraphQLCore(Core, ABC): + view_id: 
ClassVar[dm.ViewId] + space: Optional[str] = None + external_id: Optional[str] = Field(None, alias="externalId") + data_record: Optional[DataRecordGraphQL] = Field(None, alias="dataRecord") + + +class GraphQLList(UserList): + def __init__(self, nodes: Collection[GraphQLCore] | None = None): + super().__init__(nodes or []) + + # The dunder implementations are to get proper type hints + def __iter__(self) -> Iterator[GraphQLCore]: + return super().__iter__() + + @overload + def __getitem__(self, item: int) -> GraphQLCore: ... + + @overload + def __getitem__(self, item: slice) -> GraphQLList: ... + + def __getitem__(self, item: int | slice) -> GraphQLCore | GraphQLList: + if isinstance(item, slice): + return self.__class__(self.data[item]) + elif isinstance(item, int): + return self.data[item] + else: + raise TypeError(f"Expected int or slice, got {type(item)}") + + def dump(self) -> list[dict[str, Any]]: + return [node.model_dump() for node in self.data] + + def to_pandas(self) -> pd.DataFrame: + """ + Convert the list of nodes to a pandas.DataFrame. + + Returns: + A pandas.DataFrame with the nodes as rows. + """ + df = pd.DataFrame(self.dump()) + if df.empty: + df = pd.DataFrame(columns=GraphQLCore.model_fields) + # Reorder columns to have the most relevant first + id_columns = ["space", "external_id"] + end_columns = ["data_record"] + fixed_columns = set(id_columns + end_columns) + columns = ( + id_columns + [col for col in df if col not in fixed_columns] + [col for col in end_columns if col in df] + ) + return df[columns] + + def _repr_html_(self) -> str: + return self.to_pandas()._repr_html_() # type: ignore[operator] + + class DomainModelCore(Core): space: str external_id: str = Field(min_length=1, max_length=255, alias="externalId") @@ -199,14 +259,17 @@ class DataRecordWriteList(_DataRecordListCore[DataRecordWrite]): _INSTANCE = DataRecordWrite -class DomainModelWrite(DomainModelCore, extra=Extra.forbid, populate_by_name=True): +class DomainModelWrite(DomainModelCore, extra=Extra.ignore, populate_by_name=True): external_id_factory: ClassVar[Optional[Callable[[type[DomainModelWrite], dict], str]]] = None data_record: DataRecordWrite = Field(default_factory=DataRecordWrite) def to_instances_write( - self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False + self, + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, + write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: - return self._to_instances_write(set(), view_by_read_class, write_none) + return self._to_instances_write(set(), view_by_read_class, write_none, allow_version_increase) def to_instances_apply( self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False @@ -224,6 +287,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: raise NotImplementedError() @@ -340,12 +404,15 @@ def data_records(self) -> DataRecordWriteList: return DataRecordWriteList([node.data_record for node in self]) def to_instances_write( - self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False + self, + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, + write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: cache: set[tuple[str, str]] = set() domains = 
ResourcesWrite() for node in self: - result = node._to_instances_write(cache, view_by_read_class, write_none) + result = node._to_instances_write(cache, view_by_read_class, write_none, allow_version_increase) domains.extend(result) return domains @@ -425,24 +492,30 @@ def _to_instances_write( edge_type: dm.DirectRelationReference, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: raise NotImplementedError() @classmethod def create_edge( - cls, start_node: DomainModelWrite | str, end_node: DomainModelWrite | str, edge_type: dm.DirectRelationReference + cls, + start_node: DomainModelWrite | str | dm.NodeId, + end_node: DomainModelWrite | str | dm.NodeId, + edge_type: dm.DirectRelationReference, ) -> dm.EdgeApply: - if isinstance(start_node, DomainModelWrite): - space = start_node.space - elif isinstance(start_node, DomainModelWrite): + if isinstance(start_node, (DomainModelWrite, dm.NodeId)): space = start_node.space + elif isinstance(end_node, (DomainModelWrite, dm.NodeId)): + space = end_node.space else: - raise TypeError(f"Either pass in a start or end node of type {DomainRelationWrite.__name__}") + space = DEFAULT_INSTANCE_SPACE if isinstance(end_node, str): end_ref = dm.DirectRelationReference(space, end_node) elif isinstance(end_node, DomainModelWrite): end_ref = end_node.as_direct_reference() + elif isinstance(end_node, dm.NodeId): + end_ref = dm.DirectRelationReference(end_node.space, end_node.external_id) else: raise TypeError(f"Expected str or subclass of {DomainRelationWrite.__name__}, got {type(end_node)}") @@ -450,6 +523,8 @@ def create_edge( start_ref = dm.DirectRelationReference(space, start_node) elif isinstance(start_node, DomainModelWrite): start_ref = start_node.as_direct_reference() + elif isinstance(start_node, dm.NodeId): + start_ref = dm.DirectRelationReference(start_node.space, start_node.external_id) else: raise TypeError(f"Expected str or subclass of {DomainRelationWrite.__name__}, got {type(start_node)}") @@ -465,11 +540,12 @@ def create_edge( def from_edge_to_resources( cls, cache: set[tuple[str, str]], - start_node: DomainModelWrite | str, - end_node: DomainModelWrite | str, + start_node: DomainModelWrite | str | dm.NodeId, + end_node: DomainModelWrite | str | dm.NodeId, edge_type: dm.DirectRelationReference, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() edge = DomainRelationWrite.create_edge(start_node, end_node, edge_type) @@ -479,10 +555,20 @@ def from_edge_to_resources( cache.add((edge.space, edge.external_id)) if isinstance(end_node, DomainModelWrite): - other_resources = end_node._to_instances_write(cache, view_by_read_class, write_none) + other_resources = end_node._to_instances_write( + cache, + view_by_read_class, + write_none, + allow_version_increase, + ) resources.extend(other_resources) if isinstance(start_node, DomainModelWrite): - other_resources = start_node._to_instances_write(cache, view_by_read_class, write_none) + other_resources = start_node._to_instances_write( + cache, + view_by_read_class, + write_none, + allow_version_increase, + ) resources.extend(other_resources) return resources diff --git a/cognite/powerops/client/_generated/afrr_bid/data_classes/_price_area.py b/cognite/powerops/client/_generated/afrr_bid/data_classes/_price_area.py index f3f0823cb..ae72c3d75 100644 --- 
a/cognite/powerops/client/_generated/afrr_bid/data_classes/_price_area.py +++ b/cognite/powerops/client/_generated/afrr_bid/data_classes/_price_area.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,6 +19,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) @@ -63,6 +67,76 @@ } +class PriceAreaGraphQL(GraphQLCore): + """This represents the reading version of price area, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the price area. + data_record: The data record of the price area node. + name: Name for the PriceArea. + capacity_price_up: The capacity price up field. + capacity_price_down: The capacity price down field. + activation_price_up: The mFRR activation price (TBC) + activation_price_down: The mFRR activation price (TBC) + relative_activation: Value between -1 (100 % activation down) and 1 (100 % activation up) + total_capacity_allocation_up: The total capacity allocation up field. + total_capacity_allocation_down: The total capacity allocation down field. + own_capacity_allocation_up: The own capacity allocation up field. + own_capacity_allocation_down: The own capacity allocation down field. + """ + + view_id = dm.ViewId("power-ops-afrr-bid", "PriceArea", "1") + name: Optional[str] = None + capacity_price_up: Union[TimeSeries, str, None] = Field(None, alias="capacityPriceUp") + capacity_price_down: Union[TimeSeries, str, None] = Field(None, alias="capacityPriceDown") + activation_price_up: Union[TimeSeries, str, None] = Field(None, alias="activationPriceUp") + activation_price_down: Union[TimeSeries, str, None] = Field(None, alias="activationPriceDown") + relative_activation: Union[TimeSeries, str, None] = Field(None, alias="relativeActivation") + total_capacity_allocation_up: Union[TimeSeries, str, None] = Field(None, alias="totalCapacityAllocationUp") + total_capacity_allocation_down: Union[TimeSeries, str, None] = Field(None, alias="totalCapacityAllocationDown") + own_capacity_allocation_up: Union[TimeSeries, str, None] = Field(None, alias="ownCapacityAllocationUp") + own_capacity_allocation_down: Union[TimeSeries, str, None] = Field(None, alias="ownCapacityAllocationDown") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> PriceArea: + """Convert this GraphQL format of price area to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PriceArea( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + capacity_price_up=self.capacity_price_up, 
capacity_price_down=self.capacity_price_down, + activation_price_up=self.activation_price_up, + activation_price_down=self.activation_price_down, + relative_activation=self.relative_activation, + total_capacity_allocation_up=self.total_capacity_allocation_up, + total_capacity_allocation_down=self.total_capacity_allocation_down, + own_capacity_allocation_up=self.own_capacity_allocation_up, + own_capacity_allocation_down=self.own_capacity_allocation_down, + ) + + class PriceArea(DomainModel): """This represents the reading version of price area. diff --git a/cognite/powerops/client/_generated/assets/_api/_core.py b/cognite/powerops/client/_generated/assets/_api/_core.py index ef3bfde5e..084fd47a0 100644 --- a/cognite/powerops/client/_generated/assets/_api/_core.py +++ b/cognite/powerops/client/_generated/assets/_api/_core.py @@ -21,6 +21,7 @@ DomainModelCore, DomainModelWrite, DomainRelationWrite, + GraphQLList, ResourcesWriteResult, T_DomainModel, T_DomainModelWrite, @@ -32,6 +33,8 @@ DomainModelCore, DomainRelation, ) +from cognite.powerops.client._generated.assets import data_classes + DEFAULT_LIMIT_READ = 25 DEFAULT_QUERY_LIMIT = 3 @@ -751,3 +754,44 @@ def _create_edge_filter( if filter: filters.append(filter) return dm.filters.And(*filters) + + +class GraphQLQueryResponse: + def __init__(self, data_model_id: dm.DataModelId): + self._output = GraphQLList([]) + self._data_class_by_type = _GRAPHQL_DATA_CLASS_BY_DATA_MODEL_BY_TYPE[data_model_id] + + def parse(self, response: dict[str, Any]) -> GraphQLList: + if "errors" in response: + raise RuntimeError(response["errors"]) + _, data = list(response.items())[0] + self._parse_item(data) + return self._output + + def _parse_item(self, data: dict[str, Any]) -> None: + if "items" in data: + for item in data["items"]: + self._parse_item(item) + elif "__typename" in data: + try: + item = self._data_class_by_type[data["__typename"]].model_validate(data) + except KeyError: + raise ValueError(f"Could not find class for type {data['__typename']}") + else: + self._output.append(item) + else: + raise RuntimeError("Missing '__typename' in GraphQL response. 
Cannot determine the type of the response.") + + +_GRAPHQL_DATA_CLASS_BY_DATA_MODEL_BY_TYPE = { + dm.DataModelId("power-ops-assets", "PowerAsset", "1"): { + "PriceArea": data_classes.PriceAreaGraphQL, + "Watercourse": data_classes.WatercourseGraphQL, + "Plant": data_classes.PlantGraphQL, + "Generator": data_classes.GeneratorGraphQL, + "Reservoir": data_classes.ReservoirGraphQL, + "TurbineEfficiencyCurve": data_classes.TurbineEfficiencyCurveGraphQL, + "GeneratorEfficiencyCurve": data_classes.GeneratorEfficiencyCurveGraphQL, + "BidMethod": data_classes.BidMethodGraphQL, + }, +} diff --git a/cognite/powerops/client/_generated/assets/_api_client.py b/cognite/powerops/client/_generated/assets/_api_client.py index 0a4581f82..973dec7b4 100644 --- a/cognite/powerops/client/_generated/assets/_api_client.py +++ b/cognite/powerops/client/_generated/assets/_api_client.py @@ -2,7 +2,7 @@ import warnings from pathlib import Path -from typing import Sequence +from typing import Any, Sequence from cognite.client import ClientConfig, CogniteClient, data_modeling as dm from cognite.client.data_classes import TimeSeriesList @@ -16,8 +16,8 @@ from ._api.reservoir import ReservoirAPI from ._api.turbine_efficiency_curve import TurbineEfficiencyCurveAPI from ._api.watercourse import WatercourseAPI -from ._api._core import SequenceNotStr -from .data_classes._core import DEFAULT_INSTANCE_SPACE +from ._api._core import SequenceNotStr, GraphQLQueryResponse +from .data_classes._core import DEFAULT_INSTANCE_SPACE, GraphQLList from . import data_classes @@ -26,9 +26,9 @@ class PowerAssetAPI: PowerAssetAPI Generated with: - pygen = 0.99.11 - cognite-sdk = 7.26.0 - pydantic = 2.6.3 + pygen = 0.99.14 + cognite-sdk = 7.26.2 + pydantic = 2.6.4 Data Model: space: power-ops-assets @@ -44,7 +44,7 @@ def __init__(self, config_or_client: CogniteClient | ClientConfig): else: raise ValueError(f"Expected CogniteClient or ClientConfig, got {type(config_or_client)}") # The client name is used for aggregated logging of Pygen Usage - client.config.client_name = "CognitePygen:0.99.11" + client.config.client_name = "CognitePygen:0.99.14" view_by_read_class = { data_classes.BidMethod: dm.ViewId("power-ops-shared", "BidMethod", "1"), @@ -73,6 +73,7 @@ def upsert( items: data_classes.DomainModelWrite | Sequence[data_classes.DomainModelWrite], replace: bool = False, write_none: bool = False, + allow_version_increase: bool = False, ) -> data_classes.ResourcesWriteResult: """Add or update (upsert) items. @@ -82,17 +83,27 @@ def upsert( Or should we merge in new values for properties together with the existing values (false)? Note: This setting applies for all nodes or edges specified in the ingestion call. write_none (bool): This method will, by default, skip properties that are set to None. However, if you want to set properties to None, you can set this parameter to True. Note this only applies to properties that are nullable. + allow_version_increase (bool): If set to true, the version of the instance will be increased if the instance already exists. + If you get an error: 'A version conflict caused the ingest to fail', you can set this to true to allow + the version to increase. Returns: Created instance(s), i.e., nodes, edges, and time series. 
""" if isinstance(items, data_classes.DomainModelWrite): - instances = items.to_instances_write(self._view_by_read_class, write_none) + instances = items.to_instances_write(self._view_by_read_class, write_none, allow_version_increase) else: instances = data_classes.ResourcesWrite() cache: set[tuple[str, str]] = set() for item in items: - instances.extend(item._to_instances_write(cache, self._view_by_read_class, write_none)) + instances.extend( + item._to_instances_write( + cache, + self._view_by_read_class, + write_none, + allow_version_increase, + ) + ) result = self._client.data_modeling.instances.apply( nodes=instances.nodes, edges=instances.edges, @@ -160,6 +171,17 @@ def delete( nodes=[(space, id) for id in external_id], ) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQl query against the PowerAsset data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. + """ + data_model_id = dm.DataModelId("power-ops-assets", "PowerAsset", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + @classmethod def azure_project( cls, tenant_id: str, client_id: str, client_secret: str, cdf_cluster: str, project: str diff --git a/cognite/powerops/client/_generated/assets/data_classes/__init__.py b/cognite/powerops/client/_generated/assets/data_classes/__init__.py index 01e5ad49c..11603cdaa 100644 --- a/cognite/powerops/client/_generated/assets/data_classes/__init__.py +++ b/cognite/powerops/client/_generated/assets/data_classes/__init__.py @@ -1,11 +1,14 @@ from ._core import ( DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, DomainModelWrite, DomainModelList, DomainRelationWrite, + GraphQLCore, + GraphQLList, ResourcesWrite, ResourcesWriteResult, ) @@ -14,6 +17,7 @@ BidMethodApply, BidMethodApplyList, BidMethodFields, + BidMethodGraphQL, BidMethodList, BidMethodTextFields, BidMethodWrite, @@ -24,6 +28,7 @@ GeneratorApply, GeneratorApplyList, GeneratorFields, + GeneratorGraphQL, GeneratorList, GeneratorTextFields, GeneratorWrite, @@ -34,6 +39,7 @@ GeneratorEfficiencyCurveApply, GeneratorEfficiencyCurveApplyList, GeneratorEfficiencyCurveFields, + GeneratorEfficiencyCurveGraphQL, GeneratorEfficiencyCurveList, GeneratorEfficiencyCurveWrite, GeneratorEfficiencyCurveWriteList, @@ -43,6 +49,7 @@ PlantApply, PlantApplyList, PlantFields, + PlantGraphQL, PlantList, PlantTextFields, PlantWrite, @@ -53,6 +60,7 @@ PriceAreaApply, PriceAreaApplyList, PriceAreaFields, + PriceAreaGraphQL, PriceAreaList, PriceAreaTextFields, PriceAreaWrite, @@ -63,6 +71,7 @@ ReservoirApply, ReservoirApplyList, ReservoirFields, + ReservoirGraphQL, ReservoirList, ReservoirTextFields, ReservoirWrite, @@ -73,6 +82,7 @@ TurbineEfficiencyCurveApply, TurbineEfficiencyCurveApplyList, TurbineEfficiencyCurveFields, + TurbineEfficiencyCurveGraphQL, TurbineEfficiencyCurveList, TurbineEfficiencyCurveWrite, TurbineEfficiencyCurveWriteList, @@ -82,6 +92,7 @@ WatercourseApply, WatercourseApplyList, WatercourseFields, + WatercourseGraphQL, WatercourseList, WatercourseTextFields, WatercourseWrite, @@ -89,20 +100,26 @@ ) Generator.model_rebuild() +GeneratorGraphQL.model_rebuild() GeneratorWrite.model_rebuild() GeneratorApply.model_rebuild() Plant.model_rebuild() +PlantGraphQL.model_rebuild() PlantWrite.model_rebuild() PlantApply.model_rebuild() 
PriceArea.model_rebuild() +PriceAreaGraphQL.model_rebuild() PriceAreaWrite.model_rebuild() PriceAreaApply.model_rebuild() Watercourse.model_rebuild() +WatercourseGraphQL.model_rebuild() WatercourseWrite.model_rebuild() WatercourseApply.model_rebuild() + __all__ = [ "DataRecord", + "DataRecordGraphQL", "DataRecordWrite", "ResourcesWrite", "DomainModel", @@ -110,8 +127,11 @@ "DomainModelWrite", "DomainModelList", "DomainRelationWrite", + "GraphQLCore", + "GraphQLList", "ResourcesWriteResult", "BidMethod", + "BidMethodGraphQL", "BidMethodWrite", "BidMethodApply", "BidMethodList", @@ -120,6 +140,7 @@ "BidMethodFields", "BidMethodTextFields", "Generator", + "GeneratorGraphQL", "GeneratorWrite", "GeneratorApply", "GeneratorList", @@ -128,6 +149,7 @@ "GeneratorFields", "GeneratorTextFields", "GeneratorEfficiencyCurve", + "GeneratorEfficiencyCurveGraphQL", "GeneratorEfficiencyCurveWrite", "GeneratorEfficiencyCurveApply", "GeneratorEfficiencyCurveList", @@ -135,6 +157,7 @@ "GeneratorEfficiencyCurveApplyList", "GeneratorEfficiencyCurveFields", "Plant", + "PlantGraphQL", "PlantWrite", "PlantApply", "PlantList", @@ -143,6 +166,7 @@ "PlantFields", "PlantTextFields", "PriceArea", + "PriceAreaGraphQL", "PriceAreaWrite", "PriceAreaApply", "PriceAreaList", @@ -151,6 +175,7 @@ "PriceAreaFields", "PriceAreaTextFields", "Reservoir", + "ReservoirGraphQL", "ReservoirWrite", "ReservoirApply", "ReservoirList", @@ -159,6 +184,7 @@ "ReservoirFields", "ReservoirTextFields", "TurbineEfficiencyCurve", + "TurbineEfficiencyCurveGraphQL", "TurbineEfficiencyCurveWrite", "TurbineEfficiencyCurveApply", "TurbineEfficiencyCurveList", @@ -166,6 +192,7 @@ "TurbineEfficiencyCurveApplyList", "TurbineEfficiencyCurveFields", "Watercourse", + "WatercourseGraphQL", "WatercourseWrite", "WatercourseApply", "WatercourseList", diff --git a/cognite/powerops/client/_generated/assets/data_classes/_bid_method.py b/cognite/powerops/client/_generated/assets/data_classes/_bid_method.py index 3babc3672..9ce573eee 100644 --- a/cognite/powerops/client/_generated/assets/data_classes/_bid_method.py +++ b/cognite/powerops/client/_generated/assets/data_classes/_bid_method.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -38,6 +42,58 @@ } +class BidMethodGraphQL(GraphQLCore): + """This represents the reading version of bid method, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid method. + data_record: The data record of the bid method node. 
+ name: Name for the BidMethod + """ + + view_id = dm.ViewId("power-ops-shared", "BidMethod", "1") + name: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> BidMethod: + """Convert this GraphQL format of bid method to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMethod( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + ) + + def as_write(self) -> BidMethodWrite: + """Convert this GraphQL format of bid method to the writing format.""" + return BidMethodWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + ) + + class BidMethod(DomainModel): """This represents the reading version of bid method. @@ -94,6 +150,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -110,7 +167,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/assets/data_classes/_core.py b/cognite/powerops/client/_generated/assets/data_classes/_core.py index a0a580fc0..d9f0159d6 100644 --- a/cognite/powerops/client/_generated/assets/data_classes/_core.py +++ b/cognite/powerops/client/_generated/assets/data_classes/_core.py @@ -2,7 +2,7 @@ import datetime import warnings -from abc import abstractmethod +from abc import abstractmethod, ABC from collections import UserList from collections.abc import Collection, Mapping from dataclasses import dataclass, field @@ -61,9 +61,9 @@ def extend(self, other: ResourcesWrite) -> None: @dataclass class ResourcesWriteResult: - nodes: dm.NodeApplyResultList - edges: dm.EdgeApplyResultList - time_series: TimeSeriesList + nodes: dm.NodeApplyResultList = field(default_factory=lambda: dm.NodeApplyResultList([])) + edges: dm.EdgeApplyResultList = field(default_factory=lambda: dm.EdgeApplyResultList([])) + time_series: TimeSeriesList = field(default_factory=lambda: TimeSeriesList([])) # Arbitrary types are allowed to be able to use the TimeSeries class @@ -76,6 +76,66 @@ def _repr_html_(self) -> str: return self.to_pandas().to_frame("value")._repr_html_() # type: ignore[operator] +class DataRecordGraphQL(Core): + last_updated_time: Optional[datetime.datetime] = Field(None, alias="lastUpdatedTime") + created_time: Optional[datetime.datetime] = Field(None, alias="createdTime") + + +class GraphQLCore(Core, ABC): + view_id: ClassVar[dm.ViewId] + space: Optional[str] = None + external_id: Optional[str] = Field(None, alias="externalId") + data_record: Optional[DataRecordGraphQL] 
= Field(None, alias="dataRecord")
+
+
+class GraphQLList(UserList):
+    def __init__(self, nodes: Collection[GraphQLCore] | None = None):
+        super().__init__(nodes or [])
+
+    # The dunder implementations are to get proper type hints
+    def __iter__(self) -> Iterator[GraphQLCore]:
+        return super().__iter__()
+
+    @overload
+    def __getitem__(self, item: int) -> GraphQLCore: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> GraphQLList: ...
+
+    def __getitem__(self, item: int | slice) -> GraphQLCore | GraphQLList:
+        if isinstance(item, slice):
+            return self.__class__(self.data[item])
+        elif isinstance(item, int):
+            return self.data[item]
+        else:
+            raise TypeError(f"Expected int or slice, got {type(item)}")
+
+    def dump(self) -> list[dict[str, Any]]:
+        return [node.model_dump() for node in self.data]
+
+    def to_pandas(self) -> pd.DataFrame:
+        """
+        Convert the list of nodes to a pandas.DataFrame.
+
+        Returns:
+            A pandas.DataFrame with the nodes as rows.
+        """
+        df = pd.DataFrame(self.dump())
+        if df.empty:
+            df = pd.DataFrame(columns=GraphQLCore.model_fields)
+        # Reorder columns to have the most relevant first
+        id_columns = ["space", "external_id"]
+        end_columns = ["data_record"]
+        fixed_columns = set(id_columns + end_columns)
+        columns = (
+            id_columns + [col for col in df if col not in fixed_columns] + [col for col in end_columns if col in df]
+        )
+        return df[columns]
+
+    def _repr_html_(self) -> str:
+        return self.to_pandas()._repr_html_()  # type: ignore[operator]
+
+
 class DomainModelCore(Core):
     space: str
     external_id: str = Field(min_length=1, max_length=255, alias="externalId")
@@ -199,14 +259,17 @@ class DataRecordWriteList(_DataRecordListCore[DataRecordWrite]):
     _INSTANCE = DataRecordWrite


-class DomainModelWrite(DomainModelCore, extra=Extra.forbid, populate_by_name=True):
+class DomainModelWrite(DomainModelCore, extra=Extra.ignore, populate_by_name=True):
     external_id_factory: ClassVar[Optional[Callable[[type[DomainModelWrite], dict], str]]] = None
     data_record: DataRecordWrite = Field(default_factory=DataRecordWrite)

     def to_instances_write(
-        self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False
+        self,
+        view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None,
+        write_none: bool = False,
+        allow_version_increase: bool = False,
     ) -> ResourcesWrite:
-        return self._to_instances_write(set(), view_by_read_class, write_none)
+        return self._to_instances_write(set(), view_by_read_class, write_none, allow_version_increase)

     def to_instances_apply(
         self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False
@@ -224,6 +287,7 @@ def _to_instances_write(
         cache: set[tuple[str, str]],
         view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None,
         write_none: bool = False,
+        allow_version_increase: bool = False,
     ) -> ResourcesWrite:
         raise NotImplementedError()

@@ -340,12 +404,15 @@ def data_records(self) -> DataRecordWriteList:
         return DataRecordWriteList([node.data_record for node in self])

     def to_instances_write(
-        self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False
+        self,
+        view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None,
+        write_none: bool = False,
+        allow_version_increase: bool = False,
     ) -> ResourcesWrite:
         cache: set[tuple[str, str]] = set()
         domains = ResourcesWrite()
         for node in self:
-            result = node._to_instances_write(cache,
view_by_read_class, write_none, allow_version_increase) domains.extend(result) return domains @@ -425,24 +492,30 @@ def _to_instances_write( edge_type: dm.DirectRelationReference, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: raise NotImplementedError() @classmethod def create_edge( - cls, start_node: DomainModelWrite | str, end_node: DomainModelWrite | str, edge_type: dm.DirectRelationReference + cls, + start_node: DomainModelWrite | str | dm.NodeId, + end_node: DomainModelWrite | str | dm.NodeId, + edge_type: dm.DirectRelationReference, ) -> dm.EdgeApply: - if isinstance(start_node, DomainModelWrite): - space = start_node.space - elif isinstance(start_node, DomainModelWrite): + if isinstance(start_node, (DomainModelWrite, dm.NodeId)): space = start_node.space + elif isinstance(end_node, (DomainModelWrite, dm.NodeId)): + space = end_node.space else: - raise TypeError(f"Either pass in a start or end node of type {DomainRelationWrite.__name__}") + space = DEFAULT_INSTANCE_SPACE if isinstance(end_node, str): end_ref = dm.DirectRelationReference(space, end_node) elif isinstance(end_node, DomainModelWrite): end_ref = end_node.as_direct_reference() + elif isinstance(end_node, dm.NodeId): + end_ref = dm.DirectRelationReference(end_node.space, end_node.external_id) else: raise TypeError(f"Expected str or subclass of {DomainRelationWrite.__name__}, got {type(end_node)}") @@ -450,6 +523,8 @@ def create_edge( start_ref = dm.DirectRelationReference(space, start_node) elif isinstance(start_node, DomainModelWrite): start_ref = start_node.as_direct_reference() + elif isinstance(start_node, dm.NodeId): + start_ref = dm.DirectRelationReference(start_node.space, start_node.external_id) else: raise TypeError(f"Expected str or subclass of {DomainRelationWrite.__name__}, got {type(start_node)}") @@ -465,11 +540,12 @@ def create_edge( def from_edge_to_resources( cls, cache: set[tuple[str, str]], - start_node: DomainModelWrite | str, - end_node: DomainModelWrite | str, + start_node: DomainModelWrite | str | dm.NodeId, + end_node: DomainModelWrite | str | dm.NodeId, edge_type: dm.DirectRelationReference, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() edge = DomainRelationWrite.create_edge(start_node, end_node, edge_type) @@ -479,10 +555,20 @@ def from_edge_to_resources( cache.add((edge.space, edge.external_id)) if isinstance(end_node, DomainModelWrite): - other_resources = end_node._to_instances_write(cache, view_by_read_class, write_none) + other_resources = end_node._to_instances_write( + cache, + view_by_read_class, + write_none, + allow_version_increase, + ) resources.extend(other_resources) if isinstance(start_node, DomainModelWrite): - other_resources = start_node._to_instances_write(cache, view_by_read_class, write_none) + other_resources = start_node._to_instances_write( + cache, + view_by_read_class, + write_none, + allow_version_increase, + ) resources.extend(other_resources) return resources diff --git a/cognite/powerops/client/_generated/assets/data_classes/_generator.py b/cognite/powerops/client/_generated/assets/data_classes/_generator.py index 20acd857f..b6fa489a3 100644 --- a/cognite/powerops/client/_generated/assets/data_classes/_generator.py +++ b/cognite/powerops/client/_generated/assets/data_classes/_generator.py @@ -6,9 +6,12 @@ from 
cognite.client import data_modeling as dm
 from cognite.client.data_classes import TimeSeries as CogniteTimeSeries
 from pydantic import Field
+from pydantic import field_validator, model_validator

 from ._core import (
     DEFAULT_INSTANCE_SPACE,
+    DataRecord,
+    DataRecordGraphQL,
     DataRecordWrite,
     DomainModel,
     DomainModelCore,
@@ -16,13 +19,22 @@
     DomainModelWriteList,
     DomainModelList,
     DomainRelationWrite,
+    GraphQLCore,
     ResourcesWrite,
     TimeSeries,
 )

 if TYPE_CHECKING:
-    from ._generator_efficiency_curve import GeneratorEfficiencyCurve, GeneratorEfficiencyCurveWrite
-    from ._turbine_efficiency_curve import TurbineEfficiencyCurve, TurbineEfficiencyCurveWrite
+    from ._generator_efficiency_curve import (
+        GeneratorEfficiencyCurve,
+        GeneratorEfficiencyCurveGraphQL,
+        GeneratorEfficiencyCurveWrite,
+    )
+    from ._turbine_efficiency_curve import (
+        TurbineEfficiencyCurve,
+        TurbineEfficiencyCurveGraphQL,
+        TurbineEfficiencyCurveWrite,
+    )


 __all__ = [
@@ -53,6 +65,114 @@
 }


+class GeneratorGraphQL(GraphQLCore):
+    """This represents the reading version of generator, used
+    when data is retrieved from CDF using GraphQL.
+
+    It is used when retrieving data from CDF using GraphQL.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the generator.
+        data_record: The data record of the generator node.
+        name: Name for the Generator.
+        display_name: Display name for the Generator.
+        p_min: The p min field.
+        penstock: The penstock field.
+        start_cost: The start cost field.
+        start_stop_cost: The start stop cost field.
+        is_available_time_series: The is available time series field.
+        efficiency_curve: The efficiency curve field.
+        turbine_curves: The turbine efficiency curves that are connected to the Generator.
+    """
+
+    view_id = dm.ViewId("power-ops-assets", "Generator", "1")
+    name: Optional[str] = None
+    display_name: Optional[str] = Field(None, alias="displayName")
+    p_min: Optional[float] = Field(None, alias="pMin")
+    penstock: Optional[int] = None
+    start_cost: Optional[float] = Field(None, alias="startCost")
+    start_stop_cost: Union[TimeSeries, str, None] = Field(None, alias="startStopCost")
+    is_available_time_series: Union[TimeSeries, str, None] = Field(None, alias="isAvailableTimeSeries")
+    efficiency_curve: Optional[GeneratorEfficiencyCurveGraphQL] = Field(None, repr=False, alias="efficiencyCurve")
+    turbine_curves: Optional[list[TurbineEfficiencyCurveGraphQL]] = Field(
+        default=None, repr=False, alias="turbineCurves"
+    )
+
+    @model_validator(mode="before")
+    def parse_data_record(cls, values: Any) -> Any:
+        if not isinstance(values, dict):
+            return values
+        if "lastUpdatedTime" in values or "createdTime" in values:
+            values["dataRecord"] = DataRecordGraphQL(
+                created_time=values.pop("createdTime", None),
+                last_updated_time=values.pop("lastUpdatedTime", None),
+            )
+        return values
+
+    @field_validator("efficiency_curve", "turbine_curves", mode="before")
+    def parse_graphql(cls, value: Any) -> Any:
+        if not isinstance(value, dict):
+            return value
+        if "items" in value:
+            return value["items"]
+        return value
+
+    def as_read(self) -> Generator:
+        """Convert this GraphQL format of generator to the reading format."""
+        if self.data_record is None:
+            raise ValueError("This object cannot be converted to a read format because it lacks a data record.")
+        return Generator(
+            space=self.space,
+            external_id=self.external_id,
+            data_record=DataRecord(
+                version=0,
+                last_updated_time=self.data_record.last_updated_time,
+                created_time=self.data_record.created_time,
+            ),
name=self.name, + display_name=self.display_name, + p_min=self.p_min, + penstock=self.penstock, + start_cost=self.start_cost, + start_stop_cost=self.start_stop_cost, + is_available_time_series=self.is_available_time_series, + efficiency_curve=( + self.efficiency_curve.as_read() + if isinstance(self.efficiency_curve, GraphQLCore) + else self.efficiency_curve + ), + turbine_curves=[ + turbine_curve.as_read() if isinstance(turbine_curve, GraphQLCore) else turbine_curve + for turbine_curve in self.turbine_curves or [] + ], + ) + + def as_write(self) -> GeneratorWrite: + """Convert this GraphQL format of generator to the writing format.""" + return GeneratorWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + p_min=self.p_min, + penstock=self.penstock, + start_cost=self.start_cost, + start_stop_cost=self.start_stop_cost, + is_available_time_series=self.is_available_time_series, + efficiency_curve=( + self.efficiency_curve.as_write() + if isinstance(self.efficiency_curve, DomainModel) + else self.efficiency_curve + ), + turbine_curves=[ + turbine_curve.as_write() if isinstance(turbine_curve, DomainModel) else turbine_curve + for turbine_curve in self.turbine_curves or [] + ], + ) + + class Generator(DomainModel): """This represents the reading version of generator. @@ -85,7 +205,7 @@ class Generator(DomainModel): efficiency_curve: Union[GeneratorEfficiencyCurve, str, dm.NodeId, None] = Field( None, repr=False, alias="efficiencyCurve" ) - turbine_curves: Union[list[TurbineEfficiencyCurve], list[str], None] = Field( + turbine_curves: Union[list[TurbineEfficiencyCurve], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="turbineCurves" ) @@ -155,7 +275,7 @@ class GeneratorWrite(DomainModelWrite): efficiency_curve: Union[GeneratorEfficiencyCurveWrite, str, dm.NodeId, None] = Field( None, repr=False, alias="efficiencyCurve" ) - turbine_curves: Union[list[TurbineEfficiencyCurveWrite], list[str], None] = Field( + turbine_curves: Union[list[TurbineEfficiencyCurveWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="turbineCurves" ) @@ -164,6 +284,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -214,7 +335,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -234,6 +355,8 @@ def _to_instances_write( end_node=turbine_curve, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/assets/data_classes/_generator_efficiency_curve.py b/cognite/powerops/client/_generated/assets/data_classes/_generator_efficiency_curve.py index 3468ec4ac..b939af70f 100644 --- a/cognite/powerops/client/_generated/assets/data_classes/_generator_efficiency_curve.py +++ b/cognite/powerops/client/_generated/assets/data_classes/_generator_efficiency_curve.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client 
import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -37,6 +41,66 @@ } +class GeneratorEfficiencyCurveGraphQL(GraphQLCore): + """This represents the reading version of generator efficiency curve, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the generator efficiency curve. + data_record: The data record of the generator efficiency curve node. + ref: The reference value + power: The generator power values + efficiency: The generator efficiency values + """ + + view_id = dm.ViewId("power-ops-assets", "GeneratorEfficiencyCurve", "1") + ref: Optional[float] = None + power: Optional[list[float]] = None + efficiency: Optional[list[float]] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> GeneratorEfficiencyCurve: + """Convert this GraphQL format of generator efficiency curve to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return GeneratorEfficiencyCurve( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + ref=self.ref, + power=self.power, + efficiency=self.efficiency, + ) + + def as_write(self) -> GeneratorEfficiencyCurveWrite: + """Convert this GraphQL format of generator efficiency curve to the writing format.""" + return GeneratorEfficiencyCurveWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + ref=self.ref, + power=self.power, + efficiency=self.efficiency, + ) + + class GeneratorEfficiencyCurve(DomainModel): """This represents the reading version of generator efficiency curve. 
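The `GeneratorEfficiencyCurveGraphQL` class above shows the conversion pattern that repeats for every type in this changeset: a `model_validator` folds the GraphQL `createdTime`/`lastUpdatedTime` fields into a `DataRecordGraphQL`, and `as_read`/`as_write` pin the version to 0 when converting to the read and write formats. A round-trip sketch with invented values:

```python
# Sketch only: the payload is invented; real dicts come from graphql_query
# via GraphQLQueryResponse rather than being built by hand.
raw = {
    "space": "power-ops-instances",        # assumed instance space
    "externalId": "efficiency_curve_42",   # hypothetical external id
    "createdTime": "2024-03-20T12:00:00Z",
    "lastUpdatedTime": "2024-03-21T08:30:00Z",
    "ref": 100.0,
    "power": [0.0, 50.0, 100.0],
    "efficiency": [0.0, 92.5, 95.0],
}

curve = GeneratorEfficiencyCurveGraphQL.model_validate(raw)
# parse_data_record has folded the two *Time fields into curve.data_record
read_curve = curve.as_read()    # GeneratorEfficiencyCurve with version=0
write_curve = curve.as_write()  # GeneratorEfficiencyCurveWrite, existing_version=0
```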
@@ -107,6 +171,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -131,7 +196,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/assets/data_classes/_plant.py b/cognite/powerops/client/_generated/assets/data_classes/_plant.py index 3755ecfeb..4147bcb78 100644 --- a/cognite/powerops/client/_generated/assets/data_classes/_plant.py +++ b/cognite/powerops/client/_generated/assets/data_classes/_plant.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,14 +19,15 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) if TYPE_CHECKING: - from ._generator import Generator, GeneratorWrite - from ._reservoir import Reservoir, ReservoirWrite - from ._watercourse import Watercourse, WatercourseWrite + from ._generator import Generator, GeneratorGraphQL, GeneratorWrite + from ._reservoir import Reservoir, ReservoirGraphQL, ReservoirWrite + from ._watercourse import Watercourse, WatercourseGraphQL, WatercourseWrite __all__ = [ @@ -88,6 +92,152 @@ } +class PlantGraphQL(GraphQLCore): + """This represents the reading version of plant, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the plant. + data_record: The data record of the plant node. + name: Name for the Plant. + display_name: The display name field. + ordering: The order of this plant + head_loss_factor: The head loss factor field. + outlet_level: The outlet level field. + p_max: The p max field. + p_min: The p min field. + penstock_head_loss_factors: The penstock head loss factor field. + watercourse: The watercourse field. + connection_losses: The connection loss field. + p_max_time_series: The p max time series field. + p_min_time_series: The p min time series field. + water_value_time_series: The water value time series field. + feeding_fee_time_series: The feeding fee time series field. + outlet_level_time_series: The outlet level time series field. + inlet_level_time_series: The inlet level time series field. + head_direct_time_series: The head direct time series field. + inlet_reservoir: The inlet reservoir field. + generators: The generator field. 
+ """ + + view_id = dm.ViewId("power-ops-assets", "Plant", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + ordering: Optional[int] = None + head_loss_factor: Optional[float] = Field(None, alias="headLossFactor") + outlet_level: Optional[float] = Field(None, alias="outletLevel") + p_max: Optional[float] = Field(None, alias="pMax") + p_min: Optional[float] = Field(None, alias="pMin") + penstock_head_loss_factors: Optional[dict] = Field(None, alias="penstockHeadLossFactors") + watercourse: Optional[WatercourseGraphQL] = Field(None, repr=False) + connection_losses: Optional[float] = Field(None, alias="connectionLosses") + p_max_time_series: Union[TimeSeries, str, None] = Field(None, alias="pMaxTimeSeries") + p_min_time_series: Union[TimeSeries, str, None] = Field(None, alias="pMinTimeSeries") + water_value_time_series: Union[TimeSeries, str, None] = Field(None, alias="waterValueTimeSeries") + feeding_fee_time_series: Union[TimeSeries, str, None] = Field(None, alias="feedingFeeTimeSeries") + outlet_level_time_series: Union[TimeSeries, str, None] = Field(None, alias="outletLevelTimeSeries") + inlet_level_time_series: Union[TimeSeries, str, None] = Field(None, alias="inletLevelTimeSeries") + head_direct_time_series: Union[TimeSeries, str, None] = Field(None, alias="headDirectTimeSeries") + inlet_reservoir: Optional[ReservoirGraphQL] = Field(None, repr=False, alias="inletReservoir") + generators: Optional[list[GeneratorGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("watercourse", "inlet_reservoir", "generators", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> Plant: + """Convert this GraphQL format of plant to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Plant( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + head_loss_factor=self.head_loss_factor, + outlet_level=self.outlet_level, + p_max=self.p_max, + p_min=self.p_min, + penstock_head_loss_factors=self.penstock_head_loss_factors, + watercourse=self.watercourse.as_read() if isinstance(self.watercourse, GraphQLCore) else self.watercourse, + connection_losses=self.connection_losses, + p_max_time_series=self.p_max_time_series, + p_min_time_series=self.p_min_time_series, + water_value_time_series=self.water_value_time_series, + feeding_fee_time_series=self.feeding_fee_time_series, + outlet_level_time_series=self.outlet_level_time_series, + inlet_level_time_series=self.inlet_level_time_series, + head_direct_time_series=self.head_direct_time_series, + inlet_reservoir=( + self.inlet_reservoir.as_read() + if isinstance(self.inlet_reservoir, GraphQLCore) + else self.inlet_reservoir + ), + generators=[ + generator.as_read() if 
isinstance(generator, GraphQLCore) else generator + for generator in self.generators or [] + ], + ) + + def as_write(self) -> PlantWrite: + """Convert this GraphQL format of plant to the writing format.""" + return PlantWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + head_loss_factor=self.head_loss_factor, + outlet_level=self.outlet_level, + p_max=self.p_max, + p_min=self.p_min, + penstock_head_loss_factors=self.penstock_head_loss_factors, + watercourse=self.watercourse.as_write() if isinstance(self.watercourse, DomainModel) else self.watercourse, + connection_losses=self.connection_losses, + p_max_time_series=self.p_max_time_series, + p_min_time_series=self.p_min_time_series, + water_value_time_series=self.water_value_time_series, + feeding_fee_time_series=self.feeding_fee_time_series, + outlet_level_time_series=self.outlet_level_time_series, + inlet_level_time_series=self.inlet_level_time_series, + head_direct_time_series=self.head_direct_time_series, + inlet_reservoir=( + self.inlet_reservoir.as_write() + if isinstance(self.inlet_reservoir, DomainModel) + else self.inlet_reservoir + ), + generators=[ + generator.as_write() if isinstance(generator, DomainModel) else generator + for generator in self.generators or [] + ], + ) + + class Plant(DomainModel): """This represents the reading version of plant. @@ -138,7 +288,7 @@ class Plant(DomainModel): inlet_level_time_series: Union[TimeSeries, str, None] = Field(None, alias="inletLevelTimeSeries") head_direct_time_series: Union[TimeSeries, str, None] = Field(None, alias="headDirectTimeSeries") inlet_reservoir: Union[Reservoir, str, dm.NodeId, None] = Field(None, repr=False, alias="inletReservoir") - generators: Union[list[Generator], list[str], None] = Field(default=None, repr=False) + generators: Union[list[Generator], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> PlantWrite: """Convert this read version of plant to the writing version.""" @@ -234,13 +384,14 @@ class PlantWrite(DomainModelWrite): inlet_level_time_series: Union[TimeSeries, str, None] = Field(None, alias="inletLevelTimeSeries") head_direct_time_series: Union[TimeSeries, str, None] = Field(None, alias="headDirectTimeSeries") inlet_reservoir: Union[ReservoirWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="inletReservoir") - generators: Union[list[GeneratorWrite], list[str], None] = Field(default=None, repr=False) + generators: Union[list[GeneratorWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -337,7 +488,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -352,7 +503,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("power-ops-types", "isSubAssetOf") for generator in self.generators or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=generator, edge_type=edge_type, 
view_by_read_class=view_by_read_class
+                cache,
+                start_node=self,
+                end_node=generator,
+                edge_type=edge_type,
+                view_by_read_class=view_by_read_class,
+                write_none=write_none,
+                allow_version_increase=allow_version_increase,
             )
             resources.extend(other_resources)

diff --git a/cognite/powerops/client/_generated/assets/data_classes/_price_area.py b/cognite/powerops/client/_generated/assets/data_classes/_price_area.py
index 0aa719152..a195517e0 100644
--- a/cognite/powerops/client/_generated/assets/data_classes/_price_area.py
+++ b/cognite/powerops/client/_generated/assets/data_classes/_price_area.py
@@ -6,9 +6,12 @@
 from cognite.client import data_modeling as dm
 from cognite.client.data_classes import TimeSeries as CogniteTimeSeries
 from pydantic import Field
+from pydantic import field_validator, model_validator

 from ._core import (
     DEFAULT_INSTANCE_SPACE,
+    DataRecord,
+    DataRecordGraphQL,
     DataRecordWrite,
     DomainModel,
     DomainModelCore,
@@ -16,14 +19,15 @@
     DomainModelWriteList,
     DomainModelList,
     DomainRelationWrite,
+    GraphQLCore,
     ResourcesWrite,
     TimeSeries,
 )

 if TYPE_CHECKING:
-    from ._bid_method import BidMethod, BidMethodWrite
-    from ._plant import Plant, PlantWrite
-    from ._watercourse import Watercourse, WatercourseWrite
+    from ._bid_method import BidMethod, BidMethodGraphQL, BidMethodWrite
+    from ._plant import Plant, PlantGraphQL, PlantWrite
+    from ._watercourse import Watercourse, WatercourseGraphQL, WatercourseWrite


 __all__ = [
@@ -92,6 +96,148 @@
 }


+class PriceAreaGraphQL(GraphQLCore):
+    """This represents the reading version of price area, used
+    when data is retrieved from CDF using GraphQL.
+
+    It is used when retrieving data from CDF using GraphQL.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the price area.
+        data_record: The data record of the price area node.
+        name: Name for the PriceArea.
+        display_name: Display name for the PriceArea.
+        description: Description for the PriceArea.
+        timezone: The timezone of the price area
+        capacity_price_up: The capacity price up field.
+        capacity_price_down: The capacity price down field.
+        activation_price_up: The mFRR activation price (TBC)
+        activation_price_down: The mFRR activation price (TBC)
+        relative_activation: Value between -1 (100 % activation down) and 1 (100 % activation up)
+        total_capacity_allocation_up: The total capacity allocation up field.
+        total_capacity_allocation_down: The total capacity allocation down field.
+        own_capacity_allocation_up: The own capacity allocation up field.
+        own_capacity_allocation_down: The own capacity allocation down field.
+        default_method_day_ahead: Default method for day ahead bids
+        main_scenario_day_ahead: Main scenario for day ahead bids
+        day_ahead_price: Day ahead price for the price area
+        plants: The plants that are connected to the PriceArea.
+        watercourses: The watercourses that are connected to the PriceArea.
+ """ + + view_id = dm.ViewId("power-ops-assets", "PriceArea", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + description: Optional[str] = None + timezone: Optional[str] = None + capacity_price_up: Union[TimeSeries, str, None] = Field(None, alias="capacityPriceUp") + capacity_price_down: Union[TimeSeries, str, None] = Field(None, alias="capacityPriceDown") + activation_price_up: Union[TimeSeries, str, None] = Field(None, alias="activationPriceUp") + activation_price_down: Union[TimeSeries, str, None] = Field(None, alias="activationPriceDown") + relative_activation: Union[TimeSeries, str, None] = Field(None, alias="relativeActivation") + total_capacity_allocation_up: Union[TimeSeries, str, None] = Field(None, alias="totalCapacityAllocationUp") + total_capacity_allocation_down: Union[TimeSeries, str, None] = Field(None, alias="totalCapacityAllocationDown") + own_capacity_allocation_up: Union[TimeSeries, str, None] = Field(None, alias="ownCapacityAllocationUp") + own_capacity_allocation_down: Union[TimeSeries, str, None] = Field(None, alias="ownCapacityAllocationDown") + default_method_day_ahead: Optional[BidMethodGraphQL] = Field(None, repr=False, alias="defaultMethodDayAhead") + main_scenario_day_ahead: Union[TimeSeries, str, None] = Field(None, alias="mainScenarioDayAhead") + day_ahead_price: Union[TimeSeries, str, None] = Field(None, alias="dayAheadPrice") + plants: Optional[list[PlantGraphQL]] = Field(default=None, repr=False) + watercourses: Optional[list[WatercourseGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("default_method_day_ahead", "plants", "watercourses", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> PriceArea: + """Convert this GraphQL format of price area to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PriceArea( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + description=self.description, + timezone=self.timezone, + capacity_price_up=self.capacity_price_up, + capacity_price_down=self.capacity_price_down, + activation_price_up=self.activation_price_up, + activation_price_down=self.activation_price_down, + relative_activation=self.relative_activation, + total_capacity_allocation_up=self.total_capacity_allocation_up, + total_capacity_allocation_down=self.total_capacity_allocation_down, + own_capacity_allocation_up=self.own_capacity_allocation_up, + own_capacity_allocation_down=self.own_capacity_allocation_down, + default_method_day_ahead=( + self.default_method_day_ahead.as_read() + if isinstance(self.default_method_day_ahead, GraphQLCore) + else self.default_method_day_ahead + ), + main_scenario_day_ahead=self.main_scenario_day_ahead, + day_ahead_price=self.day_ahead_price, + 
plants=[plant.as_read() if isinstance(plant, GraphQLCore) else plant for plant in self.plants or []], + watercourses=[ + watercourse.as_read() if isinstance(watercourse, GraphQLCore) else watercourse + for watercourse in self.watercourses or [] + ], + ) + + def as_write(self) -> PriceAreaWrite: + """Convert this GraphQL format of price area to the writing format.""" + return PriceAreaWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + description=self.description, + timezone=self.timezone, + capacity_price_up=self.capacity_price_up, + capacity_price_down=self.capacity_price_down, + activation_price_up=self.activation_price_up, + activation_price_down=self.activation_price_down, + relative_activation=self.relative_activation, + total_capacity_allocation_up=self.total_capacity_allocation_up, + total_capacity_allocation_down=self.total_capacity_allocation_down, + own_capacity_allocation_up=self.own_capacity_allocation_up, + own_capacity_allocation_down=self.own_capacity_allocation_down, + default_method_day_ahead=( + self.default_method_day_ahead.as_write() + if isinstance(self.default_method_day_ahead, DomainModel) + else self.default_method_day_ahead + ), + main_scenario_day_ahead=self.main_scenario_day_ahead, + day_ahead_price=self.day_ahead_price, + plants=[plant.as_write() if isinstance(plant, DomainModel) else plant for plant in self.plants or []], + watercourses=[ + watercourse.as_write() if isinstance(watercourse, DomainModel) else watercourse + for watercourse in self.watercourses or [] + ], + ) + + class PriceArea(DomainModel): """This represents the reading version of price area. @@ -141,8 +287,8 @@ class PriceArea(DomainModel): ) main_scenario_day_ahead: Union[TimeSeries, str, None] = Field(None, alias="mainScenarioDayAhead") day_ahead_price: Union[TimeSeries, str, None] = Field(None, alias="dayAheadPrice") - plants: Union[list[Plant], list[str], None] = Field(default=None, repr=False) - watercourses: Union[list[Watercourse], list[str], None] = Field(default=None, repr=False) + plants: Union[list[Plant], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) + watercourses: Union[list[Watercourse], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> PriceAreaWrite: """Convert this read version of price area to the writing version.""" @@ -236,14 +382,15 @@ class PriceAreaWrite(DomainModelWrite): ) main_scenario_day_ahead: Union[TimeSeries, str, None] = Field(None, alias="mainScenarioDayAhead") day_ahead_price: Union[TimeSeries, str, None] = Field(None, alias="dayAheadPrice") - plants: Union[list[PlantWrite], list[str], None] = Field(default=None, repr=False) - watercourses: Union[list[WatercourseWrite], list[str], None] = Field(default=None, repr=False) + plants: Union[list[PlantWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) + watercourses: Union[list[WatercourseWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -349,7 +496,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + 
existing_version=None if allow_version_increase else self.data_record.existing_version,
             type=self.node_type,
             sources=[
                 dm.NodeOrEdgeData(
@@ -364,14 +511,26 @@ def _to_instances_write(
         edge_type = dm.DirectRelationReference("power-ops-types", "isSubAssetOf")
         for plant in self.plants or []:
             other_resources = DomainRelationWrite.from_edge_to_resources(
-                cache, start_node=self, end_node=plant, edge_type=edge_type, view_by_read_class=view_by_read_class
+                cache,
+                start_node=self,
+                end_node=plant,
+                edge_type=edge_type,
+                view_by_read_class=view_by_read_class,
+                write_none=write_none,
+                allow_version_increase=allow_version_increase,
             )
             resources.extend(other_resources)

         edge_type = dm.DirectRelationReference("power-ops-types", "isSubAssetOf")
         for watercourse in self.watercourses or []:
             other_resources = DomainRelationWrite.from_edge_to_resources(
-                cache, start_node=self, end_node=watercourse, edge_type=edge_type, view_by_read_class=view_by_read_class
+                cache,
+                start_node=self,
+                end_node=watercourse,
+                edge_type=edge_type,
+                view_by_read_class=view_by_read_class,
+                write_none=write_none,
+                allow_version_increase=allow_version_increase,
             )
             resources.extend(other_resources)

diff --git a/cognite/powerops/client/_generated/assets/data_classes/_reservoir.py b/cognite/powerops/client/_generated/assets/data_classes/_reservoir.py
index 23ac513ca..4230a8a98 100644
--- a/cognite/powerops/client/_generated/assets/data_classes/_reservoir.py
+++ b/cognite/powerops/client/_generated/assets/data_classes/_reservoir.py
@@ -5,9 +5,12 @@
 from cognite.client import data_modeling as dm
 from pydantic import Field
+from pydantic import field_validator, model_validator

 from ._core import (
     DEFAULT_INSTANCE_SPACE,
+    DataRecord,
+    DataRecordGraphQL,
     DataRecordWrite,
     DomainModel,
     DomainModelCore,
@@ -15,6 +18,7 @@
     DomainModelWriteList,
     DomainModelList,
     DomainRelationWrite,
+    GraphQLCore,
     ResourcesWrite,
 )

@@ -41,6 +45,66 @@
 }


+class ReservoirGraphQL(GraphQLCore):
+    """This represents the reading version of reservoir, used
+    when data is retrieved from CDF using GraphQL.
+
+    It is used when retrieving data from CDF using GraphQL.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the reservoir.
+        data_record: The data record of the reservoir node.
+        name: Name for the Reservoir.
+        display_name: Display name for the Reservoir.
+ ordering: The ordering of the reservoirs + """ + + view_id = dm.ViewId("power-ops-assets", "Reservoir", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + ordering: Optional[int] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> Reservoir: + """Convert this GraphQL format of reservoir to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Reservoir( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + ) + + def as_write(self) -> ReservoirWrite: + """Convert this GraphQL format of reservoir to the writing format.""" + return ReservoirWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + ) + + class Reservoir(DomainModel): """This represents the reading version of reservoir. @@ -107,6 +171,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -129,7 +194,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/assets/data_classes/_turbine_efficiency_curve.py b/cognite/powerops/client/_generated/assets/data_classes/_turbine_efficiency_curve.py index d1d08822c..76a2803c8 100644 --- a/cognite/powerops/client/_generated/assets/data_classes/_turbine_efficiency_curve.py +++ b/cognite/powerops/client/_generated/assets/data_classes/_turbine_efficiency_curve.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -37,6 +41,66 @@ } +class TurbineEfficiencyCurveGraphQL(GraphQLCore): + """This represents the reading version of turbine efficiency curve, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the turbine efficiency curve. + data_record: The data record of the turbine efficiency curve node. 
+ head: The reference head values + flow: The flow values + efficiency: The turbine efficiency values + """ + + view_id = dm.ViewId("power-ops-assets", "TurbineEfficiencyCurve", "1") + head: Optional[float] = None + flow: Optional[list[float]] = None + efficiency: Optional[list[float]] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> TurbineEfficiencyCurve: + """Convert this GraphQL format of turbine efficiency curve to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return TurbineEfficiencyCurve( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + head=self.head, + flow=self.flow, + efficiency=self.efficiency, + ) + + def as_write(self) -> TurbineEfficiencyCurveWrite: + """Convert this GraphQL format of turbine efficiency curve to the writing format.""" + return TurbineEfficiencyCurveWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + head=self.head, + flow=self.flow, + efficiency=self.efficiency, + ) + + class TurbineEfficiencyCurve(DomainModel): """This represents the reading version of turbine efficiency curve. @@ -107,6 +171,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -131,7 +196,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/assets/data_classes/_watercourse.py b/cognite/powerops/client/_generated/assets/data_classes/_watercourse.py index df2204127..c93e4f538 100644 --- a/cognite/powerops/client/_generated/assets/data_classes/_watercourse.py +++ b/cognite/powerops/client/_generated/assets/data_classes/_watercourse.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,12 +19,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) if TYPE_CHECKING: - from ._plant import Plant, PlantWrite + from ._plant import Plant, PlantGraphQL, PlantWrite __all__ = [ @@ -47,6 +51,82 @@ } +class WatercourseGraphQL(GraphQLCore): + """This represents the reading version of watercourse, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. 
+ external_id: The external id of the watercourse. + data_record: The data record of the watercourse node. + name: Name for the Watercourse. + display_name: Display name for the Watercourse. + production_obligation: The production obligation for the Watercourse. + penalty_limit: The penalty limit for the watercourse (used by SHOP). + plants: The plants that are connected to the Watercourse. + """ + + view_id = dm.ViewId("power-ops-assets", "Watercourse", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + production_obligation: Union[list[TimeSeries], list[str], None] = Field(None, alias="productionObligation") + penalty_limit: Optional[float] = Field(None, alias="penaltyLimit") + plants: Optional[list[PlantGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("plants", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> Watercourse: + """Convert this GraphQL format of watercourse to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Watercourse( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + production_obligation=self.production_obligation, + penalty_limit=self.penalty_limit, + plants=[plant.as_read() if isinstance(plant, GraphQLCore) else plant for plant in self.plants or []], + ) + + def as_write(self) -> WatercourseWrite: + """Convert this GraphQL format of watercourse to the writing format.""" + return WatercourseWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + production_obligation=self.production_obligation, + penalty_limit=self.penalty_limit, + plants=[plant.as_write() if isinstance(plant, DomainModel) else plant for plant in self.plants or []], + ) + + class Watercourse(DomainModel): """This represents the reading version of watercourse. 
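The `parse_graphql` field validator in `WatercourseGraphQL` above handles the GraphQL connection shape: nested list properties come back from CDF as `{"items": [...]}`, and the validator unwraps them so pydantic can validate the nested `PlantGraphQL` objects. A minimal illustration with invented data:

```python
# Sketch only: invented payload demonstrating the {"items": [...]} unwrapping
# performed by the parse_graphql validator before pydantic validation.
raw = {
    "externalId": "watercourse_a",  # hypothetical external id
    "name": "Watercourse A",
    "plants": {"items": [{"name": "Plant 1"}, {"name": "Plant 2"}]},
}

watercourse = WatercourseGraphQL.model_validate(raw)
assert [plant.name for plant in watercourse.plants] == ["Plant 1", "Plant 2"]
```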
@@ -69,7 +149,7 @@ class Watercourse(DomainModel): display_name: Optional[str] = Field(None, alias="displayName") production_obligation: Union[list[TimeSeries], list[str], None] = Field(None, alias="productionObligation") penalty_limit: Optional[float] = Field(None, alias="penaltyLimit") - plants: Union[list[Plant], list[str], None] = Field(default=None, repr=False) + plants: Union[list[Plant], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> WatercourseWrite: """Convert this read version of watercourse to the writing version.""" @@ -116,13 +196,14 @@ class WatercourseWrite(DomainModelWrite): display_name: Optional[str] = Field(None, alias="displayName") production_obligation: Union[list[TimeSeries], list[str], None] = Field(None, alias="productionObligation") penalty_limit: Optional[float] = Field(None, alias="penaltyLimit") - plants: Union[list[PlantWrite], list[str], None] = Field(default=None, repr=False) + plants: Union[list[PlantWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -150,7 +231,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -165,7 +246,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("power-ops-types", "isSubAssetOf") for plant in self.plants or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=plant, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=plant, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/day_ahead_bid/_api/_core.py b/cognite/powerops/client/_generated/day_ahead_bid/_api/_core.py index 5ff35fb51..4cecc1364 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/_api/_core.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/_api/_core.py @@ -21,6 +21,7 @@ DomainModelCore, DomainModelWrite, DomainRelationWrite, + GraphQLList, ResourcesWriteResult, T_DomainModel, T_DomainModelWrite, @@ -32,6 +33,8 @@ DomainModelCore, DomainRelation, ) +from cognite.powerops.client._generated.day_ahead_bid import data_classes + DEFAULT_LIMIT_READ = 25 DEFAULT_QUERY_LIMIT = 3 @@ -751,3 +754,47 @@ def _create_edge_filter( if filter: filters.append(filter) return dm.filters.And(*filters) + + +class GraphQLQueryResponse: + def __init__(self, data_model_id: dm.DataModelId): + self._output = GraphQLList([]) + self._data_class_by_type = _GRAPHQL_DATA_CLASS_BY_DATA_MODEL_BY_TYPE[data_model_id] + + def parse(self, response: dict[str, Any]) -> GraphQLList: + if "errors" in response: + raise RuntimeError(response["errors"]) + _, data = list(response.items())[0] + self._parse_item(data) + return self._output + + def _parse_item(self, data: dict[str, Any]) -> None: + if "items" in data: + for item in data["items"]: + self._parse_item(item) + elif "__typename" in data: + try: + item = 
self._data_class_by_type[data["__typename"]].model_validate(data) + except KeyError: + raise ValueError(f"Could not find class for type {data['__typename']}") + else: + self._output.append(item) + else: + raise RuntimeError("Missing '__typename' in GraphQL response. Cannot determine the type of the response.") + + +_GRAPHQL_DATA_CLASS_BY_DATA_MODEL_BY_TYPE = { + dm.DataModelId("power-ops-day-ahead-bid", "DayAheadBid", "1"): { + "BidDocument": data_classes.BidDocumentGraphQL, + "BidMatrix": data_classes.BidMatrixGraphQL, + "MultiScenarioMatrix": data_classes.MultiScenarioMatrixGraphQL, + "SHOPPriceScenario": data_classes.SHOPPriceScenarioGraphQL, + "SHOPPriceScenarioResult": data_classes.SHOPPriceScenarioResultGraphQL, + "BasicBidMatrix": data_classes.BasicBidMatrixGraphQL, + "SHOPMultiScenarioMethod": data_classes.SHOPMultiScenarioMethodGraphQL, + "WaterValueBasedMethod": data_classes.WaterValueBasedMethodGraphQL, + "PriceArea": data_classes.PriceAreaGraphQL, + "BidMethod": data_classes.BidMethodGraphQL, + "Alert": data_classes.AlertGraphQL, + }, +} diff --git a/cognite/powerops/client/_generated/day_ahead_bid/_api_client.py b/cognite/powerops/client/_generated/day_ahead_bid/_api_client.py index 924c01d26..634f58d5b 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/_api_client.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/_api_client.py @@ -2,7 +2,7 @@ import warnings from pathlib import Path -from typing import Sequence +from typing import Any, Sequence from cognite.client import ClientConfig, CogniteClient, data_modeling as dm from cognite.client.data_classes import TimeSeriesList @@ -19,8 +19,8 @@ from ._api.shop_price_scenario import SHOPPriceScenarioAPI from ._api.shop_price_scenario_result import SHOPPriceScenarioResultAPI from ._api.water_value_based_method import WaterValueBasedMethodAPI -from ._api._core import SequenceNotStr -from .data_classes._core import DEFAULT_INSTANCE_SPACE +from ._api._core import SequenceNotStr, GraphQLQueryResponse +from .data_classes._core import DEFAULT_INSTANCE_SPACE, GraphQLList from . import data_classes @@ -29,9 +29,9 @@ class DayAheadBidAPI: DayAheadBidAPI Generated with: - pygen = 0.99.11 - cognite-sdk = 7.26.0 - pydantic = 2.6.3 + pygen = 0.99.14 + cognite-sdk = 7.26.2 + pydantic = 2.6.4 Data Model: space: power-ops-day-ahead-bid @@ -47,7 +47,7 @@ def __init__(self, config_or_client: CogniteClient | ClientConfig): else: raise ValueError(f"Expected CogniteClient or ClientConfig, got {type(config_or_client)}") # The client name is used for aggregated logging of Pygen Usage - client.config.client_name = "CognitePygen:0.99.11" + client.config.client_name = "CognitePygen:0.99.14" view_by_read_class = { data_classes.Alert: dm.ViewId("power-ops-shared", "Alert", "1"), @@ -82,6 +82,7 @@ def upsert( items: data_classes.DomainModelWrite | Sequence[data_classes.DomainModelWrite], replace: bool = False, write_none: bool = False, + allow_version_increase: bool = False, ) -> data_classes.ResourcesWriteResult: """Add or update (upsert) items. @@ -91,17 +92,27 @@ def upsert( Or should we merge in new values for properties together with the existing values (false)? Note: This setting applies for all nodes or edges specified in the ingestion call. write_none (bool): This method will, by default, skip properties that are set to None. However, if you want to set properties to None, you can set this parameter to True. Note this only applies to properties that are nullable. 
+ allow_version_increase (bool): If set to true, the version of the instance will be increased if the instance already exists. + If you get an error: 'A version conflict caused the ingest to fail', you can set this to true to allow + the version to increase. Returns: Created instance(s), i.e., nodes, edges, and time series. """ if isinstance(items, data_classes.DomainModelWrite): - instances = items.to_instances_write(self._view_by_read_class, write_none) + instances = items.to_instances_write(self._view_by_read_class, write_none, allow_version_increase) else: instances = data_classes.ResourcesWrite() cache: set[tuple[str, str]] = set() for item in items: - instances.extend(item._to_instances_write(cache, self._view_by_read_class, write_none)) + instances.extend( + item._to_instances_write( + cache, + self._view_by_read_class, + write_none, + allow_version_increase, + ) + ) result = self._client.data_modeling.instances.apply( nodes=instances.nodes, edges=instances.edges, @@ -169,6 +180,17 @@ def delete( nodes=[(space, id) for id in external_id], ) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQL query against the DayAheadBid data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. + """ + data_model_id = dm.DataModelId("power-ops-day-ahead-bid", "DayAheadBid", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + @classmethod def azure_project( cls, tenant_id: str, client_id: str, client_secret: str, cdf_cluster: str, project: str diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/__init__.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/__init__.py index c81701e82..57b2e9b86 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/__init__.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/__init__.py @@ -1,11 +1,14 @@ from ._core import ( DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, DomainModelWrite, DomainModelList, DomainRelationWrite, + GraphQLCore, + GraphQLList, ResourcesWrite, ResourcesWriteResult, ) @@ -14,6 +17,7 @@ AlertApply, AlertApplyList, AlertFields, + AlertGraphQL, AlertList, AlertTextFields, AlertWrite, @@ -24,6 +28,7 @@ BasicBidMatrixApply, BasicBidMatrixApplyList, BasicBidMatrixFields, + BasicBidMatrixGraphQL, BasicBidMatrixList, BasicBidMatrixTextFields, BasicBidMatrixWrite, @@ -34,6 +39,7 @@ BidDocumentApply, BidDocumentApplyList, BidDocumentFields, + BidDocumentGraphQL, BidDocumentList, BidDocumentTextFields, BidDocumentWrite, @@ -44,6 +50,7 @@ BidMatrixApply, BidMatrixApplyList, BidMatrixFields, + BidMatrixGraphQL, BidMatrixList, BidMatrixTextFields, BidMatrixWrite, @@ -54,6 +61,7 @@ BidMethodApply, BidMethodApplyList, BidMethodFields, + BidMethodGraphQL, BidMethodList, BidMethodTextFields, BidMethodWrite, @@ -64,6 +72,7 @@ MultiScenarioMatrixApply, MultiScenarioMatrixApplyList, MultiScenarioMatrixFields, + MultiScenarioMatrixGraphQL, MultiScenarioMatrixList, MultiScenarioMatrixTextFields, MultiScenarioMatrixWrite, @@ -74,6 +83,7 @@ PriceAreaApply, PriceAreaApplyList, PriceAreaFields, + PriceAreaGraphQL, PriceAreaList, PriceAreaTextFields, PriceAreaWrite, @@ -84,6 +94,7 @@ SHOPMultiScenarioMethodApply, SHOPMultiScenarioMethodApplyList, SHOPMultiScenarioMethodFields, +
SHOPMultiScenarioMethodGraphQL, SHOPMultiScenarioMethodList, SHOPMultiScenarioMethodTextFields, SHOPMultiScenarioMethodWrite, @@ -94,6 +105,7 @@ SHOPPriceScenarioApply, SHOPPriceScenarioApplyList, SHOPPriceScenarioFields, + SHOPPriceScenarioGraphQL, SHOPPriceScenarioList, SHOPPriceScenarioTextFields, SHOPPriceScenarioWrite, @@ -104,6 +116,7 @@ SHOPPriceScenarioResultApply, SHOPPriceScenarioResultApplyList, SHOPPriceScenarioResultFields, + SHOPPriceScenarioResultGraphQL, SHOPPriceScenarioResultList, SHOPPriceScenarioResultWrite, SHOPPriceScenarioResultWriteList, @@ -113,6 +126,7 @@ WaterValueBasedMethodApply, WaterValueBasedMethodApplyList, WaterValueBasedMethodFields, + WaterValueBasedMethodGraphQL, WaterValueBasedMethodList, WaterValueBasedMethodTextFields, WaterValueBasedMethodWrite, @@ -120,29 +134,38 @@ ) BasicBidMatrix.model_rebuild() +BasicBidMatrixGraphQL.model_rebuild() BasicBidMatrixWrite.model_rebuild() BasicBidMatrixApply.model_rebuild() BidDocument.model_rebuild() +BidDocumentGraphQL.model_rebuild() BidDocumentWrite.model_rebuild() BidDocumentApply.model_rebuild() BidMatrix.model_rebuild() +BidMatrixGraphQL.model_rebuild() BidMatrixWrite.model_rebuild() BidMatrixApply.model_rebuild() MultiScenarioMatrix.model_rebuild() +MultiScenarioMatrixGraphQL.model_rebuild() MultiScenarioMatrixWrite.model_rebuild() MultiScenarioMatrixApply.model_rebuild() PriceArea.model_rebuild() +PriceAreaGraphQL.model_rebuild() PriceAreaWrite.model_rebuild() PriceAreaApply.model_rebuild() SHOPMultiScenarioMethod.model_rebuild() +SHOPMultiScenarioMethodGraphQL.model_rebuild() SHOPMultiScenarioMethodWrite.model_rebuild() SHOPMultiScenarioMethodApply.model_rebuild() SHOPPriceScenarioResult.model_rebuild() +SHOPPriceScenarioResultGraphQL.model_rebuild() SHOPPriceScenarioResultWrite.model_rebuild() SHOPPriceScenarioResultApply.model_rebuild() + __all__ = [ "DataRecord", + "DataRecordGraphQL", "DataRecordWrite", "ResourcesWrite", "DomainModel", @@ -150,8 +173,11 @@ "DomainModelWrite", "DomainModelList", "DomainRelationWrite", + "GraphQLCore", + "GraphQLList", "ResourcesWriteResult", "Alert", + "AlertGraphQL", "AlertWrite", "AlertApply", "AlertList", @@ -160,6 +186,7 @@ "AlertFields", "AlertTextFields", "BasicBidMatrix", + "BasicBidMatrixGraphQL", "BasicBidMatrixWrite", "BasicBidMatrixApply", "BasicBidMatrixList", @@ -168,6 +195,7 @@ "BasicBidMatrixFields", "BasicBidMatrixTextFields", "BidDocument", + "BidDocumentGraphQL", "BidDocumentWrite", "BidDocumentApply", "BidDocumentList", @@ -176,6 +204,7 @@ "BidDocumentFields", "BidDocumentTextFields", "BidMatrix", + "BidMatrixGraphQL", "BidMatrixWrite", "BidMatrixApply", "BidMatrixList", @@ -184,6 +213,7 @@ "BidMatrixFields", "BidMatrixTextFields", "BidMethod", + "BidMethodGraphQL", "BidMethodWrite", "BidMethodApply", "BidMethodList", @@ -192,6 +222,7 @@ "BidMethodFields", "BidMethodTextFields", "MultiScenarioMatrix", + "MultiScenarioMatrixGraphQL", "MultiScenarioMatrixWrite", "MultiScenarioMatrixApply", "MultiScenarioMatrixList", @@ -200,6 +231,7 @@ "MultiScenarioMatrixFields", "MultiScenarioMatrixTextFields", "PriceArea", + "PriceAreaGraphQL", "PriceAreaWrite", "PriceAreaApply", "PriceAreaList", @@ -208,6 +240,7 @@ "PriceAreaFields", "PriceAreaTextFields", "SHOPMultiScenarioMethod", + "SHOPMultiScenarioMethodGraphQL", "SHOPMultiScenarioMethodWrite", "SHOPMultiScenarioMethodApply", "SHOPMultiScenarioMethodList", @@ -216,6 +249,7 @@ "SHOPMultiScenarioMethodFields", "SHOPMultiScenarioMethodTextFields", "SHOPPriceScenario", + "SHOPPriceScenarioGraphQL", 
"SHOPPriceScenarioWrite", "SHOPPriceScenarioApply", "SHOPPriceScenarioList", @@ -224,6 +258,7 @@ "SHOPPriceScenarioFields", "SHOPPriceScenarioTextFields", "SHOPPriceScenarioResult", + "SHOPPriceScenarioResultGraphQL", "SHOPPriceScenarioResultWrite", "SHOPPriceScenarioResultApply", "SHOPPriceScenarioResultList", @@ -231,6 +266,7 @@ "SHOPPriceScenarioResultApplyList", "SHOPPriceScenarioResultFields", "WaterValueBasedMethod", + "WaterValueBasedMethodGraphQL", "WaterValueBasedMethodWrite", "WaterValueBasedMethodApply", "WaterValueBasedMethodList", diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_alert.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_alert.py index 1adf4be04..7317c06ff 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_alert.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_alert.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,6 +19,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -49,6 +53,86 @@ } +class AlertGraphQL(GraphQLCore): + """This represents the reading version of alert, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the alert. + data_record: The data record of the alert node. + time: Timestamp that the alert occurred (within the workflow) + title: Summary description of the alert + description: Detailed description of the alert + severity: CRITICAL (calculation could not completed) WARNING (calculation completed, with major issue) INFO (calculation completed, with minor issues) + alert_type: Classification of the alert (not in current alerting implementation) + status_code: Unique status code for the alert. May be used by the frontend to avoid use of hardcoded description (i.e. like a translation) + event_ids: An array of associated alert CDF Events (e.g. 
SHOP Run events) + calculation_run: The identifier of the parent Bid Calculation (required so that alerts can be created before the BidDocument) + """ + + view_id = dm.ViewId("power-ops-shared", "Alert", "1") + time: Optional[datetime.datetime] = None + title: Optional[str] = None + description: Optional[str] = None + severity: Optional[str] = None + alert_type: Optional[str] = Field(None, alias="alertType") + status_code: Optional[int] = Field(None, alias="statusCode") + event_ids: Optional[list[int]] = Field(None, alias="eventIds") + calculation_run: Optional[str] = Field(None, alias="calculationRun") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> Alert: + """Convert this GraphQL format of alert to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Alert( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + time=self.time, + title=self.title, + description=self.description, + severity=self.severity, + alert_type=self.alert_type, + status_code=self.status_code, + event_ids=self.event_ids, + calculation_run=self.calculation_run, + ) + + def as_write(self) -> AlertWrite: + """Convert this GraphQL format of alert to the writing format.""" + return AlertWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + time=self.time, + title=self.title, + description=self.description, + severity=self.severity, + alert_type=self.alert_type, + status_code=self.status_code, + event_ids=self.event_ids, + calculation_run=self.calculation_run, + ) + + class Alert(DomainModel): """This represents the reading version of alert.
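Worth pausing on how the generated pieces above compose: `graphql_query` issues the query through `cognite-sdk`, `GraphQLQueryResponse` picks a data class per `__typename`, and the resulting `AlertGraphQL` objects convert to the read format. A minimal sketch, not part of the diff: the credentials and instance data are hypothetical, and it assumes the package root re-exports `DayAheadBidAPI`. Note that `__typename`, `space`, `createdTime`, and `lastUpdatedTime` are selected explicitly because the parser and `as_read` depend on them.

```python
from cognite.powerops.client._generated.day_ahead_bid import DayAheadBidAPI

# Hypothetical credentials; any CogniteClient or ClientConfig also works.
power_ops = DayAheadBidAPI.azure_project(
    tenant_id="<tenant-id>",
    client_id="<client-id>",
    client_secret="<client-secret>",
    cdf_cluster="<cluster>",
    project="<project>",
)

# __typename drives class selection in GraphQLQueryResponse, while the two
# timestamps are what parse_data_record folds into the data record.
alerts = power_ops.graphql_query(
    """
    query {
      listAlert(first: 10) {
        items {
          __typename
          space
          externalId
          title
          severity
          createdTime
          lastUpdatedTime
        }
      }
    }
    """
)
read_alerts = [alert.as_read() for alert in alerts]  # GraphQLList -> Alert objects
```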
@@ -140,6 +224,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -177,7 +262,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_basic_bid_matrix.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_basic_bid_matrix.py index dd34bb276..2933095d0 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_basic_bid_matrix.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_basic_bid_matrix.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_matrix import BidMatrix, BidMatrixWrite if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_method import BidMethod, BidMethodWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_method import BidMethod, BidMethodGraphQL, BidMethodWrite __all__ = [ @@ -47,6 +51,86 @@ } +class BasicBidMatrixGraphQL(GraphQLCore): + """This represents the reading version of basic bid matrix, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the basic bid matrix. + data_record: The data record of the basic bid matrix node. + resource_cost: The resource cost field. + matrix: The matrix field. + asset_type: The asset type field. + asset_id: The asset id field. + method: The method field. + alerts: The alert field. 
+ """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "BasicBidMatrix", "1") + resource_cost: Optional[str] = Field(None, alias="resourceCost") + matrix: Union[str, None] = None + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + method: Optional[BidMethodGraphQL] = Field(None, repr=False) + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("method", "alerts", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BasicBidMatrix: + """Convert this GraphQL format of basic bid matrix to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BasicBidMatrix( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + ) + + def as_write(self) -> BasicBidMatrixWrite: + """Convert this GraphQL format of basic bid matrix to the writing format.""" + return BasicBidMatrixWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + ) + + class BasicBidMatrix(BidMatrix): """This represents the reading version of basic bid matrix. 
@@ -118,6 +202,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -151,7 +236,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -166,7 +251,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("power-ops-types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_document.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_document.py index a98ed11dc..cb2ec00f2 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_document.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_document.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,14 +19,15 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_matrix import BidMatrix, BidMatrixWrite - from ._bid_method import BidMethod, BidMethodWrite - from ._price_area import PriceArea, PriceAreaWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_matrix import BidMatrix, BidMatrixGraphQL, BidMatrixWrite + from ._bid_method import BidMethod, BidMethodGraphQL, BidMethodWrite + from ._price_area import PriceArea, PriceAreaGraphQL, PriceAreaWrite __all__ = [ @@ -50,6 +54,106 @@ } + +class BidDocumentGraphQL(GraphQLCore): + """This represents the reading version of bid document, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid document. + data_record: The data record of the bid document node. + name: Unique name for a given instance of a Bid Document. A combination of name, priceArea, date and startCalculation. + delivery_date: The date of the Bid. + start_calculation: Timestamp of when the Bid calculation workflow started. + end_calculation: Timestamp of when the Bid calculation workflow completed. + is_complete: Indicates that the Bid calculation workflow has completed (although it has not necessarily succeeded). + alerts: An array of calculation level Alerts. + price_area: The price area field. + method: The method field. + total: The total field. + partials: The partial field.
+ """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "BidDocument", "1") + name: Optional[str] = None + delivery_date: Optional[datetime.date] = Field(None, alias="deliveryDate") + start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") + end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") + is_complete: Optional[bool] = Field(None, alias="isComplete") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + price_area: Optional[PriceAreaGraphQL] = Field(None, repr=False, alias="priceArea") + method: Optional[BidMethodGraphQL] = Field(None, repr=False) + total: Optional[BidMatrixGraphQL] = Field(None, repr=False) + partials: Optional[list[BidMatrixGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "price_area", "method", "total", "partials", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidDocument: + """Convert this GraphQL format of bid document to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidDocument( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + delivery_date=self.delivery_date, + start_calculation=self.start_calculation, + end_calculation=self.end_calculation, + is_complete=self.is_complete, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + price_area=self.price_area.as_read() if isinstance(self.price_area, GraphQLCore) else self.price_area, + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + total=self.total.as_read() if isinstance(self.total, GraphQLCore) else self.total, + partials=[ + partial.as_read() if isinstance(partial, GraphQLCore) else partial for partial in self.partials or [] + ], + ) + + def as_write(self) -> BidDocumentWrite: + """Convert this GraphQL format of bid document to the writing format.""" + return BidDocumentWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + delivery_date=self.delivery_date, + start_calculation=self.start_calculation, + end_calculation=self.end_calculation, + is_complete=self.is_complete, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + price_area=self.price_area.as_write() if isinstance(self.price_area, DomainModel) else self.price_area, + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + total=self.total.as_write() if isinstance(self.total, DomainModel) else self.total, + partials=[ + partial.as_write() if isinstance(partial, DomainModel) else partial for partial in self.partials or [] + ], + ) + + class BidDocument(DomainModel): """This represents the reading version of 
bid document. @@ -80,11 +184,11 @@ class BidDocument(DomainModel): start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") is_complete: Optional[bool] = Field(None, alias="isComplete") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) price_area: Union[PriceArea, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") method: Union[BidMethod, str, dm.NodeId, None] = Field(None, repr=False) total: Union[BidMatrix, str, dm.NodeId, None] = Field(None, repr=False) - partials: Union[list[BidMatrix], list[str], None] = Field(default=None, repr=False) + partials: Union[list[BidMatrix], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidDocumentWrite: """Convert this read version of bid document to the writing version.""" @@ -146,17 +250,18 @@ class BidDocumentWrite(DomainModelWrite): start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") is_complete: Optional[bool] = Field(None, alias="isComplete") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) price_area: Union[PriceAreaWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") method: Union[BidMethodWrite, str, dm.NodeId, None] = Field(None, repr=False) total: Union[BidMatrixWrite, str, dm.NodeId, None] = Field(None, repr=False) - partials: Union[list[BidMatrixWrite], list[str], None] = Field(default=None, repr=False) + partials: Union[list[BidMatrixWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -209,7 +314,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -224,14 +329,26 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("power-ops-types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) edge_type = dm.DirectRelationReference("power-ops-types", "partialBid") for partial in self.partials or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=partial, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=partial, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) 
resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_matrix.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_matrix.py index e942c6085..35e376fda 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_matrix.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_matrix.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,12 +18,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_method import BidMethod, BidMethodWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_method import BidMethod, BidMethodGraphQL, BidMethodWrite __all__ = [ @@ -46,6 +50,86 @@ } +class BidMatrixGraphQL(GraphQLCore): + """This represents the reading version of bid matrix, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid matrix. + data_record: The data record of the bid matrix node. + resource_cost: The resource cost field. + matrix: The matrix field. + asset_type: The asset type field. + asset_id: The asset id field. + method: The method field. + alerts: The alert field. + """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "BidMatrix", "1") + resource_cost: Optional[str] = Field(None, alias="resourceCost") + matrix: Union[str, None] = None + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + method: Optional[BidMethodGraphQL] = Field(None, repr=False) + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("method", "alerts", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidMatrix: + """Convert this GraphQL format of bid matrix to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMatrix( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + ) + + def as_write(self) -> BidMatrixWrite: + """Convert this GraphQL format of bid matrix to the writing format.""" + return BidMatrixWrite( + 
space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + ) + + class BidMatrix(DomainModel): """This represents the reading version of bid matrix. @@ -70,7 +154,7 @@ class BidMatrix(DomainModel): asset_type: Optional[str] = Field(None, alias="assetType") asset_id: Optional[str] = Field(None, alias="assetId") method: Union[BidMethod, str, dm.NodeId, None] = Field(None, repr=False) - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidMatrixWrite: """Convert this read version of bid matrix to the writing version.""" @@ -120,13 +204,14 @@ class BidMatrixWrite(DomainModelWrite): asset_type: Optional[str] = Field(None, alias="assetType") asset_id: Optional[str] = Field(None, alias="assetId") method: Union[BidMethodWrite, str, dm.NodeId, None] = Field(None, repr=False) - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -158,7 +243,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -173,7 +258,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("power-ops-types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_method.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_method.py index ad8559ce8..2fcc038ab 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_method.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_bid_method.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -38,6 +42,58 @@ } +class BidMethodGraphQL(GraphQLCore): + """This represents the reading version of bid method, used + when data is retrieved from CDF using GraphQL. 
+ + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid method. + data_record: The data record of the bid method node. + name: Name for the BidMethod + """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "BidMethod", "1") + name: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> BidMethod: + """Convert this GraphQL format of bid method to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMethod( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + ) + + def as_write(self) -> BidMethodWrite: + """Convert this GraphQL format of bid method to the writing format.""" + return BidMethodWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + ) + + class BidMethod(DomainModel): """This represents the reading version of bid method. @@ -94,6 +150,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -110,7 +167,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_core.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_core.py index a0a580fc0..d9f0159d6 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_core.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_core.py @@ -2,7 +2,7 @@ import datetime import warnings -from abc import abstractmethod +from abc import abstractmethod, ABC from collections import UserList from collections.abc import Collection, Mapping from dataclasses import dataclass, field @@ -61,9 +61,9 @@ def extend(self, other: ResourcesWrite) -> None: @dataclass class ResourcesWriteResult: - nodes: dm.NodeApplyResultList - edges: dm.EdgeApplyResultList - time_series: TimeSeriesList + nodes: dm.NodeApplyResultList = field(default_factory=lambda: dm.NodeApplyResultList([])) + edges: dm.EdgeApplyResultList = field(default_factory=lambda: dm.EdgeApplyResultList([])) + time_series: TimeSeriesList = field(default_factory=lambda: TimeSeriesList([])) # Arbitrary types are allowed to be able to use the TimeSeries class @@ -76,6 +76,66 @@ def _repr_html_(self) -> str: return self.to_pandas().to_frame("value")._repr_html_() # type: ignore[operator] +class DataRecordGraphQL(Core): + last_updated_time: Optional[datetime.datetime] = Field(None, alias="lastUpdatedTime") + created_time: 
Optional[datetime.datetime] = Field(None, alias="createdTime") + + +class GraphQLCore(Core, ABC): + view_id: ClassVar[dm.ViewId] + space: Optional[str] = None + external_id: Optional[str] = Field(None, alias="externalId") + data_record: Optional[DataRecordGraphQL] = Field(None, alias="dataRecord") + + +class GraphQLList(UserList): + def __init__(self, nodes: Collection[GraphQLCore] | None = None): + super().__init__(nodes or []) + + # The dunder implementations are to get proper type hints + def __iter__(self) -> Iterator[GraphQLCore]: + return super().__iter__() + + @overload + def __getitem__(self, item: int) -> GraphQLCore: ... + + @overload + def __getitem__(self, item: slice) -> GraphQLList: ... + + def __getitem__(self, item: int | slice) -> GraphQLCore | GraphQLList: + if isinstance(item, slice): + return self.__class__(self.data[item]) + elif isinstance(item, int): + return self.data[item] + else: + raise TypeError(f"Expected int or slice, got {type(item)}") + + def dump(self) -> list[dict[str, Any]]: + return [node.model_dump() for node in self.data] + + def to_pandas(self) -> pd.DataFrame: + """ + Convert the list of nodes to a pandas.DataFrame. + + Returns: + A pandas.DataFrame with the nodes as rows. + """ + df = pd.DataFrame(self.dump()) + if df.empty: + df = pd.DataFrame(columns=GraphQLCore.model_fields) + # Reorder columns to have the most relevant first + id_columns = ["space", "external_id"] + end_columns = ["data_record"] + fixed_columns = set(id_columns + end_columns) + columns = ( + id_columns + [col for col in df if col not in fixed_columns] + [col for col in end_columns if col in df] + ) + return df[columns] + + def _repr_html_(self) -> str: + return self.to_pandas()._repr_html_() # type: ignore[operator] + + class DomainModelCore(Core): space: str external_id: str = Field(min_length=1, max_length=255, alias="externalId") @@ -199,14 +259,17 @@ class DataRecordWriteList(_DataRecordListCore[DataRecordWrite]): _INSTANCE = DataRecordWrite -class DomainModelWrite(DomainModelCore, extra=Extra.forbid, populate_by_name=True): +class DomainModelWrite(DomainModelCore, extra=Extra.ignore, populate_by_name=True): external_id_factory: ClassVar[Optional[Callable[[type[DomainModelWrite], dict], str]]] = None data_record: DataRecordWrite = Field(default_factory=DataRecordWrite) def to_instances_write( - self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False + self, + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, + write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: - return self._to_instances_write(set(), view_by_read_class, write_none) + return self._to_instances_write(set(), view_by_read_class, write_none, allow_version_increase) def to_instances_apply( self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False @@ -224,6 +287,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: raise NotImplementedError() @@ -340,12 +404,15 @@ def data_records(self) -> DataRecordWriteList: return DataRecordWriteList([node.data_record for node in self]) def to_instances_write( - self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False + self, + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, + write_none: bool = False, +
allow_version_increase: bool = False, ) -> ResourcesWrite: cache: set[tuple[str, str]] = set() domains = ResourcesWrite() for node in self: - result = node._to_instances_write(cache, view_by_read_class, write_none) + result = node._to_instances_write(cache, view_by_read_class, write_none, allow_version_increase) domains.extend(result) return domains @@ -425,24 +492,30 @@ def _to_instances_write( edge_type: dm.DirectRelationReference, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: raise NotImplementedError() @classmethod def create_edge( - cls, start_node: DomainModelWrite | str, end_node: DomainModelWrite | str, edge_type: dm.DirectRelationReference + cls, + start_node: DomainModelWrite | str | dm.NodeId, + end_node: DomainModelWrite | str | dm.NodeId, + edge_type: dm.DirectRelationReference, ) -> dm.EdgeApply: - if isinstance(start_node, DomainModelWrite): - space = start_node.space - elif isinstance(start_node, DomainModelWrite): + if isinstance(start_node, (DomainModelWrite, dm.NodeId)): space = start_node.space + elif isinstance(end_node, (DomainModelWrite, dm.NodeId)): + space = end_node.space else: - raise TypeError(f"Either pass in a start or end node of type {DomainRelationWrite.__name__}") + space = DEFAULT_INSTANCE_SPACE if isinstance(end_node, str): end_ref = dm.DirectRelationReference(space, end_node) elif isinstance(end_node, DomainModelWrite): end_ref = end_node.as_direct_reference() + elif isinstance(end_node, dm.NodeId): + end_ref = dm.DirectRelationReference(end_node.space, end_node.external_id) else: raise TypeError(f"Expected str or subclass of {DomainRelationWrite.__name__}, got {type(end_node)}") @@ -450,6 +523,8 @@ def create_edge( start_ref = dm.DirectRelationReference(space, start_node) elif isinstance(start_node, DomainModelWrite): start_ref = start_node.as_direct_reference() + elif isinstance(start_node, dm.NodeId): + start_ref = dm.DirectRelationReference(start_node.space, start_node.external_id) else: raise TypeError(f"Expected str or subclass of {DomainRelationWrite.__name__}, got {type(start_node)}") @@ -465,11 +540,12 @@ def create_edge( def from_edge_to_resources( cls, cache: set[tuple[str, str]], - start_node: DomainModelWrite | str, - end_node: DomainModelWrite | str, + start_node: DomainModelWrite | str | dm.NodeId, + end_node: DomainModelWrite | str | dm.NodeId, edge_type: dm.DirectRelationReference, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() edge = DomainRelationWrite.create_edge(start_node, end_node, edge_type) @@ -479,10 +555,20 @@ def from_edge_to_resources( cache.add((edge.space, edge.external_id)) if isinstance(end_node, DomainModelWrite): - other_resources = end_node._to_instances_write(cache, view_by_read_class, write_none) + other_resources = end_node._to_instances_write( + cache, + view_by_read_class, + write_none, + allow_version_increase, + ) resources.extend(other_resources) if isinstance(start_node, DomainModelWrite): - other_resources = start_node._to_instances_write(cache, view_by_read_class, write_none) + other_resources = start_node._to_instances_write( + cache, + view_by_read_class, + write_none, + allow_version_increase, + ) resources.extend(other_resources) return resources diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_multi_scenario_matrix.py 
b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_multi_scenario_matrix.py index 5edf2733c..734cfd793 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_multi_scenario_matrix.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_multi_scenario_matrix.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,14 +18,19 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_matrix import BidMatrix, BidMatrixWrite if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_method import BidMethod, BidMethodWrite - from ._shop_price_scenario_result import SHOPPriceScenarioResult, SHOPPriceScenarioResultWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_method import BidMethod, BidMethodGraphQL, BidMethodWrite + from ._shop_price_scenario_result import ( + SHOPPriceScenarioResult, + SHOPPriceScenarioResultGraphQL, + SHOPPriceScenarioResultWrite, + ) __all__ = [ @@ -48,6 +56,98 @@ } +class MultiScenarioMatrixGraphQL(GraphQLCore): + """This represents the reading version of multi scenario matrix, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the multi scenario matrix. + data_record: The data record of the multi scenario matrix node. + resource_cost: The resource cost field. + matrix: The matrix field. + asset_type: The asset type field. + asset_id: The asset id field. + method: The method field. + alerts: The alert field. + scenario_results: An array of results, one for each scenario. 
+ """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "MultiScenarioMatrix", "1") + resource_cost: Optional[str] = Field(None, alias="resourceCost") + matrix: Union[str, None] = None + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + method: Optional[BidMethodGraphQL] = Field(None, repr=False) + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + scenario_results: Optional[list[SHOPPriceScenarioResultGraphQL]] = Field( + default=None, repr=False, alias="scenarioResults" + ) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("method", "alerts", "scenario_results", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> MultiScenarioMatrix: + """Convert this GraphQL format of multi scenario matrix to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return MultiScenarioMatrix( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + scenario_results=[ + scenario_result.as_read() if isinstance(scenario_result, GraphQLCore) else scenario_result + for scenario_result in self.scenario_results or [] + ], + ) + + def as_write(self) -> MultiScenarioMatrixWrite: + """Convert this GraphQL format of multi scenario matrix to the writing format.""" + return MultiScenarioMatrixWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + scenario_results=[ + scenario_result.as_write() if isinstance(scenario_result, DomainModel) else scenario_result + for scenario_result in self.scenario_results or [] + ], + ) + + class MultiScenarioMatrix(BidMatrix): """This represents the reading version of multi scenario matrix. 
@@ -69,7 +169,7 @@ class MultiScenarioMatrix(BidMatrix): node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "power-ops-types", "DayAheadMultiScenarioMatrix" ) - scenario_results: Union[list[SHOPPriceScenarioResult], list[str], None] = Field( + scenario_results: Union[list[SHOPPriceScenarioResult], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="scenarioResults" ) @@ -122,7 +222,7 @@ class MultiScenarioMatrixWrite(BidMatrixWrite): node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "power-ops-types", "DayAheadMultiScenarioMatrix" ) - scenario_results: Union[list[SHOPPriceScenarioResultWrite], list[str], None] = Field( + scenario_results: Union[list[SHOPPriceScenarioResultWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="scenarioResults" ) @@ -131,6 +231,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -164,7 +265,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -179,7 +280,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("power-ops-types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) @@ -191,6 +298,8 @@ def _to_instances_write( end_node=scenario_result, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_price_area.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_price_area.py index 4cb2dcfa4..74f706ee9 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_price_area.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_price_area.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,12 +19,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) if TYPE_CHECKING: - from ._bid_method import BidMethod, BidMethodWrite + from ._bid_method import BidMethod, BidMethodGraphQL, BidMethodWrite __all__ = [ @@ -47,6 +51,86 @@ } +class PriceAreaGraphQL(GraphQLCore): + """This represents the reading version of price area, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. 
+ external_id: The external id of the price area. + data_record: The data record of the price area node. + name: The name field. + default_method: The default method field. + timezone: The timezone field. + main_scenario: The main scenario field. + price_scenarios: The price scenario field. + """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "PriceArea", "1") + name: Optional[str] = None + default_method: Optional[BidMethodGraphQL] = Field(None, repr=False, alias="defaultMethod") + timezone: Optional[str] = None + main_scenario: Union[TimeSeries, str, None] = Field(None, alias="mainScenario") + price_scenarios: Union[list[TimeSeries], list[str], None] = Field(None, alias="priceScenarios") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("default_method", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> PriceArea: + """Convert this GraphQL format of price area to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PriceArea( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + default_method=( + self.default_method.as_read() if isinstance(self.default_method, GraphQLCore) else self.default_method + ), + timezone=self.timezone, + main_scenario=self.main_scenario, + price_scenarios=self.price_scenarios, + ) + + def as_write(self) -> PriceAreaWrite: + """Convert this GraphQL format of price area to the writing format.""" + return PriceAreaWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + default_method=( + self.default_method.as_write() if isinstance(self.default_method, DomainModel) else self.default_method + ), + timezone=self.timezone, + main_scenario=self.main_scenario, + price_scenarios=self.price_scenarios, + ) + + class PriceArea(DomainModel): """This represents the reading version of price area. 
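The recurring `existing_version=None if allow_version_increase else ...` change above is the whole mechanism behind the new `upsert` flag: `existing_version` is CDF's optimistic-locking token, and sending `None` makes the API bump the version instead of rejecting the write. A sketch of the observable effect, with a hypothetical space and external id:

```python
from cognite.powerops.client._generated.day_ahead_bid.data_classes import BidMethodWrite

method = BidMethodWrite(
    space="power-ops-instance",        # hypothetical
    external_id="method_water_value",  # hypothetical
    name="water_value",
)
resources = method.to_instances_write(allow_version_increase=True)
# The version check is skipped, so a concurrent writer can no longer trigger
# 'A version conflict caused the ingest to fail'.
assert resources.nodes[0].existing_version is None
```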
@@ -125,6 +209,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -163,7 +248,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_multi_scenario_method.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_multi_scenario_method.py index 8b07d7b46..5a822d9b9 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_multi_scenario_method.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_multi_scenario_method.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,12 +18,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_method import BidMethod, BidMethodWrite if TYPE_CHECKING: - from ._shop_price_scenario import SHOPPriceScenario, SHOPPriceScenarioWrite + from ._shop_price_scenario import SHOPPriceScenario, SHOPPriceScenarioGraphQL, SHOPPriceScenarioWrite __all__ = [ @@ -44,6 +48,80 @@ } +class SHOPMultiScenarioMethodGraphQL(GraphQLCore): + """This represents the reading version of shop multi scenario method, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the shop multi scenario method. + data_record: The data record of the shop multi scenario method node. + name: Name for the BidMethod + shop_cases: The shop case field. + price_scenarios: An array of scenarios for this bid method. 
+ """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "SHOPMultiScenarioMethod", "1") + name: Optional[str] = None + shop_cases: Optional[list[str]] = Field(None, alias="shopCases") + price_scenarios: Optional[list[SHOPPriceScenarioGraphQL]] = Field(default=None, repr=False, alias="priceScenarios") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("price_scenarios", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> SHOPMultiScenarioMethod: + """Convert this GraphQL format of shop multi scenario method to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return SHOPMultiScenarioMethod( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + shop_cases=self.shop_cases, + price_scenarios=[ + price_scenario.as_read() if isinstance(price_scenario, GraphQLCore) else price_scenario + for price_scenario in self.price_scenarios or [] + ], + ) + + def as_write(self) -> SHOPMultiScenarioMethodWrite: + """Convert this GraphQL format of shop multi scenario method to the writing format.""" + return SHOPMultiScenarioMethodWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + shop_cases=self.shop_cases, + price_scenarios=[ + price_scenario.as_write() if isinstance(price_scenario, DomainModel) else price_scenario + for price_scenario in self.price_scenarios or [] + ], + ) + + class SHOPMultiScenarioMethod(BidMethod): """This represents the reading version of shop multi scenario method. 
@@ -62,7 +140,7 @@ class SHOPMultiScenarioMethod(BidMethod): "power-ops-types", "DayAheadSHOPMultiScenarioMethod" ) shop_cases: Optional[list[str]] = Field(None, alias="shopCases") - price_scenarios: Union[list[SHOPPriceScenario], list[str], None] = Field( + price_scenarios: Union[list[SHOPPriceScenario], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="priceScenarios" ) @@ -108,7 +186,7 @@ class SHOPMultiScenarioMethodWrite(BidMethodWrite): "power-ops-types", "DayAheadSHOPMultiScenarioMethod" ) shop_cases: Optional[list[str]] = Field(None, alias="shopCases") - price_scenarios: Union[list[SHOPPriceScenarioWrite], list[str], None] = Field( + price_scenarios: Union[list[SHOPPriceScenarioWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="priceScenarios" ) @@ -117,6 +195,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -138,7 +217,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -158,6 +237,8 @@ def _to_instances_write( end_node=price_scenario, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_price_scenario.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_price_scenario.py index 170b18510..2d60daa6c 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_price_scenario.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_price_scenario.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,6 +18,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) @@ -41,6 +45,62 @@ } +class SHOPPriceScenarioGraphQL(GraphQLCore): + """This represents the reading version of shop price scenario, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the shop price scenario. + data_record: The data record of the shop price scenario node. + name: Name for the BidMethod + price: The price field. 
+ """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "SHOPPriceScenario", "1") + name: Optional[str] = None + price: Union[TimeSeries, str, None] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> SHOPPriceScenario: + """Convert this GraphQL format of shop price scenario to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return SHOPPriceScenario( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + price=self.price, + ) + + def as_write(self) -> SHOPPriceScenarioWrite: + """Convert this GraphQL format of shop price scenario to the writing format.""" + return SHOPPriceScenarioWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + price=self.price, + ) + + class SHOPPriceScenario(DomainModel): """This represents the reading version of shop price scenario. @@ -102,6 +162,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -126,7 +187,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_price_scenario_result.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_price_scenario_result.py index 68377ba97..f13ac3dd2 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_price_scenario_result.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_shop_price_scenario_result.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,12 +19,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) if TYPE_CHECKING: - from ._shop_price_scenario import SHOPPriceScenario, SHOPPriceScenarioWrite + from ._shop_price_scenario import SHOPPriceScenario, SHOPPriceScenarioGraphQL, SHOPPriceScenarioWrite __all__ = [ @@ -45,6 +49,78 @@ } +class SHOPPriceScenarioResultGraphQL(GraphQLCore): + """This represents the reading version of shop price scenario result, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. 
+ external_id: The external id of the shop price scenario result. + data_record: The data record of the shop price scenario result node. + price: The price field. + production: The production field. + price_scenario: The price scenario field. + """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "SHOPPriceScenarioResult", "1") + price: Union[TimeSeries, str, None] = None + production: Union[TimeSeries, str, None] = None + price_scenario: Optional[SHOPPriceScenarioGraphQL] = Field(None, repr=False, alias="priceScenario") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("price_scenario", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> SHOPPriceScenarioResult: + """Convert this GraphQL format of shop price scenario result to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return SHOPPriceScenarioResult( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + price=self.price, + production=self.production, + price_scenario=( + self.price_scenario.as_read() if isinstance(self.price_scenario, GraphQLCore) else self.price_scenario + ), + ) + + def as_write(self) -> SHOPPriceScenarioResultWrite: + """Convert this GraphQL format of shop price scenario result to the writing format.""" + return SHOPPriceScenarioResultWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + price=self.price, + production=self.production, + price_scenario=( + self.price_scenario.as_write() if isinstance(self.price_scenario, DomainModel) else self.price_scenario + ), + ) + + class SHOPPriceScenarioResult(DomainModel): """This represents the reading version of shop price scenario result. 
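`SHOPPriceScenarioResultGraphQL` also shows the second validator pattern: `parse_graphql` unwraps relation fields that the GraphQL API returns as `{"items": [...]}` connections, while direct relations arrive inline and are validated straight into the nested `*GraphQL` class. A sketch of the nested conversion with invented values; the inner object carries its own timestamps so that its `as_read` can build a `DataRecord`:

```python
from cognite.powerops.client._generated.day_ahead_bid.data_classes._shop_price_scenario_result import (
    SHOPPriceScenarioResultGraphQL,
)

raw = {
    "space": "power-ops-instance-space",  # invented values throughout
    "externalId": "shop_result_demo",
    "createdTime": "2024-03-21T12:00:00+00:00",
    "lastUpdatedTime": "2024-03-21T12:00:00+00:00",
    "priceScenario": {  # direct relation returned inline, not as {"items": [...]}
        "space": "power-ops-instance-space",
        "externalId": "scenario_high",
        "name": "high",
        "createdTime": "2024-03-21T12:00:00+00:00",
        "lastUpdatedTime": "2024-03-21T12:00:00+00:00",
    },
}

result = SHOPPriceScenarioResultGraphQL.model_validate(raw)
read = result.as_read()  # the nested scenario is converted via its own as_read()
assert read.price_scenario.name == "high"
```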
@@ -117,6 +193,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -152,7 +229,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_water_value_based_method.py b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_water_value_based_method.py index 4153055b5..0c3f84847 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_water_value_based_method.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/data_classes/_water_value_based_method.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_method import BidMethod, BidMethodWrite @@ -39,6 +43,58 @@ } +class WaterValueBasedMethodGraphQL(GraphQLCore): + """This represents the reading version of water value based method, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the water value based method. + data_record: The data record of the water value based method node. + name: Name for the BidMethod + """ + + view_id = dm.ViewId("power-ops-day-ahead-bid", "WaterValueBasedMethod", "1") + name: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> WaterValueBasedMethod: + """Convert this GraphQL format of water value based method to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return WaterValueBasedMethod( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + ) + + def as_write(self) -> WaterValueBasedMethodWrite: + """Convert this GraphQL format of water value based method to the writing format.""" + return WaterValueBasedMethodWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + ) + + class WaterValueBasedMethod(BidMethod): """This represents the reading version of water value based method. 
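The hunks that follow wire the same `GraphQLQueryResponse` parser into the v1 client: it walks the response, recursing into `items` lists and dispatching on `__typename` to the `*GraphQL` class registered for the queried data model, and each API group gains a `graphql_query` method. A hedged end-to-end sketch; the query shape, field names, and ids are invented and depend on the deployed schema:

```python
from cognite.client import CogniteClient
from cognite.powerops.client._generated.v1._api_client import PowerOpsModelsV1Client

power = PowerOpsModelsV1Client(CogniteClient())  # assumes a configured CogniteClient

# Hypothetical query; the list field name must match the deployed DML schema.
query = """
query {
  listPriceArea(first: 5) {
    items {
      __typename
      space
      externalId
      name
      createdTime
      lastUpdatedTime
    }
  }
}
"""
# Returns a GraphQLList of PriceAreaGraphQL objects, each supporting
# .as_read() / .as_write() as shown in the data-class hunks above.
areas = power.total_bid_calculation.graphql_query(query)
```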
@@ -95,6 +151,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -113,7 +170,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/_api/_core.py b/cognite/powerops/client/_generated/v1/_api/_core.py index 9b1c7e600..b3c48ba9d 100644 --- a/cognite/powerops/client/_generated/v1/_api/_core.py +++ b/cognite/powerops/client/_generated/v1/_api/_core.py @@ -21,6 +21,7 @@ DomainModelCore, DomainModelWrite, DomainRelationWrite, + GraphQLList, ResourcesWriteResult, T_DomainModel, T_DomainModelWrite, @@ -32,6 +33,8 @@ DomainModelCore, DomainRelation, ) +from cognite.powerops.client._generated.v1 import data_classes + DEFAULT_LIMIT_READ = 25 DEFAULT_QUERY_LIMIT = 3 @@ -751,3 +754,168 @@ def _create_edge_filter( if filter: filters.append(filter) return dm.filters.And(*filters) + + +class GraphQLQueryResponse: + def __init__(self, data_model_id: dm.DataModelId): + self._output = GraphQLList([]) + self._data_class_by_type = _GRAPHQL_DATA_CLASS_BY_DATA_MODEL_BY_TYPE[data_model_id] + + def parse(self, response: dict[str, Any]) -> GraphQLList: + if "errors" in response: + raise RuntimeError(response["errors"]) + _, data = list(response.items())[0] + self._parse_item(data) + return self._output + + def _parse_item(self, data: dict[str, Any]) -> None: + if "items" in data: + for item in data["items"]: + self._parse_item(item) + elif "__typename" in data: + try: + item = self._data_class_by_type[data["__typename"]].model_validate(data) + except KeyError: + raise ValueError(f"Could not find class for type {data['__typename']}") + else: + self._output.append(item) + else: + raise RuntimeError("Missing '__typename' in GraphQL response. 
Cannot determine the type of the response.") + + +_GRAPHQL_DATA_CLASS_BY_DATA_MODEL_BY_TYPE = { + dm.DataModelId("sp_powerops_models", "compute_SHOPBasedDayAhead", "1"): { + "TaskDispatcherShopInput": data_classes.TaskDispatcherShopInputGraphQL, + "TaskDispatcherShopOutput": data_classes.TaskDispatcherShopOutputGraphQL, + "PreprocessorInput": data_classes.PreprocessorInputGraphQL, + "PreprocessorOutput": data_classes.PreprocessorOutputGraphQL, + "SHOPTriggerInput": data_classes.SHOPTriggerInputGraphQL, + "SHOPTriggerOutput": data_classes.SHOPTriggerOutputGraphQL, + "ShopPartialBidCalculationInput": data_classes.ShopPartialBidCalculationInputGraphQL, + "ShopPartialBidCalculationOutput": data_classes.ShopPartialBidCalculationOutputGraphQL, + "BidMatrixRaw": data_classes.BidMatrixRawGraphQL, + "MultiScenarioMatrixRaw": data_classes.MultiScenarioMatrixRawGraphQL, + "MarketConfiguration": data_classes.MarketConfigurationGraphQL, + "BidMethodSHOPMultiScenario": data_classes.BidMethodSHOPMultiScenarioGraphQL, + "Scenario": data_classes.ScenarioGraphQL, + "Mapping": data_classes.MappingGraphQL, + "ModelTemplate": data_classes.ModelTemplateGraphQL, + "SHOPResult": data_classes.SHOPResultGraphQL, + "Case": data_classes.CaseGraphQL, + "SHOPResultPriceProd": data_classes.SHOPResultPriceProdGraphQL, + "Alert": data_classes.AlertGraphQL, + "PlantShop": data_classes.PlantShopGraphQL, + "WatercourseShop": data_classes.WatercourseShopGraphQL, + "BidConfigurationShop": data_classes.BidConfigurationShopGraphQL, + "PriceArea": data_classes.PriceAreaGraphQL, + "PriceProdCase": data_classes.PriceProdCaseGraphQL, + "SHOPTimeSeries": data_classes.SHOPTimeSeriesGraphQL, + "Commands": data_classes.CommandsGraphQL, + }, + dm.DataModelId("sp_powerops_models", "compute_TotalBidCalculation", "1"): { + "BidMatrixRaw": data_classes.BidMatrixRawGraphQL, + "MultiScenarioMatrixRaw": data_classes.MultiScenarioMatrixRawGraphQL, + "BidMatrix": data_classes.BidMatrixGraphQL, + "MultiScenarioMatrix": data_classes.MultiScenarioMatrixGraphQL, + "PartialPostProcessingInput": data_classes.PartialPostProcessingInputGraphQL, + "PartialPostProcessingOutput": data_classes.PartialPostProcessingOutputGraphQL, + "TotalBidMatrixCalculationInput": data_classes.TotalBidMatrixCalculationInputGraphQL, + "TotalBidMatrixCalculationOutput": data_classes.TotalBidMatrixCalculationOutputGraphQL, + "BidDocumentDayAhead": data_classes.BidDocumentDayAheadGraphQL, + "PriceArea": data_classes.PriceAreaGraphQL, + "BidMethodDayAhead": data_classes.BidMethodDayAheadGraphQL, + "BidMethodWaterValue": data_classes.BidMethodWaterValueGraphQL, + "BidMethodSHOPMultiScenario": data_classes.BidMethodSHOPMultiScenarioGraphQL, + "Alert": data_classes.AlertGraphQL, + "SHOPResult": data_classes.SHOPResultGraphQL, + "SHOPResultPriceProd": data_classes.SHOPResultPriceProdGraphQL, + "MarketConfiguration": data_classes.MarketConfigurationGraphQL, + "Scenario": data_classes.ScenarioGraphQL, + "ModelTemplate": data_classes.ModelTemplateGraphQL, + "Mapping": data_classes.MappingGraphQL, + "WatercourseShop": data_classes.WatercourseShopGraphQL, + "PriceProdCase": data_classes.PriceProdCaseGraphQL, + "Case": data_classes.CaseGraphQL, + "SHOPTimeSeries": data_classes.SHOPTimeSeriesGraphQL, + "Commands": data_classes.CommandsGraphQL, + }, + dm.DataModelId("sp_powerops_models", "compute_WaterValueBasedDayAheadBid", "1"): { + "TaskDispatcherWaterInput": data_classes.TaskDispatcherWaterInputGraphQL, + "TaskDispatcherWaterOutput": data_classes.TaskDispatcherWaterOutputGraphQL, + 
"BidCalculationTask": data_classes.BidCalculationTaskGraphQL, + "WaterPartialBidCalculationInput": data_classes.WaterPartialBidCalculationInputGraphQL, + "WaterPartialBidCalculationOutput": data_classes.WaterPartialBidCalculationOutputGraphQL, + "BidMatrixRaw": data_classes.BidMatrixRawGraphQL, + "BidMethodWaterValue": data_classes.BidMethodWaterValueGraphQL, + "Plant": data_classes.PlantGraphQL, + "Watercourse": data_classes.WatercourseGraphQL, + "Alert": data_classes.AlertGraphQL, + "BidConfigurationWater": data_classes.BidConfigurationWaterGraphQL, + "PriceArea": data_classes.PriceAreaGraphQL, + "MarketConfiguration": data_classes.MarketConfigurationGraphQL, + "Reservoir": data_classes.ReservoirGraphQL, + "Generator": data_classes.GeneratorGraphQL, + "GeneratorEfficiencyCurve": data_classes.GeneratorEfficiencyCurveGraphQL, + "TurbineEfficiencyCurve": data_classes.TurbineEfficiencyCurveGraphQL, + }, + dm.DataModelId("sp_powerops_models", "config_DayAheadConfiguration", "1"): { + "BidConfiguration": data_classes.BidConfigurationGraphQL, + "BidConfigurationShop": data_classes.BidConfigurationShopGraphQL, + "BidConfigurationWater": data_classes.BidConfigurationWaterGraphQL, + "MarketConfiguration": data_classes.MarketConfigurationGraphQL, + "Scenario": data_classes.ScenarioGraphQL, + "Mapping": data_classes.MappingGraphQL, + "ModelTemplate": data_classes.ModelTemplateGraphQL, + "Watercourse": data_classes.WatercourseGraphQL, + "WatercourseShop": data_classes.WatercourseShopGraphQL, + "Plant": data_classes.PlantGraphQL, + "PlantShop": data_classes.PlantShopGraphQL, + "Generator": data_classes.GeneratorGraphQL, + "Reservoir": data_classes.ReservoirGraphQL, + "TurbineEfficiencyCurve": data_classes.TurbineEfficiencyCurveGraphQL, + "GeneratorEfficiencyCurve": data_classes.GeneratorEfficiencyCurveGraphQL, + "BidMethodDayAhead": data_classes.BidMethodDayAheadGraphQL, + "BidMethodWaterValue": data_classes.BidMethodWaterValueGraphQL, + "BidMethodSHOPMultiScenario": data_classes.BidMethodSHOPMultiScenarioGraphQL, + "PriceArea": data_classes.PriceAreaGraphQL, + "BidMethod": data_classes.BidMethodGraphQL, + "Commands": data_classes.CommandsGraphQL, + }, + dm.DataModelId("sp_powerops_models", "frontend_AFRRBid", "1"): { + "BidDocumentAFRR": data_classes.BidDocumentAFRRGraphQL, + "BidRow": data_classes.BidRowGraphQL, + "PriceAreaAFRR": data_classes.PriceAreaAFRRGraphQL, + "BidMethodAFRR": data_classes.BidMethodAFRRGraphQL, + "Alert": data_classes.AlertGraphQL, + }, + dm.DataModelId("sp_powerops_models", "frontend_Asset", "1"): { + "PriceAreaAsset": data_classes.PriceAreaAssetGraphQL, + "Watercourse": data_classes.WatercourseGraphQL, + "Plant": data_classes.PlantGraphQL, + "Generator": data_classes.GeneratorGraphQL, + "Reservoir": data_classes.ReservoirGraphQL, + "TurbineEfficiencyCurve": data_classes.TurbineEfficiencyCurveGraphQL, + "GeneratorEfficiencyCurve": data_classes.GeneratorEfficiencyCurveGraphQL, + "BidMethodDayAhead": data_classes.BidMethodDayAheadGraphQL, + }, + dm.DataModelId("sp_powerops_models", "frontend_DayAheadBid", "1"): { + "BidDocumentDayAhead": data_classes.BidDocumentDayAheadGraphQL, + "BidMatrix": data_classes.BidMatrixGraphQL, + "MultiScenarioMatrix": data_classes.MultiScenarioMatrixGraphQL, + "BasicBidMatrix": data_classes.BasicBidMatrixGraphQL, + "CustomBidMatrix": data_classes.CustomBidMatrixGraphQL, + "BidMethodCustom": data_classes.BidMethodCustomGraphQL, + "BidMethodDayAhead": data_classes.BidMethodDayAheadGraphQL, + "PriceArea": data_classes.PriceAreaGraphQL, + 
"BidMethodSHOPMultiScenario": data_classes.BidMethodSHOPMultiScenarioGraphQL, + "BidMethodWaterValue": data_classes.BidMethodWaterValueGraphQL, + "Alert": data_classes.AlertGraphQL, + "Scenario": data_classes.ScenarioGraphQL, + "ModelTemplate": data_classes.ModelTemplateGraphQL, + "Mapping": data_classes.MappingGraphQL, + "WatercourseShop": data_classes.WatercourseShopGraphQL, + "PriceProdCase": data_classes.PriceProdCaseGraphQL, + "Commands": data_classes.CommandsGraphQL, + "Case": data_classes.CaseGraphQL, + }, +} diff --git a/cognite/powerops/client/_generated/v1/_api_client.py b/cognite/powerops/client/_generated/v1/_api_client.py index 3679218c7..2a8fc7ff9 100644 --- a/cognite/powerops/client/_generated/v1/_api_client.py +++ b/cognite/powerops/client/_generated/v1/_api_client.py @@ -65,8 +65,8 @@ from ._api.water_partial_bid_calculation_output import WaterPartialBidCalculationOutputAPI from ._api.watercourse import WatercourseAPI from ._api.watercourse_shop import WatercourseShopAPI -from ._api._core import SequenceNotStr -from .data_classes._core import DEFAULT_INSTANCE_SPACE +from ._api._core import SequenceNotStr, GraphQLQueryResponse +from .data_classes._core import DEFAULT_INSTANCE_SPACE, GraphQLList from . import data_classes @@ -115,6 +115,7 @@ def __init__(self, client: CogniteClient): data_classes.WatercourseShop: dm.ViewId("sp_powerops_models", "WatercourseShop", "1"), } self._view_by_read_class = view_by_read_class + self._client = client self.alert = AlertAPI(client, view_by_read_class) self.bid_configuration_shop = BidConfigurationShopAPI(client, view_by_read_class) @@ -143,6 +144,17 @@ def __init__(self, client: CogniteClient): self.task_dispatcher_shop_output = TaskDispatcherShopOutputAPI(client, view_by_read_class) self.watercourse_shop = WatercourseShopAPI(client, view_by_read_class) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQl query against the compute_SHOPBasedDayAhead data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. + """ + data_model_id = dm.DataModelId("sp_powerops_models", "compute_SHOPBasedDayAhead", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + class TotalBidCalculationAPIs: """ @@ -190,6 +202,7 @@ def __init__(self, client: CogniteClient): data_classes.WatercourseShop: dm.ViewId("sp_powerops_models", "WatercourseShop", "1"), } self._view_by_read_class = view_by_read_class + self._client = client self.alert = AlertAPI(client, view_by_read_class) self.bid_document_day_ahead = BidDocumentDayAheadAPI(client, view_by_read_class) @@ -217,6 +230,17 @@ def __init__(self, client: CogniteClient): self.total_bid_matrix_calculation_output = TotalBidMatrixCalculationOutputAPI(client, view_by_read_class) self.watercourse_shop = WatercourseShopAPI(client, view_by_read_class) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQl query against the compute_TotalBidCalculation data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. 
+ """ + data_model_id = dm.DataModelId("sp_powerops_models", "compute_TotalBidCalculation", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + class WaterValueBasedDayAheadBidProcesAPIs: """ @@ -254,6 +278,7 @@ def __init__(self, client: CogniteClient): data_classes.Watercourse: dm.ViewId("sp_powerops_models", "Watercourse", "1"), } self._view_by_read_class = view_by_read_class + self._client = client self.alert = AlertAPI(client, view_by_read_class) self.bid_calculation_task = BidCalculationTaskAPI(client, view_by_read_class) @@ -273,6 +298,17 @@ def __init__(self, client: CogniteClient): self.water_partial_bid_calculation_output = WaterPartialBidCalculationOutputAPI(client, view_by_read_class) self.watercourse = WatercourseAPI(client, view_by_read_class) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQl query against the compute_WaterValueBasedDayAheadBid data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. + """ + data_model_id = dm.DataModelId("sp_powerops_models", "compute_WaterValueBasedDayAheadBid", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + class DayAheadConfigurationAPIs: """ @@ -310,6 +346,7 @@ def __init__(self, client: CogniteClient): data_classes.WatercourseShop: dm.ViewId("sp_powerops_models", "WatercourseShop", "1"), } self._view_by_read_class = view_by_read_class + self._client = client self.bid_configuration = BidConfigurationAPI(client, view_by_read_class) self.bid_configuration_shop = BidConfigurationShopAPI(client, view_by_read_class) @@ -333,6 +370,17 @@ def __init__(self, client: CogniteClient): self.watercourse = WatercourseAPI(client, view_by_read_class) self.watercourse_shop = WatercourseShopAPI(client, view_by_read_class) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQl query against the config_DayAheadConfiguration data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. + """ + data_model_id = dm.DataModelId("sp_powerops_models", "config_DayAheadConfiguration", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + class AFRRBidAPIs: """ @@ -354,6 +402,7 @@ def __init__(self, client: CogniteClient): data_classes.PriceAreaAFRR: dm.ViewId("sp_powerops_models", "PriceAreaAFRR", "1"), } self._view_by_read_class = view_by_read_class + self._client = client self.alert = AlertAPI(client, view_by_read_class) self.bid_document_afrr = BidDocumentAFRRAPI(client, view_by_read_class) @@ -361,6 +410,17 @@ def __init__(self, client: CogniteClient): self.bid_row = BidRowAPI(client, view_by_read_class) self.price_area_afrr = PriceAreaAFRRAPI(client, view_by_read_class) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQl query against the frontend_AFRRBid data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. 
+ """ + data_model_id = dm.DataModelId("sp_powerops_models", "frontend_AFRRBid", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + class PowerAssetAPIs: """ @@ -385,6 +445,7 @@ def __init__(self, client: CogniteClient): data_classes.Watercourse: dm.ViewId("sp_powerops_models", "Watercourse", "1"), } self._view_by_read_class = view_by_read_class + self._client = client self.bid_method_day_ahead = BidMethodDayAheadAPI(client, view_by_read_class) self.generator = GeneratorAPI(client, view_by_read_class) @@ -395,6 +456,17 @@ def __init__(self, client: CogniteClient): self.turbine_efficiency_curve = TurbineEfficiencyCurveAPI(client, view_by_read_class) self.watercourse = WatercourseAPI(client, view_by_read_class) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQl query against the frontend_Asset data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. + """ + data_model_id = dm.DataModelId("sp_powerops_models", "frontend_Asset", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + class DayAheadBidAPIs: """ @@ -429,6 +501,7 @@ def __init__(self, client: CogniteClient): data_classes.WatercourseShop: dm.ViewId("sp_powerops_models", "WatercourseShop", "1"), } self._view_by_read_class = view_by_read_class + self._client = client self.alert = AlertAPI(client, view_by_read_class) self.basic_bid_matrix = BasicBidMatrixAPI(client, view_by_read_class) @@ -449,15 +522,26 @@ def __init__(self, client: CogniteClient): self.scenario = ScenarioAPI(client, view_by_read_class) self.watercourse_shop = WatercourseShopAPI(client, view_by_read_class) + def graphql_query(self, query: str, variables: dict[str, Any] | None = None) -> GraphQLList: + """Execute a GraphQl query against the frontend_DayAheadBid data model. + + Args: + query (str): The GraphQL query to issue. + variables (dict[str, Any] | None): An optional dict of variables to pass to the query. + """ + data_model_id = dm.DataModelId("sp_powerops_models", "frontend_DayAheadBid", "1") + result = self._client.data_modeling.graphql.query(data_model_id, query, variables) + return GraphQLQueryResponse(data_model_id).parse(result) + class PowerOpsModelsV1Client: """ PowerOpsModelsV1Client Generated with: - pygen = 0.99.11 - cognite-sdk = 7.26.0 - pydantic = 2.6.3 + pygen = 0.99.14 + cognite-sdk = 7.26.2 + pydantic = 2.6.4 """ @@ -469,7 +553,7 @@ def __init__(self, config_or_client: CogniteClient | ClientConfig): else: raise ValueError(f"Expected CogniteClient or ClientConfig, got {type(config_or_client)}") # The client name is used for aggregated logging of Pygen Usage - client.config.client_name = "CognitePygen:0.99.11" + client.config.client_name = "CognitePygen:0.99.14" self.shop_based_day_ahead_bid_process = SHOPBasedDayAheadBidProcesAPIs(client) self.total_bid_calculation = TotalBidCalculationAPIs(client) @@ -499,6 +583,7 @@ def upsert( items: data_classes.DomainModelWrite | Sequence[data_classes.DomainModelWrite], replace: bool = False, write_none: bool = False, + allow_version_increase: bool = False, ) -> data_classes.ResourcesWriteResult: """Add or update (upsert) items. @@ -508,17 +593,27 @@ def upsert( Or should we merge in new values for properties together with the existing values (false)? 
Note: This setting applies for all nodes or edges specified in the ingestion call. write_none (bool): This method will, by default, skip properties that are set to None. However, if you want to set properties to None, you can set this parameter to True. Note this only applies to properties that are nullable. + allow_version_increase (bool): If set to true, the version of the instance will be increased if the instance already exists. + If you get an error: 'A version conflict caused the ingest to fail', you can set this to true to allow + the version to increase. Returns: Created instance(s), i.e., nodes, edges, and time series. """ if isinstance(items, data_classes.DomainModelWrite): - instances = items.to_instances_write(self._view_by_read_class, write_none) + instances = items.to_instances_write(self._view_by_read_class, write_none, allow_version_increase) else: instances = data_classes.ResourcesWrite() cache: set[tuple[str, str]] = set() for item in items: - instances.extend(item._to_instances_write(cache, self._view_by_read_class, write_none)) + instances.extend( + item._to_instances_write( + cache, + self._view_by_read_class, + write_none, + allow_version_increase, + ) + ) result = self._client.data_modeling.instances.apply( nodes=instances.nodes, edges=instances.edges, diff --git a/cognite/powerops/client/_generated/v1/data_classes/__init__.py b/cognite/powerops/client/_generated/v1/data_classes/__init__.py index 01c7c2fcc..4896dce45 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/__init__.py +++ b/cognite/powerops/client/_generated/v1/data_classes/__init__.py @@ -1,11 +1,14 @@ from ._core import ( DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, DomainModelWrite, DomainModelList, DomainRelationWrite, + GraphQLCore, + GraphQLList, ResourcesWrite, ResourcesWriteResult, ) @@ -14,6 +17,7 @@ AlertApply, AlertApplyList, AlertFields, + AlertGraphQL, AlertList, AlertTextFields, AlertWrite, @@ -24,6 +28,7 @@ BasicBidMatrixApply, BasicBidMatrixApplyList, BasicBidMatrixFields, + BasicBidMatrixGraphQL, BasicBidMatrixList, BasicBidMatrixTextFields, BasicBidMatrixWrite, @@ -34,6 +39,7 @@ BidCalculationTaskApply, BidCalculationTaskApplyList, BidCalculationTaskFields, + BidCalculationTaskGraphQL, BidCalculationTaskList, BidCalculationTaskWrite, BidCalculationTaskWriteList, @@ -42,6 +48,7 @@ BidConfiguration, BidConfigurationApply, BidConfigurationApplyList, + BidConfigurationGraphQL, BidConfigurationList, BidConfigurationWrite, BidConfigurationWriteList, @@ -51,6 +58,7 @@ BidConfigurationShopApply, BidConfigurationShopApplyList, BidConfigurationShopFields, + BidConfigurationShopGraphQL, BidConfigurationShopList, BidConfigurationShopTextFields, BidConfigurationShopWrite, @@ -60,6 +68,7 @@ BidConfigurationWater, BidConfigurationWaterApply, BidConfigurationWaterApplyList, + BidConfigurationWaterGraphQL, BidConfigurationWaterList, BidConfigurationWaterWrite, BidConfigurationWaterWriteList, @@ -69,6 +78,7 @@ BidDocumentAFRRApply, BidDocumentAFRRApplyList, BidDocumentAFRRFields, + BidDocumentAFRRGraphQL, BidDocumentAFRRList, BidDocumentAFRRTextFields, BidDocumentAFRRWrite, @@ -79,6 +89,7 @@ BidDocumentDayAheadApply, BidDocumentDayAheadApplyList, BidDocumentDayAheadFields, + BidDocumentDayAheadGraphQL, BidDocumentDayAheadList, BidDocumentDayAheadTextFields, BidDocumentDayAheadWrite, @@ -89,6 +100,7 @@ BidMatrixApply, BidMatrixApplyList, BidMatrixFields, + BidMatrixGraphQL, BidMatrixList, BidMatrixTextFields, BidMatrixWrite, @@ -99,6 +111,7 @@ 
BidMatrixRawApply, BidMatrixRawApplyList, BidMatrixRawFields, + BidMatrixRawGraphQL, BidMatrixRawList, BidMatrixRawTextFields, BidMatrixRawWrite, @@ -109,6 +122,7 @@ BidMethodApply, BidMethodApplyList, BidMethodFields, + BidMethodGraphQL, BidMethodList, BidMethodTextFields, BidMethodWrite, @@ -119,6 +133,7 @@ BidMethodAFRRApply, BidMethodAFRRApplyList, BidMethodAFRRFields, + BidMethodAFRRGraphQL, BidMethodAFRRList, BidMethodAFRRTextFields, BidMethodAFRRWrite, @@ -129,6 +144,7 @@ BidMethodCustomApply, BidMethodCustomApplyList, BidMethodCustomFields, + BidMethodCustomGraphQL, BidMethodCustomList, BidMethodCustomTextFields, BidMethodCustomWrite, @@ -139,6 +155,7 @@ BidMethodDayAheadApply, BidMethodDayAheadApplyList, BidMethodDayAheadFields, + BidMethodDayAheadGraphQL, BidMethodDayAheadList, BidMethodDayAheadTextFields, BidMethodDayAheadWrite, @@ -149,6 +166,7 @@ BidMethodSHOPMultiScenarioApply, BidMethodSHOPMultiScenarioApplyList, BidMethodSHOPMultiScenarioFields, + BidMethodSHOPMultiScenarioGraphQL, BidMethodSHOPMultiScenarioList, BidMethodSHOPMultiScenarioTextFields, BidMethodSHOPMultiScenarioWrite, @@ -159,6 +177,7 @@ BidMethodWaterValueApply, BidMethodWaterValueApplyList, BidMethodWaterValueFields, + BidMethodWaterValueGraphQL, BidMethodWaterValueList, BidMethodWaterValueTextFields, BidMethodWaterValueWrite, @@ -169,17 +188,19 @@ BidRowApply, BidRowApplyList, BidRowFields, + BidRowGraphQL, BidRowList, BidRowTextFields, BidRowWrite, BidRowWriteList, ) -from ._case import Case, CaseApply, CaseApplyList, CaseFields, CaseList, CaseWrite, CaseWriteList +from ._case import Case, CaseApply, CaseApplyList, CaseFields, CaseGraphQL, CaseList, CaseWrite, CaseWriteList from ._commands import ( Commands, CommandsApply, CommandsApplyList, CommandsFields, + CommandsGraphQL, CommandsList, CommandsTextFields, CommandsWrite, @@ -190,6 +211,7 @@ CustomBidMatrixApply, CustomBidMatrixApplyList, CustomBidMatrixFields, + CustomBidMatrixGraphQL, CustomBidMatrixList, CustomBidMatrixTextFields, CustomBidMatrixWrite, @@ -200,6 +222,7 @@ GeneratorApply, GeneratorApplyList, GeneratorFields, + GeneratorGraphQL, GeneratorList, GeneratorTextFields, GeneratorWrite, @@ -210,6 +233,7 @@ GeneratorEfficiencyCurveApply, GeneratorEfficiencyCurveApplyList, GeneratorEfficiencyCurveFields, + GeneratorEfficiencyCurveGraphQL, GeneratorEfficiencyCurveList, GeneratorEfficiencyCurveWrite, GeneratorEfficiencyCurveWriteList, @@ -219,6 +243,7 @@ MappingApply, MappingApplyList, MappingFields, + MappingGraphQL, MappingList, MappingTextFields, MappingWrite, @@ -229,6 +254,7 @@ MarketConfigurationApply, MarketConfigurationApplyList, MarketConfigurationFields, + MarketConfigurationGraphQL, MarketConfigurationList, MarketConfigurationTextFields, MarketConfigurationWrite, @@ -239,6 +265,7 @@ ModelTemplateApply, ModelTemplateApplyList, ModelTemplateFields, + ModelTemplateGraphQL, ModelTemplateList, ModelTemplateTextFields, ModelTemplateWrite, @@ -249,6 +276,7 @@ MultiScenarioMatrixApply, MultiScenarioMatrixApplyList, MultiScenarioMatrixFields, + MultiScenarioMatrixGraphQL, MultiScenarioMatrixList, MultiScenarioMatrixTextFields, MultiScenarioMatrixWrite, @@ -259,6 +287,7 @@ MultiScenarioMatrixRawApply, MultiScenarioMatrixRawApplyList, MultiScenarioMatrixRawFields, + MultiScenarioMatrixRawGraphQL, MultiScenarioMatrixRawList, MultiScenarioMatrixRawTextFields, MultiScenarioMatrixRawWrite, @@ -269,6 +298,7 @@ PartialPostProcessingInputApply, PartialPostProcessingInputApplyList, PartialPostProcessingInputFields, + PartialPostProcessingInputGraphQL, 
PartialPostProcessingInputList, PartialPostProcessingInputTextFields, PartialPostProcessingInputWrite, @@ -279,6 +309,7 @@ PartialPostProcessingOutputApply, PartialPostProcessingOutputApplyList, PartialPostProcessingOutputFields, + PartialPostProcessingOutputGraphQL, PartialPostProcessingOutputList, PartialPostProcessingOutputTextFields, PartialPostProcessingOutputWrite, @@ -289,6 +320,7 @@ PlantApply, PlantApplyList, PlantFields, + PlantGraphQL, PlantList, PlantTextFields, PlantWrite, @@ -299,6 +331,7 @@ PlantShopApply, PlantShopApplyList, PlantShopFields, + PlantShopGraphQL, PlantShopList, PlantShopTextFields, PlantShopWrite, @@ -309,6 +342,7 @@ PreprocessorInputApply, PreprocessorInputApplyList, PreprocessorInputFields, + PreprocessorInputGraphQL, PreprocessorInputList, PreprocessorInputTextFields, PreprocessorInputWrite, @@ -319,6 +353,7 @@ PreprocessorOutputApply, PreprocessorOutputApplyList, PreprocessorOutputFields, + PreprocessorOutputGraphQL, PreprocessorOutputList, PreprocessorOutputTextFields, PreprocessorOutputWrite, @@ -329,6 +364,7 @@ PriceAreaApply, PriceAreaApplyList, PriceAreaFields, + PriceAreaGraphQL, PriceAreaList, PriceAreaTextFields, PriceAreaWrite, @@ -339,6 +375,7 @@ PriceAreaAFRRApply, PriceAreaAFRRApplyList, PriceAreaAFRRFields, + PriceAreaAFRRGraphQL, PriceAreaAFRRList, PriceAreaAFRRTextFields, PriceAreaAFRRWrite, @@ -349,6 +386,7 @@ PriceAreaAssetApply, PriceAreaAssetApplyList, PriceAreaAssetFields, + PriceAreaAssetGraphQL, PriceAreaAssetList, PriceAreaAssetTextFields, PriceAreaAssetWrite, @@ -359,6 +397,7 @@ PriceProdCaseApply, PriceProdCaseApplyList, PriceProdCaseFields, + PriceProdCaseGraphQL, PriceProdCaseList, PriceProdCaseWrite, PriceProdCaseWriteList, @@ -368,6 +407,7 @@ ReservoirApply, ReservoirApplyList, ReservoirFields, + ReservoirGraphQL, ReservoirList, ReservoirTextFields, ReservoirWrite, @@ -378,6 +418,7 @@ SHOPResultApply, SHOPResultApplyList, SHOPResultFields, + SHOPResultGraphQL, SHOPResultList, SHOPResultWrite, SHOPResultWriteList, @@ -387,6 +428,7 @@ SHOPResultPriceProdApply, SHOPResultPriceProdApplyList, SHOPResultPriceProdFields, + SHOPResultPriceProdGraphQL, SHOPResultPriceProdList, SHOPResultPriceProdWrite, SHOPResultPriceProdWriteList, @@ -396,6 +438,7 @@ SHOPTimeSeriesApply, SHOPTimeSeriesApplyList, SHOPTimeSeriesFields, + SHOPTimeSeriesGraphQL, SHOPTimeSeriesList, SHOPTimeSeriesTextFields, SHOPTimeSeriesWrite, @@ -406,6 +449,7 @@ SHOPTriggerInputApply, SHOPTriggerInputApplyList, SHOPTriggerInputFields, + SHOPTriggerInputGraphQL, SHOPTriggerInputList, SHOPTriggerInputTextFields, SHOPTriggerInputWrite, @@ -416,6 +460,7 @@ SHOPTriggerOutputApply, SHOPTriggerOutputApplyList, SHOPTriggerOutputFields, + SHOPTriggerOutputGraphQL, SHOPTriggerOutputList, SHOPTriggerOutputTextFields, SHOPTriggerOutputWrite, @@ -426,6 +471,7 @@ ScenarioApply, ScenarioApplyList, ScenarioFields, + ScenarioGraphQL, ScenarioList, ScenarioTextFields, ScenarioWrite, @@ -436,6 +482,7 @@ ShopPartialBidCalculationInputApply, ShopPartialBidCalculationInputApplyList, ShopPartialBidCalculationInputFields, + ShopPartialBidCalculationInputGraphQL, ShopPartialBidCalculationInputList, ShopPartialBidCalculationInputTextFields, ShopPartialBidCalculationInputWrite, @@ -446,6 +493,7 @@ ShopPartialBidCalculationOutputApply, ShopPartialBidCalculationOutputApplyList, ShopPartialBidCalculationOutputFields, + ShopPartialBidCalculationOutputGraphQL, ShopPartialBidCalculationOutputList, ShopPartialBidCalculationOutputTextFields, ShopPartialBidCalculationOutputWrite, @@ -456,6 +504,7 @@ 
TaskDispatcherShopInputApply, TaskDispatcherShopInputApplyList, TaskDispatcherShopInputFields, + TaskDispatcherShopInputGraphQL, TaskDispatcherShopInputList, TaskDispatcherShopInputTextFields, TaskDispatcherShopInputWrite, @@ -466,6 +515,7 @@ TaskDispatcherShopOutputApply, TaskDispatcherShopOutputApplyList, TaskDispatcherShopOutputFields, + TaskDispatcherShopOutputGraphQL, TaskDispatcherShopOutputList, TaskDispatcherShopOutputTextFields, TaskDispatcherShopOutputWrite, @@ -476,6 +526,7 @@ TaskDispatcherWaterInputApply, TaskDispatcherWaterInputApplyList, TaskDispatcherWaterInputFields, + TaskDispatcherWaterInputGraphQL, TaskDispatcherWaterInputList, TaskDispatcherWaterInputTextFields, TaskDispatcherWaterInputWrite, @@ -486,6 +537,7 @@ TaskDispatcherWaterOutputApply, TaskDispatcherWaterOutputApplyList, TaskDispatcherWaterOutputFields, + TaskDispatcherWaterOutputGraphQL, TaskDispatcherWaterOutputList, TaskDispatcherWaterOutputTextFields, TaskDispatcherWaterOutputWrite, @@ -496,6 +548,7 @@ TotalBidMatrixCalculationInputApply, TotalBidMatrixCalculationInputApplyList, TotalBidMatrixCalculationInputFields, + TotalBidMatrixCalculationInputGraphQL, TotalBidMatrixCalculationInputList, TotalBidMatrixCalculationInputTextFields, TotalBidMatrixCalculationInputWrite, @@ -506,6 +559,7 @@ TotalBidMatrixCalculationOutputApply, TotalBidMatrixCalculationOutputApplyList, TotalBidMatrixCalculationOutputFields, + TotalBidMatrixCalculationOutputGraphQL, TotalBidMatrixCalculationOutputList, TotalBidMatrixCalculationOutputTextFields, TotalBidMatrixCalculationOutputWrite, @@ -516,6 +570,7 @@ TurbineEfficiencyCurveApply, TurbineEfficiencyCurveApplyList, TurbineEfficiencyCurveFields, + TurbineEfficiencyCurveGraphQL, TurbineEfficiencyCurveList, TurbineEfficiencyCurveWrite, TurbineEfficiencyCurveWriteList, @@ -525,6 +580,7 @@ WaterPartialBidCalculationInputApply, WaterPartialBidCalculationInputApplyList, WaterPartialBidCalculationInputFields, + WaterPartialBidCalculationInputGraphQL, WaterPartialBidCalculationInputList, WaterPartialBidCalculationInputTextFields, WaterPartialBidCalculationInputWrite, @@ -535,6 +591,7 @@ WaterPartialBidCalculationOutputApply, WaterPartialBidCalculationOutputApplyList, WaterPartialBidCalculationOutputFields, + WaterPartialBidCalculationOutputGraphQL, WaterPartialBidCalculationOutputList, WaterPartialBidCalculationOutputTextFields, WaterPartialBidCalculationOutputWrite, @@ -545,6 +602,7 @@ WatercourseApply, WatercourseApplyList, WatercourseFields, + WatercourseGraphQL, WatercourseList, WatercourseTextFields, WatercourseWrite, @@ -555,6 +613,7 @@ WatercourseShopApply, WatercourseShopApplyList, WatercourseShopFields, + WatercourseShopGraphQL, WatercourseShopList, WatercourseShopTextFields, WatercourseShopWrite, @@ -562,128 +621,170 @@ ) BasicBidMatrix.model_rebuild() +BasicBidMatrixGraphQL.model_rebuild() BasicBidMatrixWrite.model_rebuild() BasicBidMatrixApply.model_rebuild() BidCalculationTask.model_rebuild() +BidCalculationTaskGraphQL.model_rebuild() BidCalculationTaskWrite.model_rebuild() BidCalculationTaskApply.model_rebuild() BidConfiguration.model_rebuild() +BidConfigurationGraphQL.model_rebuild() BidConfigurationWrite.model_rebuild() BidConfigurationApply.model_rebuild() BidConfigurationShop.model_rebuild() +BidConfigurationShopGraphQL.model_rebuild() BidConfigurationShopWrite.model_rebuild() BidConfigurationShopApply.model_rebuild() BidConfigurationWater.model_rebuild() +BidConfigurationWaterGraphQL.model_rebuild() BidConfigurationWaterWrite.model_rebuild() 
BidConfigurationWaterApply.model_rebuild() BidDocumentAFRR.model_rebuild() +BidDocumentAFRRGraphQL.model_rebuild() BidDocumentAFRRWrite.model_rebuild() BidDocumentAFRRApply.model_rebuild() BidDocumentDayAhead.model_rebuild() +BidDocumentDayAheadGraphQL.model_rebuild() BidDocumentDayAheadWrite.model_rebuild() BidDocumentDayAheadApply.model_rebuild() BidMatrix.model_rebuild() +BidMatrixGraphQL.model_rebuild() BidMatrixWrite.model_rebuild() BidMatrixApply.model_rebuild() BidMatrixRaw.model_rebuild() +BidMatrixRawGraphQL.model_rebuild() BidMatrixRawWrite.model_rebuild() BidMatrixRawApply.model_rebuild() BidMethodSHOPMultiScenario.model_rebuild() +BidMethodSHOPMultiScenarioGraphQL.model_rebuild() BidMethodSHOPMultiScenarioWrite.model_rebuild() BidMethodSHOPMultiScenarioApply.model_rebuild() BidRow.model_rebuild() +BidRowGraphQL.model_rebuild() BidRowWrite.model_rebuild() BidRowApply.model_rebuild() Case.model_rebuild() +CaseGraphQL.model_rebuild() CaseWrite.model_rebuild() CaseApply.model_rebuild() CustomBidMatrix.model_rebuild() +CustomBidMatrixGraphQL.model_rebuild() CustomBidMatrixWrite.model_rebuild() CustomBidMatrixApply.model_rebuild() Generator.model_rebuild() +GeneratorGraphQL.model_rebuild() GeneratorWrite.model_rebuild() GeneratorApply.model_rebuild() ModelTemplate.model_rebuild() +ModelTemplateGraphQL.model_rebuild() ModelTemplateWrite.model_rebuild() ModelTemplateApply.model_rebuild() MultiScenarioMatrix.model_rebuild() +MultiScenarioMatrixGraphQL.model_rebuild() MultiScenarioMatrixWrite.model_rebuild() MultiScenarioMatrixApply.model_rebuild() MultiScenarioMatrixRaw.model_rebuild() +MultiScenarioMatrixRawGraphQL.model_rebuild() MultiScenarioMatrixRawWrite.model_rebuild() MultiScenarioMatrixRawApply.model_rebuild() PartialPostProcessingInput.model_rebuild() +PartialPostProcessingInputGraphQL.model_rebuild() PartialPostProcessingInputWrite.model_rebuild() PartialPostProcessingInputApply.model_rebuild() PartialPostProcessingOutput.model_rebuild() +PartialPostProcessingOutputGraphQL.model_rebuild() PartialPostProcessingOutputWrite.model_rebuild() PartialPostProcessingOutputApply.model_rebuild() Plant.model_rebuild() +PlantGraphQL.model_rebuild() PlantWrite.model_rebuild() PlantApply.model_rebuild() PreprocessorInput.model_rebuild() +PreprocessorInputGraphQL.model_rebuild() PreprocessorInputWrite.model_rebuild() PreprocessorInputApply.model_rebuild() PreprocessorOutput.model_rebuild() +PreprocessorOutputGraphQL.model_rebuild() PreprocessorOutputWrite.model_rebuild() PreprocessorOutputApply.model_rebuild() PriceAreaAsset.model_rebuild() +PriceAreaAssetGraphQL.model_rebuild() PriceAreaAssetWrite.model_rebuild() PriceAreaAssetApply.model_rebuild() PriceProdCase.model_rebuild() +PriceProdCaseGraphQL.model_rebuild() PriceProdCaseWrite.model_rebuild() PriceProdCaseApply.model_rebuild() SHOPResult.model_rebuild() +SHOPResultGraphQL.model_rebuild() SHOPResultWrite.model_rebuild() SHOPResultApply.model_rebuild() SHOPResultPriceProd.model_rebuild() +SHOPResultPriceProdGraphQL.model_rebuild() SHOPResultPriceProdWrite.model_rebuild() SHOPResultPriceProdApply.model_rebuild() SHOPTriggerInput.model_rebuild() +SHOPTriggerInputGraphQL.model_rebuild() SHOPTriggerInputWrite.model_rebuild() SHOPTriggerInputApply.model_rebuild() SHOPTriggerOutput.model_rebuild() +SHOPTriggerOutputGraphQL.model_rebuild() SHOPTriggerOutputWrite.model_rebuild() SHOPTriggerOutputApply.model_rebuild() Scenario.model_rebuild() +ScenarioGraphQL.model_rebuild() ScenarioWrite.model_rebuild() ScenarioApply.model_rebuild() 
ShopPartialBidCalculationInput.model_rebuild() +ShopPartialBidCalculationInputGraphQL.model_rebuild() ShopPartialBidCalculationInputWrite.model_rebuild() ShopPartialBidCalculationInputApply.model_rebuild() ShopPartialBidCalculationOutput.model_rebuild() +ShopPartialBidCalculationOutputGraphQL.model_rebuild() ShopPartialBidCalculationOutputWrite.model_rebuild() ShopPartialBidCalculationOutputApply.model_rebuild() TaskDispatcherShopInput.model_rebuild() +TaskDispatcherShopInputGraphQL.model_rebuild() TaskDispatcherShopInputWrite.model_rebuild() TaskDispatcherShopInputApply.model_rebuild() TaskDispatcherShopOutput.model_rebuild() +TaskDispatcherShopOutputGraphQL.model_rebuild() TaskDispatcherShopOutputWrite.model_rebuild() TaskDispatcherShopOutputApply.model_rebuild() TaskDispatcherWaterInput.model_rebuild() +TaskDispatcherWaterInputGraphQL.model_rebuild() TaskDispatcherWaterInputWrite.model_rebuild() TaskDispatcherWaterInputApply.model_rebuild() TaskDispatcherWaterOutput.model_rebuild() +TaskDispatcherWaterOutputGraphQL.model_rebuild() TaskDispatcherWaterOutputWrite.model_rebuild() TaskDispatcherWaterOutputApply.model_rebuild() TotalBidMatrixCalculationInput.model_rebuild() +TotalBidMatrixCalculationInputGraphQL.model_rebuild() TotalBidMatrixCalculationInputWrite.model_rebuild() TotalBidMatrixCalculationInputApply.model_rebuild() TotalBidMatrixCalculationOutput.model_rebuild() +TotalBidMatrixCalculationOutputGraphQL.model_rebuild() TotalBidMatrixCalculationOutputWrite.model_rebuild() TotalBidMatrixCalculationOutputApply.model_rebuild() WaterPartialBidCalculationInput.model_rebuild() +WaterPartialBidCalculationInputGraphQL.model_rebuild() WaterPartialBidCalculationInputWrite.model_rebuild() WaterPartialBidCalculationInputApply.model_rebuild() WaterPartialBidCalculationOutput.model_rebuild() +WaterPartialBidCalculationOutputGraphQL.model_rebuild() WaterPartialBidCalculationOutputWrite.model_rebuild() WaterPartialBidCalculationOutputApply.model_rebuild() Watercourse.model_rebuild() +WatercourseGraphQL.model_rebuild() WatercourseWrite.model_rebuild() WatercourseApply.model_rebuild() + __all__ = [ "DataRecord", + "DataRecordGraphQL", "DataRecordWrite", "ResourcesWrite", "DomainModel", @@ -691,8 +792,11 @@ "DomainModelWrite", "DomainModelList", "DomainRelationWrite", + "GraphQLCore", + "GraphQLList", "ResourcesWriteResult", "Alert", + "AlertGraphQL", "AlertWrite", "AlertApply", "AlertList", @@ -701,6 +805,7 @@ "AlertFields", "AlertTextFields", "BasicBidMatrix", + "BasicBidMatrixGraphQL", "BasicBidMatrixWrite", "BasicBidMatrixApply", "BasicBidMatrixList", @@ -709,6 +814,7 @@ "BasicBidMatrixFields", "BasicBidMatrixTextFields", "BidCalculationTask", + "BidCalculationTaskGraphQL", "BidCalculationTaskWrite", "BidCalculationTaskApply", "BidCalculationTaskList", @@ -716,12 +822,14 @@ "BidCalculationTaskApplyList", "BidCalculationTaskFields", "BidConfiguration", + "BidConfigurationGraphQL", "BidConfigurationWrite", "BidConfigurationApply", "BidConfigurationList", "BidConfigurationWriteList", "BidConfigurationApplyList", "BidConfigurationShop", + "BidConfigurationShopGraphQL", "BidConfigurationShopWrite", "BidConfigurationShopApply", "BidConfigurationShopList", @@ -730,12 +838,14 @@ "BidConfigurationShopFields", "BidConfigurationShopTextFields", "BidConfigurationWater", + "BidConfigurationWaterGraphQL", "BidConfigurationWaterWrite", "BidConfigurationWaterApply", "BidConfigurationWaterList", "BidConfigurationWaterWriteList", "BidConfigurationWaterApplyList", "BidDocumentAFRR", + "BidDocumentAFRRGraphQL", 
"BidDocumentAFRRWrite", "BidDocumentAFRRApply", "BidDocumentAFRRList", @@ -744,6 +854,7 @@ "BidDocumentAFRRFields", "BidDocumentAFRRTextFields", "BidDocumentDayAhead", + "BidDocumentDayAheadGraphQL", "BidDocumentDayAheadWrite", "BidDocumentDayAheadApply", "BidDocumentDayAheadList", @@ -752,6 +863,7 @@ "BidDocumentDayAheadFields", "BidDocumentDayAheadTextFields", "BidMatrix", + "BidMatrixGraphQL", "BidMatrixWrite", "BidMatrixApply", "BidMatrixList", @@ -760,6 +872,7 @@ "BidMatrixFields", "BidMatrixTextFields", "BidMatrixRaw", + "BidMatrixRawGraphQL", "BidMatrixRawWrite", "BidMatrixRawApply", "BidMatrixRawList", @@ -768,6 +881,7 @@ "BidMatrixRawFields", "BidMatrixRawTextFields", "BidMethod", + "BidMethodGraphQL", "BidMethodWrite", "BidMethodApply", "BidMethodList", @@ -776,6 +890,7 @@ "BidMethodFields", "BidMethodTextFields", "BidMethodAFRR", + "BidMethodAFRRGraphQL", "BidMethodAFRRWrite", "BidMethodAFRRApply", "BidMethodAFRRList", @@ -784,6 +899,7 @@ "BidMethodAFRRFields", "BidMethodAFRRTextFields", "BidMethodCustom", + "BidMethodCustomGraphQL", "BidMethodCustomWrite", "BidMethodCustomApply", "BidMethodCustomList", @@ -792,6 +908,7 @@ "BidMethodCustomFields", "BidMethodCustomTextFields", "BidMethodDayAhead", + "BidMethodDayAheadGraphQL", "BidMethodDayAheadWrite", "BidMethodDayAheadApply", "BidMethodDayAheadList", @@ -800,6 +917,7 @@ "BidMethodDayAheadFields", "BidMethodDayAheadTextFields", "BidMethodSHOPMultiScenario", + "BidMethodSHOPMultiScenarioGraphQL", "BidMethodSHOPMultiScenarioWrite", "BidMethodSHOPMultiScenarioApply", "BidMethodSHOPMultiScenarioList", @@ -808,6 +926,7 @@ "BidMethodSHOPMultiScenarioFields", "BidMethodSHOPMultiScenarioTextFields", "BidMethodWaterValue", + "BidMethodWaterValueGraphQL", "BidMethodWaterValueWrite", "BidMethodWaterValueApply", "BidMethodWaterValueList", @@ -816,6 +935,7 @@ "BidMethodWaterValueFields", "BidMethodWaterValueTextFields", "BidRow", + "BidRowGraphQL", "BidRowWrite", "BidRowApply", "BidRowList", @@ -824,6 +944,7 @@ "BidRowFields", "BidRowTextFields", "Case", + "CaseGraphQL", "CaseWrite", "CaseApply", "CaseList", @@ -831,6 +952,7 @@ "CaseApplyList", "CaseFields", "Commands", + "CommandsGraphQL", "CommandsWrite", "CommandsApply", "CommandsList", @@ -839,6 +961,7 @@ "CommandsFields", "CommandsTextFields", "CustomBidMatrix", + "CustomBidMatrixGraphQL", "CustomBidMatrixWrite", "CustomBidMatrixApply", "CustomBidMatrixList", @@ -847,6 +970,7 @@ "CustomBidMatrixFields", "CustomBidMatrixTextFields", "Generator", + "GeneratorGraphQL", "GeneratorWrite", "GeneratorApply", "GeneratorList", @@ -855,6 +979,7 @@ "GeneratorFields", "GeneratorTextFields", "GeneratorEfficiencyCurve", + "GeneratorEfficiencyCurveGraphQL", "GeneratorEfficiencyCurveWrite", "GeneratorEfficiencyCurveApply", "GeneratorEfficiencyCurveList", @@ -862,6 +987,7 @@ "GeneratorEfficiencyCurveApplyList", "GeneratorEfficiencyCurveFields", "Mapping", + "MappingGraphQL", "MappingWrite", "MappingApply", "MappingList", @@ -870,6 +996,7 @@ "MappingFields", "MappingTextFields", "MarketConfiguration", + "MarketConfigurationGraphQL", "MarketConfigurationWrite", "MarketConfigurationApply", "MarketConfigurationList", @@ -878,6 +1005,7 @@ "MarketConfigurationFields", "MarketConfigurationTextFields", "ModelTemplate", + "ModelTemplateGraphQL", "ModelTemplateWrite", "ModelTemplateApply", "ModelTemplateList", @@ -886,6 +1014,7 @@ "ModelTemplateFields", "ModelTemplateTextFields", "MultiScenarioMatrix", + "MultiScenarioMatrixGraphQL", "MultiScenarioMatrixWrite", "MultiScenarioMatrixApply", 
"MultiScenarioMatrixList", @@ -894,6 +1023,7 @@ "MultiScenarioMatrixFields", "MultiScenarioMatrixTextFields", "MultiScenarioMatrixRaw", + "MultiScenarioMatrixRawGraphQL", "MultiScenarioMatrixRawWrite", "MultiScenarioMatrixRawApply", "MultiScenarioMatrixRawList", @@ -902,6 +1032,7 @@ "MultiScenarioMatrixRawFields", "MultiScenarioMatrixRawTextFields", "PartialPostProcessingInput", + "PartialPostProcessingInputGraphQL", "PartialPostProcessingInputWrite", "PartialPostProcessingInputApply", "PartialPostProcessingInputList", @@ -910,6 +1041,7 @@ "PartialPostProcessingInputFields", "PartialPostProcessingInputTextFields", "PartialPostProcessingOutput", + "PartialPostProcessingOutputGraphQL", "PartialPostProcessingOutputWrite", "PartialPostProcessingOutputApply", "PartialPostProcessingOutputList", @@ -918,6 +1050,7 @@ "PartialPostProcessingOutputFields", "PartialPostProcessingOutputTextFields", "Plant", + "PlantGraphQL", "PlantWrite", "PlantApply", "PlantList", @@ -926,6 +1059,7 @@ "PlantFields", "PlantTextFields", "PlantShop", + "PlantShopGraphQL", "PlantShopWrite", "PlantShopApply", "PlantShopList", @@ -934,6 +1068,7 @@ "PlantShopFields", "PlantShopTextFields", "PreprocessorInput", + "PreprocessorInputGraphQL", "PreprocessorInputWrite", "PreprocessorInputApply", "PreprocessorInputList", @@ -942,6 +1077,7 @@ "PreprocessorInputFields", "PreprocessorInputTextFields", "PreprocessorOutput", + "PreprocessorOutputGraphQL", "PreprocessorOutputWrite", "PreprocessorOutputApply", "PreprocessorOutputList", @@ -950,6 +1086,7 @@ "PreprocessorOutputFields", "PreprocessorOutputTextFields", "PriceArea", + "PriceAreaGraphQL", "PriceAreaWrite", "PriceAreaApply", "PriceAreaList", @@ -958,6 +1095,7 @@ "PriceAreaFields", "PriceAreaTextFields", "PriceAreaAFRR", + "PriceAreaAFRRGraphQL", "PriceAreaAFRRWrite", "PriceAreaAFRRApply", "PriceAreaAFRRList", @@ -966,6 +1104,7 @@ "PriceAreaAFRRFields", "PriceAreaAFRRTextFields", "PriceAreaAsset", + "PriceAreaAssetGraphQL", "PriceAreaAssetWrite", "PriceAreaAssetApply", "PriceAreaAssetList", @@ -974,6 +1113,7 @@ "PriceAreaAssetFields", "PriceAreaAssetTextFields", "PriceProdCase", + "PriceProdCaseGraphQL", "PriceProdCaseWrite", "PriceProdCaseApply", "PriceProdCaseList", @@ -981,6 +1121,7 @@ "PriceProdCaseApplyList", "PriceProdCaseFields", "Reservoir", + "ReservoirGraphQL", "ReservoirWrite", "ReservoirApply", "ReservoirList", @@ -989,6 +1130,7 @@ "ReservoirFields", "ReservoirTextFields", "SHOPResult", + "SHOPResultGraphQL", "SHOPResultWrite", "SHOPResultApply", "SHOPResultList", @@ -996,6 +1138,7 @@ "SHOPResultApplyList", "SHOPResultFields", "SHOPResultPriceProd", + "SHOPResultPriceProdGraphQL", "SHOPResultPriceProdWrite", "SHOPResultPriceProdApply", "SHOPResultPriceProdList", @@ -1003,6 +1146,7 @@ "SHOPResultPriceProdApplyList", "SHOPResultPriceProdFields", "SHOPTimeSeries", + "SHOPTimeSeriesGraphQL", "SHOPTimeSeriesWrite", "SHOPTimeSeriesApply", "SHOPTimeSeriesList", @@ -1011,6 +1155,7 @@ "SHOPTimeSeriesFields", "SHOPTimeSeriesTextFields", "SHOPTriggerInput", + "SHOPTriggerInputGraphQL", "SHOPTriggerInputWrite", "SHOPTriggerInputApply", "SHOPTriggerInputList", @@ -1019,6 +1164,7 @@ "SHOPTriggerInputFields", "SHOPTriggerInputTextFields", "SHOPTriggerOutput", + "SHOPTriggerOutputGraphQL", "SHOPTriggerOutputWrite", "SHOPTriggerOutputApply", "SHOPTriggerOutputList", @@ -1027,6 +1173,7 @@ "SHOPTriggerOutputFields", "SHOPTriggerOutputTextFields", "Scenario", + "ScenarioGraphQL", "ScenarioWrite", "ScenarioApply", "ScenarioList", @@ -1035,6 +1182,7 @@ "ScenarioFields", 
"ScenarioTextFields", "ShopPartialBidCalculationInput", + "ShopPartialBidCalculationInputGraphQL", "ShopPartialBidCalculationInputWrite", "ShopPartialBidCalculationInputApply", "ShopPartialBidCalculationInputList", @@ -1043,6 +1191,7 @@ "ShopPartialBidCalculationInputFields", "ShopPartialBidCalculationInputTextFields", "ShopPartialBidCalculationOutput", + "ShopPartialBidCalculationOutputGraphQL", "ShopPartialBidCalculationOutputWrite", "ShopPartialBidCalculationOutputApply", "ShopPartialBidCalculationOutputList", @@ -1051,6 +1200,7 @@ "ShopPartialBidCalculationOutputFields", "ShopPartialBidCalculationOutputTextFields", "TaskDispatcherShopInput", + "TaskDispatcherShopInputGraphQL", "TaskDispatcherShopInputWrite", "TaskDispatcherShopInputApply", "TaskDispatcherShopInputList", @@ -1059,6 +1209,7 @@ "TaskDispatcherShopInputFields", "TaskDispatcherShopInputTextFields", "TaskDispatcherShopOutput", + "TaskDispatcherShopOutputGraphQL", "TaskDispatcherShopOutputWrite", "TaskDispatcherShopOutputApply", "TaskDispatcherShopOutputList", @@ -1067,6 +1218,7 @@ "TaskDispatcherShopOutputFields", "TaskDispatcherShopOutputTextFields", "TaskDispatcherWaterInput", + "TaskDispatcherWaterInputGraphQL", "TaskDispatcherWaterInputWrite", "TaskDispatcherWaterInputApply", "TaskDispatcherWaterInputList", @@ -1075,6 +1227,7 @@ "TaskDispatcherWaterInputFields", "TaskDispatcherWaterInputTextFields", "TaskDispatcherWaterOutput", + "TaskDispatcherWaterOutputGraphQL", "TaskDispatcherWaterOutputWrite", "TaskDispatcherWaterOutputApply", "TaskDispatcherWaterOutputList", @@ -1083,6 +1236,7 @@ "TaskDispatcherWaterOutputFields", "TaskDispatcherWaterOutputTextFields", "TotalBidMatrixCalculationInput", + "TotalBidMatrixCalculationInputGraphQL", "TotalBidMatrixCalculationInputWrite", "TotalBidMatrixCalculationInputApply", "TotalBidMatrixCalculationInputList", @@ -1091,6 +1245,7 @@ "TotalBidMatrixCalculationInputFields", "TotalBidMatrixCalculationInputTextFields", "TotalBidMatrixCalculationOutput", + "TotalBidMatrixCalculationOutputGraphQL", "TotalBidMatrixCalculationOutputWrite", "TotalBidMatrixCalculationOutputApply", "TotalBidMatrixCalculationOutputList", @@ -1099,6 +1254,7 @@ "TotalBidMatrixCalculationOutputFields", "TotalBidMatrixCalculationOutputTextFields", "TurbineEfficiencyCurve", + "TurbineEfficiencyCurveGraphQL", "TurbineEfficiencyCurveWrite", "TurbineEfficiencyCurveApply", "TurbineEfficiencyCurveList", @@ -1106,6 +1262,7 @@ "TurbineEfficiencyCurveApplyList", "TurbineEfficiencyCurveFields", "WaterPartialBidCalculationInput", + "WaterPartialBidCalculationInputGraphQL", "WaterPartialBidCalculationInputWrite", "WaterPartialBidCalculationInputApply", "WaterPartialBidCalculationInputList", @@ -1114,6 +1271,7 @@ "WaterPartialBidCalculationInputFields", "WaterPartialBidCalculationInputTextFields", "WaterPartialBidCalculationOutput", + "WaterPartialBidCalculationOutputGraphQL", "WaterPartialBidCalculationOutputWrite", "WaterPartialBidCalculationOutputApply", "WaterPartialBidCalculationOutputList", @@ -1122,6 +1280,7 @@ "WaterPartialBidCalculationOutputFields", "WaterPartialBidCalculationOutputTextFields", "Watercourse", + "WatercourseGraphQL", "WatercourseWrite", "WatercourseApply", "WatercourseList", @@ -1130,6 +1289,7 @@ "WatercourseFields", "WatercourseTextFields", "WatercourseShop", + "WatercourseShopGraphQL", "WatercourseShopWrite", "WatercourseShopApply", "WatercourseShopList", diff --git a/cognite/powerops/client/_generated/v1/data_classes/_alert.py b/cognite/powerops/client/_generated/v1/data_classes/_alert.py index 
994336262..b8a6a48f9 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_alert.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_alert.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,6 +19,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -49,6 +53,86 @@ } + +class AlertGraphQL(GraphQLCore): + """This represents the reading version of alert, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the alert. + data_record: The data record of the alert node. + time: Timestamp that the alert occurred (within the workflow) + title: Summary description of the alert + description: Detailed description of the alert + severity: CRITICAL (calculation could not be completed) WARNING (calculation completed, with major issue) INFO (calculation completed, with minor issues) + alert_type: Classification of the alert (not in current alerting implementation) + status_code: Unique status code for the alert. May be used by the frontend to avoid use of hardcoded description (i.e. like a translation) + event_ids: An array of associated alert CDF Events (e.g. SHOP Run events) + calculation_run: The identifier of the parent Bid Calculation (required so that alerts can be created before the BidDocument) + """ + + view_id = dm.ViewId("sp_powerops_models", "Alert", "1") + time: Optional[datetime.datetime] = None + title: Optional[str] = None + description: Optional[str] = None + severity: Optional[str] = None + alert_type: Optional[str] = Field(None, alias="alertType") + status_code: Optional[int] = Field(None, alias="statusCode") + event_ids: Optional[list[int]] = Field(None, alias="eventIds") + calculation_run: Optional[str] = Field(None, alias="calculationRun") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> Alert: + """Convert this GraphQL format of alert to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Alert( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + time=self.time, + title=self.title, + description=self.description, + severity=self.severity, + alert_type=self.alert_type, + status_code=self.status_code, + event_ids=self.event_ids, + calculation_run=self.calculation_run, + ) + + def as_write(self) -> AlertWrite: + """Convert this GraphQL format of alert to the writing format.""" + return AlertWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + time=self.time, + title=self.title, + description=self.description, + severity=self.severity, + alert_type=self.alert_type, + status_code=self.status_code, +
event_ids=self.event_ids, + calculation_run=self.calculation_run, + ) + + class Alert(DomainModel): """This represents the reading version of alert. @@ -140,6 +224,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -177,7 +262,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_basic_bid_matrix.py b/cognite/powerops/client/_generated/v1/data_classes/_basic_bid_matrix.py index d51811a33..916e78ad5 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_basic_bid_matrix.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_basic_bid_matrix.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_matrix import BidMatrix, BidMatrixWrite if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadGraphQL, BidMethodDayAheadWrite __all__ = [ @@ -48,6 +52,90 @@ } +class BasicBidMatrixGraphQL(GraphQLCore): + """This represents the reading version of basic bid matrix, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the basic bid matrix. + data_record: The data record of the basic bid matrix node. + resource_cost: The resource cost field. + matrix: The matrix field. + asset_type: The asset type field. + asset_id: The asset id field. + is_processed: Whether the bid matrix has been processed by the bid matrix processor or not + alerts: The alert field. + method: The method field. 
+ """ + + view_id = dm.ViewId("sp_powerops_models", "BasicBidMatrix", "1") + resource_cost: Optional[str] = Field(None, alias="resourceCost") + matrix: Union[str, None] = None + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + is_processed: Optional[bool] = Field(None, alias="isProcessed") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + method: Optional[BidMethodDayAheadGraphQL] = Field(None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "method", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BasicBidMatrix: + """Convert this GraphQL format of basic bid matrix to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BasicBidMatrix( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + ) + + def as_write(self) -> BasicBidMatrixWrite: + """Convert this GraphQL format of basic bid matrix to the writing format.""" + return BasicBidMatrixWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + ) + + class BasicBidMatrix(BidMatrix): """This represents the reading version of basic bid matrix. 
@@ -124,6 +212,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -160,7 +249,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -175,7 +264,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_calculation_task.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_calculation_task.py index 6dd7a08ad..29d2d5553 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_calculation_task.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_calculation_task.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,12 +19,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._plant import Plant, PlantWrite - from ._price_area import PriceArea, PriceAreaWrite + from ._plant import Plant, PlantGraphQL, PlantWrite + from ._price_area import PriceArea, PriceAreaGraphQL, PriceAreaWrite __all__ = [ @@ -41,6 +45,74 @@ } +class BidCalculationTaskGraphQL(GraphQLCore): + """This represents the reading version of bid calculation task, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid calculation task. + data_record: The data record of the bid calculation task node. + plant: The plant field. 
+ bid_date: The bid date that the task is for + price_area: The price area related to the bid calculation task + """ + + view_id = dm.ViewId("sp_powerops_models", "BidCalculationTask", "1") + plant: Optional[PlantGraphQL] = Field(None, repr=False) + bid_date: Optional[datetime.date] = Field(None, alias="bidDate") + price_area: Optional[PriceAreaGraphQL] = Field(None, repr=False, alias="priceArea") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("plant", "price_area", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidCalculationTask: + """Convert this GraphQL format of bid calculation task to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidCalculationTask( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + plant=self.plant.as_read() if isinstance(self.plant, GraphQLCore) else self.plant, + bid_date=self.bid_date, + price_area=self.price_area.as_read() if isinstance(self.price_area, GraphQLCore) else self.price_area, + ) + + def as_write(self) -> BidCalculationTaskWrite: + """Convert this GraphQL format of bid calculation task to the writing format.""" + return BidCalculationTaskWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + plant=self.plant.as_write() if isinstance(self.plant, DomainModel) else self.plant, + bid_date=self.bid_date, + price_area=self.price_area.as_write() if isinstance(self.price_area, DomainModel) else self.price_area, + ) + + class BidCalculationTask(DomainModel): """This represents the reading version of bid calculation task. 
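The other change that repeats across these files is the `allow_version_increase` parameter on `_to_instances_write`: when it is true, the `NodeApply` is built with `existing_version=None`, so CDF increases the version instead of rejecting the ingest on a version conflict. A rough sketch of the effect (the instance is hypothetical, and `_to_instances_write` is normally invoked for you by the client's `upsert`):

import datetime

from cognite.powerops.client._generated.v1.data_classes import BidCalculationTaskWrite

# Hypothetical write instance; identifiers are illustrative only.
task = BidCalculationTaskWrite(
    external_id="bid-task-2024-03-22",
    bid_date=datetime.date(2024, 3, 22),
)

# With allow_version_increase=True the node is applied with
# existing_version=None, i.e. without the optimistic-locking check.
resources = task._to_instances_write(
    cache=set(),
    view_by_read_class=None,
    allow_version_increase=True,
)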
@@ -111,6 +183,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -141,7 +214,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration.py index 2a03c27ab..4f3da50f3 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,11 +18,12 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._market_configuration import MarketConfiguration, MarketConfigurationWrite + from ._market_configuration import MarketConfiguration, MarketConfigurationGraphQL, MarketConfigurationWrite __all__ = [ @@ -32,6 +36,74 @@ ] +class BidConfigurationGraphQL(GraphQLCore): + """This represents the reading version of bid configuration, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid configuration. + data_record: The data record of the bid configuration node. 
+ market_configuration: The market configuration related to the bid configuration + """ + + view_id = dm.ViewId("sp_powerops_models", "BidConfiguration", "1") + market_configuration: Optional[MarketConfigurationGraphQL] = Field(None, repr=False, alias="marketConfiguration") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("market_configuration", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidConfiguration: + """Convert this GraphQL format of bid configuration to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidConfiguration( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + market_configuration=( + self.market_configuration.as_read() + if isinstance(self.market_configuration, GraphQLCore) + else self.market_configuration + ), + ) + + def as_write(self) -> BidConfigurationWrite: + """Convert this GraphQL format of bid configuration to the writing format.""" + return BidConfigurationWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + market_configuration=( + self.market_configuration.as_write() + if isinstance(self.market_configuration, DomainModel) + else self.market_configuration + ), + ) + + class BidConfiguration(DomainModel): """This represents the reading version of bid configuration.
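Direct relations follow the same pattern as list connections, only without the `items` wrapper: the nested object validates as the related `...GraphQL` class, and `as_read`/`as_write` convert it recursively. A small sketch under the same assumptions as above (hypothetical payload):

from cognite.powerops.client._generated.v1.data_classes import BidConfigurationGraphQL

# A direct relation arrives as a nested object, not as {"items": [...]}.
raw_item = {
    "space": "power-ops-instances",
    "externalId": "bid-config-no2",
    "createdTime": "2024-03-20T12:00:00+00:00",
    "lastUpdatedTime": "2024-03-20T12:00:00+00:00",
    "marketConfiguration": {
        "space": "power-ops-instances",
        "externalId": "market-config-nordpool",
        "createdTime": "2024-03-20T12:00:00+00:00",
        "lastUpdatedTime": "2024-03-20T12:00:00+00:00",
    },
}

config = BidConfigurationGraphQL.model_validate(raw_item)
# The nested MarketConfigurationGraphQL is converted along with its parent.
market_config = config.as_read().market_configuration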
@@ -96,6 +168,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -121,7 +194,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_shop.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_shop.py index c49468dc0..a3e4876bf 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_shop.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_shop.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,16 +18,21 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_configuration import BidConfiguration, BidConfigurationWrite if TYPE_CHECKING: - from ._bid_method_shop_multi_scenario import BidMethodSHOPMultiScenario, BidMethodSHOPMultiScenarioWrite - from ._market_configuration import MarketConfiguration, MarketConfigurationWrite - from ._plant_shop import PlantShop, PlantShopWrite - from ._price_area import PriceArea, PriceAreaWrite - from ._watercourse_shop import WatercourseShop, WatercourseShopWrite + from ._bid_method_shop_multi_scenario import ( + BidMethodSHOPMultiScenario, + BidMethodSHOPMultiScenarioGraphQL, + BidMethodSHOPMultiScenarioWrite, + ) + from ._market_configuration import MarketConfiguration, MarketConfigurationGraphQL, MarketConfigurationWrite + from ._plant_shop import PlantShop, PlantShopGraphQL, PlantShopWrite + from ._price_area import PriceArea, PriceAreaGraphQL, PriceAreaWrite + from ._watercourse_shop import WatercourseShop, WatercourseShopGraphQL, WatercourseShopWrite __all__ = [ @@ -47,6 +55,108 @@ } +class BidConfigurationShopGraphQL(GraphQLCore): + """This represents the reading version of bid configuration shop, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid configuration shop. + data_record: The data record of the bid configuration shop node. 
+ market_configuration: The market configuration related to the bid configuration + name: The name of the bid configuration + method: The bid method related to the bid configuration + price_area: The price area related to the bid configuration + plants_shop: The plants modelled in the shop runs + watercourses_shop: The watercourses modelled in the shop runs + """ + + view_id = dm.ViewId("sp_powerops_models", "BidConfigurationShop", "1") + market_configuration: Optional[MarketConfigurationGraphQL] = Field(None, repr=False, alias="marketConfiguration") + name: Optional[str] = None + method: Optional[BidMethodSHOPMultiScenarioGraphQL] = Field(None, repr=False) + price_area: Optional[PriceAreaGraphQL] = Field(None, repr=False, alias="priceArea") + plants_shop: Optional[list[PlantShopGraphQL]] = Field(default=None, repr=False, alias="plantsShop") + watercourses_shop: Optional[list[WatercourseShopGraphQL]] = Field( + default=None, repr=False, alias="watercoursesShop" + ) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("market_configuration", "method", "price_area", "plants_shop", "watercourses_shop", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidConfigurationShop: + """Convert this GraphQL format of bid configuration shop to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidConfigurationShop( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + market_configuration=( + self.market_configuration.as_read() + if isinstance(self.market_configuration, GraphQLCore) + else self.market_configuration + ), + name=self.name, + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + price_area=self.price_area.as_read() if isinstance(self.price_area, GraphQLCore) else self.price_area, + plants_shop=[ + plants_shop.as_read() if isinstance(plants_shop, GraphQLCore) else plants_shop + for plants_shop in self.plants_shop or [] + ], + watercourses_shop=[ + watercourses_shop.as_read() if isinstance(watercourses_shop, GraphQLCore) else watercourses_shop + for watercourses_shop in self.watercourses_shop or [] + ], + ) + + def as_write(self) -> BidConfigurationShopWrite: + """Convert this GraphQL format of bid configuration shop to the writing format.""" + return BidConfigurationShopWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + market_configuration=( + self.market_configuration.as_write() + if isinstance(self.market_configuration, DomainModel) + else self.market_configuration + ), + name=self.name, + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + price_area=self.price_area.as_write() if isinstance(self.price_area, DomainModel) else self.price_area, + plants_shop=[ + plants_shop.as_write() if isinstance(plants_shop, DomainModel) else
plants_shop + for plants_shop in self.plants_shop or [] + ], + watercourses_shop=[ + watercourses_shop.as_write() if isinstance(watercourses_shop, DomainModel) else watercourses_shop + for watercourses_shop in self.watercourses_shop or [] + ], + ) + + class BidConfigurationShop(BidConfiguration): """This represents the reading version of bid configuration shop. @@ -70,8 +180,10 @@ class BidConfigurationShop(BidConfiguration): name: Optional[str] = None method: Union[BidMethodSHOPMultiScenario, str, dm.NodeId, None] = Field(None, repr=False) price_area: Union[PriceArea, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") - plants_shop: Union[list[PlantShop], list[str], None] = Field(default=None, repr=False, alias="plantsShop") - watercourses_shop: Union[list[WatercourseShop], list[str], None] = Field( + plants_shop: Union[list[PlantShop], list[str], list[dm.NodeId], None] = Field( + default=None, repr=False, alias="plantsShop" + ) + watercourses_shop: Union[list[WatercourseShop], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="watercoursesShop" ) @@ -132,8 +244,10 @@ class BidConfigurationShopWrite(BidConfigurationWrite): name: Optional[str] = None method: Union[BidMethodSHOPMultiScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) price_area: Union[PriceAreaWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") - plants_shop: Union[list[PlantShopWrite], list[str], None] = Field(default=None, repr=False, alias="plantsShop") - watercourses_shop: Union[list[WatercourseShopWrite], list[str], None] = Field( + plants_shop: Union[list[PlantShopWrite], list[str], list[dm.NodeId], None] = Field( + default=None, repr=False, alias="plantsShop" + ) + watercourses_shop: Union[list[WatercourseShopWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="watercoursesShop" ) @@ -142,6 +256,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -182,7 +297,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -197,7 +312,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "BidConfiguration.plantsShop") for plants_shop in self.plants_shop or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=plants_shop, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=plants_shop, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) @@ -209,6 +330,8 @@ def _to_instances_write( end_node=watercourses_shop, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_water.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_water.py index 490588801..a65d89ac8 100644 --- 
a/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_water.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_water.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,16 +18,17 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_configuration import BidConfiguration, BidConfigurationWrite if TYPE_CHECKING: - from ._bid_method_water_value import BidMethodWaterValue, BidMethodWaterValueWrite - from ._market_configuration import MarketConfiguration, MarketConfigurationWrite - from ._plant import Plant, PlantWrite - from ._price_area import PriceArea, PriceAreaWrite - from ._watercourse import Watercourse, WatercourseWrite + from ._bid_method_water_value import BidMethodWaterValue, BidMethodWaterValueGraphQL, BidMethodWaterValueWrite + from ._market_configuration import MarketConfiguration, MarketConfigurationGraphQL, MarketConfigurationWrite + from ._plant import Plant, PlantGraphQL, PlantWrite + from ._price_area import PriceArea, PriceAreaGraphQL, PriceAreaWrite + from ._watercourse import Watercourse, WatercourseGraphQL, WatercourseWrite __all__ = [ @@ -37,6 +41,96 @@ ] +class BidConfigurationWaterGraphQL(GraphQLCore): + """This represents the reading version of bid configuration water, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid configuration water. + data_record: The data record of the bid configuration water node. 
+ market_configuration: The market configuration related to the bid configuration + method: The bid method related to the bid configuration + price_area: The price area related to the bid configuration + plants: The plants and related information needed to run the water value based method + watercourses: The watercourses and related information needed to run the water value based method + """ + + view_id = dm.ViewId("sp_powerops_models", "BidConfigurationWater", "1") + market_configuration: Optional[MarketConfigurationGraphQL] = Field(None, repr=False, alias="marketConfiguration") + method: Optional[BidMethodWaterValueGraphQL] = Field(None, repr=False) + price_area: Optional[PriceAreaGraphQL] = Field(None, repr=False, alias="priceArea") + plants: Optional[list[PlantGraphQL]] = Field(default=None, repr=False) + watercourses: Optional[list[WatercourseGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("market_configuration", "method", "price_area", "plants", "watercourses", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidConfigurationWater: + """Convert this GraphQL format of bid configuration water to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidConfigurationWater( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + market_configuration=( + self.market_configuration.as_read() + if isinstance(self.market_configuration, GraphQLCore) + else self.market_configuration + ), + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + price_area=self.price_area.as_read() if isinstance(self.price_area, GraphQLCore) else self.price_area, + plants=[plant.as_read() if isinstance(plant, GraphQLCore) else plant for plant in self.plants or []], + watercourses=[ + watercourse.as_read() if isinstance(watercourse, GraphQLCore) else watercourse + for watercourse in self.watercourses or [] + ], + ) + + def as_write(self) -> BidConfigurationWaterWrite: + """Convert this GraphQL format of bid configuration water to the writing format.""" + return BidConfigurationWaterWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + market_configuration=( + self.market_configuration.as_write() + if isinstance(self.market_configuration, DomainModel) + else self.market_configuration + ), + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + price_area=self.price_area.as_write() if isinstance(self.price_area, DomainModel) else self.price_area, + plants=[plant.as_write() if isinstance(plant, DomainModel) else plant for plant in self.plants or []], + watercourses=[ + watercourse.as_write() if isinstance(watercourse, DomainModel) else watercourse + for watercourse in self.watercourses or [] + ], + ) + + class
BidConfigurationWater(BidConfiguration): """This represents the reading version of bid configuration water. @@ -58,8 +152,8 @@ class BidConfigurationWater(BidConfiguration): ) method: Union[BidMethodWaterValue, str, dm.NodeId, None] = Field(None, repr=False) price_area: Union[PriceArea, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") - plants: Union[list[Plant], list[str], None] = Field(default=None, repr=False) - watercourses: Union[list[Watercourse], list[str], None] = Field(default=None, repr=False) + plants: Union[list[Plant], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) + watercourses: Union[list[Watercourse], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidConfigurationWaterWrite: """Convert this read version of bid configuration water to the writing version.""" @@ -112,14 +206,15 @@ class BidConfigurationWaterWrite(BidConfigurationWrite): ) method: Union[BidMethodWaterValueWrite, str, dm.NodeId, None] = Field(None, repr=False) price_area: Union[PriceAreaWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") - plants: Union[list[PlantWrite], list[str], None] = Field(default=None, repr=False) - watercourses: Union[list[WatercourseWrite], list[str], None] = Field(default=None, repr=False) + plants: Union[list[PlantWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) + watercourses: Union[list[WatercourseWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -157,7 +252,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -172,14 +267,26 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "BidConfiguration.plants") for plant in self.plants or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=plant, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=plant, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) edge_type = dm.DirectRelationReference("sp_powerops_types", "BidConfiguration.watercourses") for watercourse in self.watercourses or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=watercourse, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=watercourse, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_document_afrr.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_document_afrr.py index 5f8e9b648..1797ef5b4 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_document_afrr.py +++ 
b/cognite/powerops/client/_generated/v1/data_classes/_bid_document_afrr.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,13 +19,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_row import BidRow, BidRowWrite - from ._price_area_afrr import PriceAreaAFRR, PriceAreaAFRRWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_row import BidRow, BidRowGraphQL, BidRowWrite + from ._price_area_afrr import PriceAreaAFRR, PriceAreaAFRRGraphQL, PriceAreaAFRRWrite __all__ = [ @@ -49,6 +53,94 @@ } + +class BidDocumentAFRRGraphQL(GraphQLCore): + """This represents the reading version of bid document afrr, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid document afrr. + data_record: The data record of the bid document afrr node. + name: Unique name for a given instance of a Bid Document. A combination of name, priceArea, date and startCalculation. + delivery_date: The date of the Bid. + start_calculation: Timestamp of when the Bid calculation workflow started. + end_calculation: Timestamp of when the Bid calculation workflow completed. + is_complete: Indicates that the Bid calculation workflow has completed (although it has not necessarily succeeded). + alerts: An array of calculation level Alerts. + price_area: The price area field. + bids: An array of BidRows containing the Bid data.
+ """ + + view_id = dm.ViewId("sp_powerops_models", "BidDocumentAFRR", "1") + name: Optional[str] = None + delivery_date: Optional[datetime.date] = Field(None, alias="deliveryDate") + start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") + end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") + is_complete: Optional[bool] = Field(None, alias="isComplete") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + price_area: Optional[PriceAreaAFRRGraphQL] = Field(None, repr=False, alias="priceArea") + bids: Optional[list[BidRowGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "price_area", "bids", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidDocumentAFRR: + """Convert this GraphQL format of bid document afrr to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidDocumentAFRR( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + delivery_date=self.delivery_date, + start_calculation=self.start_calculation, + end_calculation=self.end_calculation, + is_complete=self.is_complete, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + price_area=self.price_area.as_read() if isinstance(self.price_area, GraphQLCore) else self.price_area, + bids=[bid.as_read() if isinstance(bid, GraphQLCore) else bid for bid in self.bids or []], + ) + + def as_write(self) -> BidDocumentAFRRWrite: + """Convert this GraphQL format of bid document afrr to the writing format.""" + return BidDocumentAFRRWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + delivery_date=self.delivery_date, + start_calculation=self.start_calculation, + end_calculation=self.end_calculation, + is_complete=self.is_complete, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + price_area=self.price_area.as_write() if isinstance(self.price_area, DomainModel) else self.price_area, + bids=[bid.as_write() if isinstance(bid, DomainModel) else bid for bid in self.bids or []], + ) + + class BidDocumentAFRR(DomainModel): """This represents the reading version of bid document afrr. 
@@ -77,9 +169,9 @@ class BidDocumentAFRR(DomainModel): start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") is_complete: Optional[bool] = Field(None, alias="isComplete") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) price_area: Union[PriceAreaAFRR, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") - bids: Union[list[BidRow], list[str], None] = Field(default=None, repr=False) + bids: Union[list[BidRow], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidDocumentAFRRWrite: """Convert this read version of bid document afrr to the writing version.""" @@ -135,15 +227,16 @@ class BidDocumentAFRRWrite(DomainModelWrite): start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") is_complete: Optional[bool] = Field(None, alias="isComplete") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) price_area: Union[PriceAreaAFRRWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") - bids: Union[list[BidRowWrite], list[str], None] = Field(default=None, repr=False) + bids: Union[list[BidRowWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -184,7 +277,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -199,14 +292,26 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) edge_type = dm.DirectRelationReference("sp_powerops_types", "partialBid") for bid in self.bids or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=bid, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=bid, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_document_day_ahead.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_document_day_ahead.py index 033cf877e..0224ad724 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_document_day_ahead.py +++ 
b/cognite/powerops/client/_generated/v1/data_classes/_bid_document_day_ahead.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,14 +19,15 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_matrix import BidMatrix, BidMatrixWrite - from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadWrite - from ._price_area import PriceArea, PriceAreaWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_matrix import BidMatrix, BidMatrixGraphQL, BidMatrixWrite + from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadGraphQL, BidMethodDayAheadWrite + from ._price_area import PriceArea, PriceAreaGraphQL, PriceAreaWrite __all__ = [ @@ -50,6 +54,106 @@ } + +class BidDocumentDayAheadGraphQL(GraphQLCore): + """This represents the reading version of bid document day ahead, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid document day ahead. + data_record: The data record of the bid document day ahead node. + name: Unique name for a given instance of a Bid Document. A combination of name, priceArea, date and startCalculation. + delivery_date: The date of the Bid. + start_calculation: Timestamp of when the Bid calculation workflow started. + end_calculation: Timestamp of when the Bid calculation workflow completed. + is_complete: Indicates that the Bid calculation workflow has completed (although it has not necessarily succeeded). + alerts: An array of calculation level Alerts. + price_area: The price area field. + method: The method field. + total: The total field. + partials: The partial field.
+ """ + + view_id = dm.ViewId("sp_powerops_models", "BidDocumentDayAhead", "1") + name: Optional[str] = None + delivery_date: Optional[datetime.date] = Field(None, alias="deliveryDate") + start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") + end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") + is_complete: Optional[bool] = Field(None, alias="isComplete") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + price_area: Optional[PriceAreaGraphQL] = Field(None, repr=False, alias="priceArea") + method: Optional[BidMethodDayAheadGraphQL] = Field(None, repr=False) + total: Optional[BidMatrixGraphQL] = Field(None, repr=False) + partials: Optional[list[BidMatrixGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "price_area", "method", "total", "partials", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidDocumentDayAhead: + """Convert this GraphQL format of bid document day ahead to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidDocumentDayAhead( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + delivery_date=self.delivery_date, + start_calculation=self.start_calculation, + end_calculation=self.end_calculation, + is_complete=self.is_complete, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + price_area=self.price_area.as_read() if isinstance(self.price_area, GraphQLCore) else self.price_area, + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + total=self.total.as_read() if isinstance(self.total, GraphQLCore) else self.total, + partials=[ + partial.as_read() if isinstance(partial, GraphQLCore) else partial for partial in self.partials or [] + ], + ) + + def as_write(self) -> BidDocumentDayAheadWrite: + """Convert this GraphQL format of bid document day ahead to the writing format.""" + return BidDocumentDayAheadWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + delivery_date=self.delivery_date, + start_calculation=self.start_calculation, + end_calculation=self.end_calculation, + is_complete=self.is_complete, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + price_area=self.price_area.as_write() if isinstance(self.price_area, DomainModel) else self.price_area, + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + total=self.total.as_write() if isinstance(self.total, DomainModel) else self.total, + partials=[ + partial.as_write() if isinstance(partial, DomainModel) else partial for partial in self.partials or [] + ], + ) + + class 
BidDocumentDayAhead(DomainModel): """This represents the reading version of bid document day ahead. @@ -80,11 +184,11 @@ class BidDocumentDayAhead(DomainModel): start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") is_complete: Optional[bool] = Field(None, alias="isComplete") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) price_area: Union[PriceArea, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") method: Union[BidMethodDayAhead, str, dm.NodeId, None] = Field(None, repr=False) total: Union[BidMatrix, str, dm.NodeId, None] = Field(None, repr=False) - partials: Union[list[BidMatrix], list[str], None] = Field(default=None, repr=False) + partials: Union[list[BidMatrix], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidDocumentDayAheadWrite: """Convert this read version of bid document day ahead to the writing version.""" @@ -146,17 +250,18 @@ class BidDocumentDayAheadWrite(DomainModelWrite): start_calculation: Optional[datetime.datetime] = Field(None, alias="startCalculation") end_calculation: Optional[datetime.datetime] = Field(None, alias="endCalculation") is_complete: Optional[bool] = Field(None, alias="isComplete") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) price_area: Union[PriceAreaWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") method: Union[BidMethodDayAheadWrite, str, dm.NodeId, None] = Field(None, repr=False) total: Union[BidMatrixWrite, str, dm.NodeId, None] = Field(None, repr=False) - partials: Union[list[BidMatrixWrite], list[str], None] = Field(default=None, repr=False) + partials: Union[list[BidMatrixWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -209,7 +314,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -224,14 +329,26 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) edge_type = dm.DirectRelationReference("sp_powerops_types", "partialBid") for partial in self.partials or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=partial, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=partial, + 
edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_matrix.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_matrix.py index c3a315c71..6cb1f2d57 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_matrix.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_matrix.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,11 +18,12 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite + from ._alert import Alert, AlertGraphQL, AlertWrite __all__ = [ @@ -46,6 +50,86 @@ } +class BidMatrixGraphQL(GraphQLCore): + """This represents the reading version of bid matrix, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid matrix. + data_record: The data record of the bid matrix node. + resource_cost: The resource cost field. + matrix: The matrix field. + asset_type: The asset type field. + asset_id: The asset id field. + is_processed: Whether the bid matrix has been processed by the bid matrix processor or not + alerts: The alert field. + """ + + view_id = dm.ViewId("sp_powerops_models", "BidMatrix", "1") + resource_cost: Optional[str] = Field(None, alias="resourceCost") + matrix: Union[str, None] = None + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + is_processed: Optional[bool] = Field(None, alias="isProcessed") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidMatrix: + """Convert this GraphQL format of bid matrix to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMatrix( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + ) + + def as_write(self) -> BidMatrixWrite: + """Convert this GraphQL format of bid matrix to the writing format.""" + return BidMatrixWrite( + space=self.space, + 
external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + ) + + class BidMatrix(DomainModel): """This represents the reading version of bid matrix. @@ -70,7 +154,7 @@ class BidMatrix(DomainModel): asset_type: Optional[str] = Field(None, alias="assetType") asset_id: Optional[str] = Field(None, alias="assetId") is_processed: Optional[bool] = Field(None, alias="isProcessed") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidMatrixWrite: """Convert this read version of bid matrix to the writing version.""" @@ -120,13 +204,14 @@ class BidMatrixWrite(DomainModelWrite): asset_type: Optional[str] = Field(None, alias="assetType") asset_id: Optional[str] = Field(None, alias="assetId") is_processed: Optional[bool] = Field(None, alias="isProcessed") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -155,7 +240,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -170,7 +255,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_matrix_raw.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_matrix_raw.py index 319127137..bc12a223e 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_matrix_raw.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_matrix_raw.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,12 +18,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_matrix import BidMatrix, BidMatrixWrite if TYPE_CHECKING: - from ._alert import Alert, AlertWrite + from ._alert import Alert, AlertGraphQL, AlertWrite __all__ = [ @@ -47,6 +51,86 @@ } +class BidMatrixRawGraphQL(GraphQLCore): + """This represents the reading version of bid matrix raw, used + when data is retrieved from 
CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid matrix raw. + data_record: The data record of the bid matrix raw node. + resource_cost: The resource cost field. + matrix: The matrix field. + asset_type: The asset type field. + asset_id: The asset id field. + is_processed: Whether the bid matrix has been processed by the bid matrix processor or not + alerts: The alert field. + """ + + view_id = dm.ViewId("sp_powerops_models", "BidMatrixRaw", "1") + resource_cost: Optional[str] = Field(None, alias="resourceCost") + matrix: Union[str, None] = None + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + is_processed: Optional[bool] = Field(None, alias="isProcessed") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidMatrixRaw: + """Convert this GraphQL format of bid matrix raw to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMatrixRaw( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + ) + + def as_write(self) -> BidMatrixRawWrite: + """Convert this GraphQL format of bid matrix raw to the writing format.""" + return BidMatrixRawWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + ) + + class BidMatrixRaw(BidMatrix): """This represents the reading version of bid matrix raw. 
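# --- Editorial example, not part of the diff above; a minimal sketch only. ---
# It shows how the two "before" validators on BidMatrixRawGraphQL reshape a raw
# GraphQL payload: parse_data_record folds the top-level createdTime/lastUpdatedTime
# into a DataRecordGraphQL, and parse_graphql unwraps the {"items": [...]} wrapper
# the GraphQL API places around edge fields such as `alerts`. The space, external
# id, and field values are made up, and the import assumes the class is
# re-exported from the generated data_classes package.
from cognite.powerops.client._generated.v1.data_classes import BidMatrixRawGraphQL

raw = {
    "space": "sp_powerops_instance",  # hypothetical instance space
    "externalId": "bid_matrix_raw:demo",  # hypothetical external id
    "resourceCost": "10.5",
    "assetType": "plant",
    "isProcessed": False,
    "alerts": {"items": []},  # unwrapped to [] by parse_graphql
    "createdTime": "2024-03-20T12:00:00+00:00",
    "lastUpdatedTime": "2024-03-21T08:30:00+00:00",
}
node = BidMatrixRawGraphQL.model_validate(raw)
read_node = node.as_read()  # BidMatrixRaw with DataRecord(version=0, ...)
write_node = node.as_write()  # BidMatrixRawWrite with DataRecordWrite(existing_version=0)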
@@ -114,6 +198,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -142,7 +227,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -157,7 +242,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method.py index 3864b72de..c16378272 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -38,6 +42,58 @@ } +class BidMethodGraphQL(GraphQLCore): + """This represents the reading version of bid method, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid method. + data_record: The data record of the bid method node. 
+ name: Name for the BidMethod + """ + + view_id = dm.ViewId("sp_powerops_models", "BidMethod", "1") + name: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> BidMethod: + """Convert this GraphQL format of bid method to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMethod( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + ) + + def as_write(self) -> BidMethodWrite: + """Convert this GraphQL format of bid method to the writing format.""" + return BidMethodWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + ) + + class BidMethod(DomainModel): """This represents the reading version of bid method. @@ -94,6 +150,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -110,7 +167,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_afrr.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_afrr.py index 3620ca2b3..f2790bc5b 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_afrr.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_afrr.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -38,6 +42,58 @@ } +class BidMethodAFRRGraphQL(GraphQLCore): + """This represents the reading version of bid method afrr, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid method afrr. + data_record: The data record of the bid method afrr node. 
+ name: Name for the BidMethod + """ + + view_id = dm.ViewId("sp_powerops_models", "BidMethodAFRR", "1") + name: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> BidMethodAFRR: + """Convert this GraphQL format of bid method afrr to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMethodAFRR( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + ) + + def as_write(self) -> BidMethodAFRRWrite: + """Convert this GraphQL format of bid method afrr to the writing format.""" + return BidMethodAFRRWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + ) + + class BidMethodAFRR(DomainModel): """This represents the reading version of bid method afrr. @@ -98,6 +154,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -116,7 +173,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_custom.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_custom.py index 418e3065b..33e44f8bc 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_custom.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_custom.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadWrite @@ -39,6 +43,58 @@ } +class BidMethodCustomGraphQL(GraphQLCore): + """This represents the reading version of bid method custom, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid method custom. + data_record: The data record of the bid method custom node. 
+ name: Name for the BidMethod + """ + + view_id = dm.ViewId("sp_powerops_models", "BidMethodCustom", "1") + name: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> BidMethodCustom: + """Convert this GraphQL format of bid method custom to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMethodCustom( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + ) + + def as_write(self) -> BidMethodCustomWrite: + """Convert this GraphQL format of bid method custom to the writing format.""" + return BidMethodCustomWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + ) + + class BidMethodCustom(BidMethodDayAhead): """This represents the reading version of bid method custom. @@ -91,6 +147,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -109,7 +166,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_day_ahead.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_day_ahead.py index 498575a7e..43a2aeec5 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_day_ahead.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_day_ahead.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_method import BidMethod, BidMethodWrite @@ -39,6 +43,58 @@ } +class BidMethodDayAheadGraphQL(GraphQLCore): + """This represents the reading version of bid method day ahead, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid method day ahead. + data_record: The data record of the bid method day ahead node. 
+ name: Name for the BidMethod + """ + + view_id = dm.ViewId("sp_powerops_models", "BidMethodDayAhead", "1") + name: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> BidMethodDayAhead: + """Convert this GraphQL format of bid method day ahead to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMethodDayAhead( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + ) + + def as_write(self) -> BidMethodDayAheadWrite: + """Convert this GraphQL format of bid method day ahead to the writing format.""" + return BidMethodDayAheadWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + ) + + class BidMethodDayAhead(BidMethod): """This represents the reading version of bid method day ahead. @@ -91,6 +147,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -109,7 +166,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_shop_multi_scenario.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_shop_multi_scenario.py index bef8f66ef..5fdff1c48 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_shop_multi_scenario.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_shop_multi_scenario.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,12 +18,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadWrite if TYPE_CHECKING: - from ._scenario import Scenario, ScenarioWrite + from ._scenario import Scenario, ScenarioGraphQL, ScenarioWrite __all__ = [ @@ -50,6 +54,88 @@ } +class BidMethodSHOPMultiScenarioGraphQL(GraphQLCore): + """This represents the reading version of bid method shop multi scenario, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the bid method shop multi scenario. + data_record: The data record of the bid method shop multi scenario node. 
+ name: Name for the BidMethod + shop_start_specification: The shop start specification + shop_end_specification: The shop end specification + shop_bid_date_specification: The shop bid date specification + scenarios: The scenarios to run this bid method with (includes incremental mappings and base mappings) + """ + + view_id = dm.ViewId("sp_powerops_models", "BidMethodSHOPMultiScenario", "1") + name: Optional[str] = None + shop_start_specification: Optional[str] = Field(None, alias="shopStartSpecification") + shop_end_specification: Optional[str] = Field(None, alias="shopEndSpecification") + shop_bid_date_specification: Optional[str] = Field(None, alias="shopBidDateSpecification") + scenarios: Optional[list[ScenarioGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("scenarios", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> BidMethodSHOPMultiScenario: + """Convert this GraphQL format of bid method shop multi scenario to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMethodSHOPMultiScenario( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + shop_start_specification=self.shop_start_specification, + shop_end_specification=self.shop_end_specification, + shop_bid_date_specification=self.shop_bid_date_specification, + scenarios=[ + scenario.as_read() if isinstance(scenario, GraphQLCore) else scenario + for scenario in self.scenarios or [] + ], + ) + + def as_write(self) -> BidMethodSHOPMultiScenarioWrite: + """Convert this GraphQL format of bid method shop multi scenario to the writing format.""" + return BidMethodSHOPMultiScenarioWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + shop_start_specification=self.shop_start_specification, + shop_end_specification=self.shop_end_specification, + shop_bid_date_specification=self.shop_bid_date_specification, + scenarios=[ + scenario.as_write() if isinstance(scenario, DomainModel) else scenario + for scenario in self.scenarios or [] + ], + ) + + class BidMethodSHOPMultiScenario(BidMethodDayAhead): """This represents the reading version of bid method shop multi scenario. 
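# --- Editorial example, not part of the diff above; a minimal sketch only. ---
# It illustrates the new `allow_version_increase` flag threaded through
# _to_instances_write: when True, every NodeApply is emitted with
# existing_version=None, so CDF assigns the next version instead of failing
# with "A version conflict caused the ingest to fail". The space, external id,
# and name are made up, and the import assumes the class is re-exported from
# the generated data_classes package.
from cognite.powerops.client._generated.v1.data_classes import BidMethodSHOPMultiScenarioWrite

method = BidMethodSHOPMultiScenarioWrite(
    space="sp_powerops_instance",
    external_id="bid_method:multi_scenario_demo",
    name="Multi-scenario demo",
)
resources = method.to_instances_write(allow_version_increase=True)
assert all(node.existing_version is None for node in resources.nodes)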
@@ -72,7 +158,7 @@ class BidMethodSHOPMultiScenario(BidMethodDayAhead): shop_start_specification: Optional[str] = Field(None, alias="shopStartSpecification") shop_end_specification: Optional[str] = Field(None, alias="shopEndSpecification") shop_bid_date_specification: Optional[str] = Field(None, alias="shopBidDateSpecification") - scenarios: Union[list[Scenario], list[str], None] = Field(default=None, repr=False) + scenarios: Union[list[Scenario], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidMethodSHOPMultiScenarioWrite: """Convert this read version of bid method shop multi scenario to the writing version.""" @@ -122,13 +208,14 @@ class BidMethodSHOPMultiScenarioWrite(BidMethodDayAheadWrite): shop_start_specification: Optional[str] = Field(None, alias="shopStartSpecification") shop_end_specification: Optional[str] = Field(None, alias="shopEndSpecification") shop_bid_date_specification: Optional[str] = Field(None, alias="shopBidDateSpecification") - scenarios: Union[list[ScenarioWrite], list[str], None] = Field(default=None, repr=False) + scenarios: Union[list[ScenarioWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -156,7 +243,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -171,7 +258,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.scenarios") for scenario in self.scenarios or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=scenario, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=scenario, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_water_value.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_water_value.py index aca13c26b..237f39463 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_water_value.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_water_value.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadWrite @@ -39,6 +43,58 @@ } +class BidMethodWaterValueGraphQL(GraphQLCore): + """This represents the reading version of bid method water value, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. 
+ external_id: The external id of the bid method water value. + data_record: The data record of the bid method water value node. + name: Name for the BidMethod + """ + + view_id = dm.ViewId("sp_powerops_models", "BidMethodWaterValue", "1") + name: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> BidMethodWaterValue: + """Convert this GraphQL format of bid method water value to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return BidMethodWaterValue( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + ) + + def as_write(self) -> BidMethodWaterValueWrite: + """Convert this GraphQL format of bid method water value to the writing format.""" + return BidMethodWaterValueWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + ) + + class BidMethodWaterValue(BidMethodDayAhead): """This represents the reading version of bid method water value. @@ -95,6 +151,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -113,7 +170,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_row.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_row.py index c8e697b2f..7b536f379 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_row.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_row.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_method_afrr import BidMethodAFRR, BidMethodAFRRWrite - from ._bid_row import BidRow, BidRowWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_method_afrr import BidMethodAFRR, BidMethodAFRRGraphQL, BidMethodAFRRWrite + from ._bid_row import BidRow, BidRowGraphQL, BidRowWrite __all__ = [ @@ -62,6 +66,110 @@ } +class BidRowGraphQL(GraphQLCore): + """This represents the reading version of bid row, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. 
+        external_id: The external id of the bid row.
+        data_record: The data record of the bid row node.
+        price: Price in EUR/MW/h, rounded to nearest price step (0.1?)
+        quantity_per_hour: The capacity offered, per hour, in MW, rounded to nearest step size (5?)
+        product: The product field.
+        is_divisible: The is divisible field.
+        min_quantity: Min quantity, per hour. Only relevant for divisible Bids. The minimum capacity that must be accepted; this must be lower than capacityPerHour and is rounded to the nearest step (5 MW?).
+        is_block: Indication if the row is part of a Block bid. If true: quantityPerHour must have the same value for consecutive hours (and no breaks). Block bids must be accepted for all hours or none.
+        exclusive_group_id: Other bids with the same ID are part of an exclusive group - only one of them can be accepted, and they must have the same direction (product). Not allowed for block bids.
+        linked_bid: The linked bid must have the opposite direction (link means that both or none must be accepted). Should be bi-directional.
+        asset_type: The asset type field.
+        asset_id: The asset id field.
+        method: The method field.
+        alerts: An array of associated alerts.
+    """
+
+    view_id = dm.ViewId("sp_powerops_models", "BidRow", "1")
+    price: Optional[float] = None
+    quantity_per_hour: Optional[list[float]] = Field(None, alias="quantityPerHour")
+    product: Optional[str] = None
+    is_divisible: Optional[bool] = Field(None, alias="isDivisible")
+    min_quantity: Optional[list[float]] = Field(None, alias="minQuantity")
+    is_block: Optional[bool] = Field(None, alias="isBlock")
+    exclusive_group_id: Optional[str] = Field(None, alias="exclusiveGroupId")
+    linked_bid: Optional[BidRowGraphQL] = Field(None, repr=False, alias="linkedBid")
+    asset_type: Optional[str] = Field(None, alias="assetType")
+    asset_id: Optional[str] = Field(None, alias="assetId")
+    method: Optional[BidMethodAFRRGraphQL] = Field(None, repr=False)
+    alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False)
+
+    @model_validator(mode="before")
+    def parse_data_record(cls, values: Any) -> Any:
+        if not isinstance(values, dict):
+            return values
+        if "lastUpdatedTime" in values or "createdTime" in values:
+            values["dataRecord"] = DataRecordGraphQL(
+                created_time=values.pop("createdTime", None),
+                last_updated_time=values.pop("lastUpdatedTime", None),
+            )
+        return values
+
+    @field_validator("linked_bid", "method", "alerts", mode="before")
+    def parse_graphql(cls, value: Any) -> Any:
+        if not isinstance(value, dict):
+            return value
+        if "items" in value:
+            return value["items"]
+        return value
+
+    def as_read(self) -> BidRow:
+        """Convert this GraphQL format of bid row to the reading format."""
+        if self.data_record is None:
+            raise ValueError("This object cannot be converted to a read format because it lacks a data record.")
+        return BidRow(
+            space=self.space,
+            external_id=self.external_id,
+            data_record=DataRecord(
+                version=0,
+                last_updated_time=self.data_record.last_updated_time,
+                created_time=self.data_record.created_time,
+            ),
+            price=self.price,
+            quantity_per_hour=self.quantity_per_hour,
+            product=self.product,
+            is_divisible=self.is_divisible,
+            min_quantity=self.min_quantity,
+            is_block=self.is_block,
+            exclusive_group_id=self.exclusive_group_id,
+            linked_bid=self.linked_bid.as_read() if isinstance(self.linked_bid, GraphQLCore) else self.linked_bid,
+            asset_type=self.asset_type,
+            asset_id=self.asset_id,
+            method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method,
alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + ) + + def as_write(self) -> BidRowWrite: + """Convert this GraphQL format of bid row to the writing format.""" + return BidRowWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + price=self.price, + quantity_per_hour=self.quantity_per_hour, + product=self.product, + is_divisible=self.is_divisible, + min_quantity=self.min_quantity, + is_block=self.is_block, + exclusive_group_id=self.exclusive_group_id, + linked_bid=self.linked_bid.as_write() if isinstance(self.linked_bid, DomainModel) else self.linked_bid, + asset_type=self.asset_type, + asset_id=self.asset_id, + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + ) + + class BidRow(DomainModel): """This represents the reading version of bid row. @@ -98,7 +206,7 @@ class BidRow(DomainModel): asset_type: Optional[str] = Field(None, alias="assetType") asset_id: Optional[str] = Field(None, alias="assetId") method: Union[BidMethodAFRR, str, dm.NodeId, None] = Field(None, repr=False) - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> BidRowWrite: """Convert this read version of bid row to the writing version.""" @@ -166,13 +274,14 @@ class BidRowWrite(DomainModelWrite): asset_type: Optional[str] = Field(None, alias="assetType") asset_id: Optional[str] = Field(None, alias="assetId") method: Union[BidMethodAFRRWrite, str, dm.NodeId, None] = Field(None, repr=False) - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -225,7 +334,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -240,7 +349,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_case.py b/cognite/powerops/client/_generated/v1/data_classes/_case.py index 9b10ef63f..9d7e70d59 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_case.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_case.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from 
._core import (
     DEFAULT_INSTANCE_SPACE,
+    DataRecord,
+    DataRecordGraphQL,
     DataRecordWrite,
     DomainModel,
     DomainModelCore,
@@ -16,11 +19,12 @@
     DomainModelWriteList,
     DomainModelList,
     DomainRelationWrite,
+    GraphQLCore,
     ResourcesWrite,
 )

 if TYPE_CHECKING:
-    from ._scenario import Scenario, ScenarioWrite
+    from ._scenario import Scenario, ScenarioGraphQL, ScenarioWrite


 __all__ = [
@@ -57,6 +61,94 @@
 }


+class CaseGraphQL(GraphQLCore):
+    """This represents the reading version of case, used
+    when data is retrieved from CDF using GraphQL.
+
+    It is used when retrieving data from CDF using GraphQL.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the case.
+        data_record: The data record of the case node.
+        scenario: The Shop scenario that was used to produce this result
+        case_file: The case file used
+        reservoir_mapping: The cut file reservoir mapping
+        cut_order_files: Cut order files (Module series in PRODRISK)
+        extra_files: The extra file field.
+        cog_shop_files_config: Configuration for the order in which to load the various files into pyshop
+        start_time: The start time of the case
+        end_time: The end time of the case
+    """
+
+    view_id = dm.ViewId("sp_powerops_models", "Case", "1")
+    scenario: Optional[ScenarioGraphQL] = Field(None, repr=False)
+    case_file: Union[str, None] = Field(None, alias="caseFile")
+    reservoir_mapping: Optional[list[str]] = Field(None, alias="reservoirMapping")
+    cut_order_files: Optional[list[str]] = Field(None, alias="cutOrderFiles")
+    extra_files: Optional[list[str]] = Field(None, alias="extraFiles")
+    cog_shop_files_config: Optional[list[dict]] = Field(None, alias="cogShopFilesConfig")
+    start_time: Optional[datetime.date] = Field(None, alias="startTime")
+    end_time: Optional[datetime.date] = Field(None, alias="endTime")
+
+    @model_validator(mode="before")
+    def parse_data_record(cls, values: Any) -> Any:
+        if not isinstance(values, dict):
+            return values
+        if "lastUpdatedTime" in values or "createdTime" in values:
+            values["dataRecord"] = DataRecordGraphQL(
+                created_time=values.pop("createdTime", None),
+                last_updated_time=values.pop("lastUpdatedTime", None),
+            )
+        return values
+
+    @field_validator("scenario", mode="before")
+    def parse_graphql(cls, value: Any) -> Any:
+        if not isinstance(value, dict):
+            return value
+        if "items" in value:
+            return value["items"]
+        return value
+
+    def as_read(self) -> Case:
+        """Convert this GraphQL format of case to the reading format."""
+        if self.data_record is None:
+            raise ValueError("This object cannot be converted to a read format because it lacks a data record.")
+        return Case(
+            space=self.space,
+            external_id=self.external_id,
+            data_record=DataRecord(
+                version=0,
+                last_updated_time=self.data_record.last_updated_time,
+                created_time=self.data_record.created_time,
+            ),
+            scenario=self.scenario.as_read() if isinstance(self.scenario, GraphQLCore) else self.scenario,
+            case_file=self.case_file,
+            reservoir_mapping=self.reservoir_mapping,
+            cut_order_files=self.cut_order_files,
+            extra_files=self.extra_files,
+            cog_shop_files_config=self.cog_shop_files_config,
+            start_time=self.start_time,
+            end_time=self.end_time,
+        )
+
+    def as_write(self) -> CaseWrite:
+        """Convert this GraphQL format of case to the writing format."""
+        return CaseWrite(
+            space=self.space,
+            external_id=self.external_id,
+            data_record=DataRecordWrite(existing_version=0),
+            scenario=self.scenario.as_write() if isinstance(self.scenario, DomainModel) else self.scenario,
+            case_file=self.case_file,
reservoir_mapping=self.reservoir_mapping, + cut_order_files=self.cut_order_files, + extra_files=self.extra_files, + cog_shop_files_config=self.cog_shop_files_config, + start_time=self.start_time, + end_time=self.end_time, + ) + + class Case(DomainModel): """This represents the reading version of case. @@ -148,6 +240,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -188,7 +281,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_commands.py b/cognite/powerops/client/_generated/v1/data_classes/_commands.py index 188c56c33..135574f5d 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_commands.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_commands.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -38,6 +42,58 @@ } +class CommandsGraphQL(GraphQLCore): + """This represents the reading version of command, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the command. + data_record: The data record of the command node. + commands: The commands used in the shop model file + """ + + view_id = dm.ViewId("sp_powerops_models", "Commands", "1") + commands: Optional[list[str]] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> Commands: + """Convert this GraphQL format of command to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Commands( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + commands=self.commands, + ) + + def as_write(self) -> CommandsWrite: + """Convert this GraphQL format of command to the writing format.""" + return CommandsWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + commands=self.commands, + ) + + class Commands(DomainModel): """This represents the reading version of command. 
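# --- Editorial example, not part of the diff above; a minimal sketch only. ---
# It shows the data-record guard in as_read(): a GraphQL result queried without
# createdTime/lastUpdatedTime carries no data record, so as_read() raises, while
# as_write() still succeeds because the write format only needs existing_version.
# Identifiers and the command string are made up, and the import assumes the
# class is re-exported from the generated data_classes package.
from cognite.powerops.client._generated.v1.data_classes import CommandsGraphQL

partial = CommandsGraphQL.model_validate(
    {"space": "sp_powerops_instance", "externalId": "commands:demo", "commands": ["start sim 1"]}
)
write_cmd = partial.as_write()  # fine: DataRecordWrite(existing_version=0)
try:
    partial.as_read()
except ValueError:
    pass  # no created/lastUpdated timestamps were queried, so there is no data record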
@@ -94,6 +150,7 @@ def _to_instances_write(
         cache: set[tuple[str, str]],
         view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None,
         write_none: bool = False,
+        allow_version_increase: bool = False,
     ) -> ResourcesWrite:
         resources = ResourcesWrite()
         if self.as_tuple_id() in cache:
@@ -110,7 +167,7 @@
         this_node = dm.NodeApply(
             space=self.space,
             external_id=self.external_id,
-            existing_version=self.data_record.existing_version,
+            existing_version=None if allow_version_increase else self.data_record.existing_version,
             type=self.node_type,
             sources=[
                 dm.NodeOrEdgeData(
diff --git a/cognite/powerops/client/_generated/v1/data_classes/_core.py b/cognite/powerops/client/_generated/v1/data_classes/_core.py
index a33774072..0de6ff42c 100644
--- a/cognite/powerops/client/_generated/v1/data_classes/_core.py
+++ b/cognite/powerops/client/_generated/v1/data_classes/_core.py
@@ -2,7 +2,7 @@
 import datetime
 import warnings
-from abc import abstractmethod
+from abc import abstractmethod, ABC
 from collections import UserList
 from collections.abc import Collection, Mapping
 from dataclasses import dataclass, field
@@ -61,9 +61,9 @@ def extend(self, other: ResourcesWrite) -> None:
 @dataclass
 class ResourcesWriteResult:
-    nodes: dm.NodeApplyResultList
-    edges: dm.EdgeApplyResultList
-    time_series: TimeSeriesList
+    nodes: dm.NodeApplyResultList = field(default_factory=lambda: dm.NodeApplyResultList([]))
+    edges: dm.EdgeApplyResultList = field(default_factory=lambda: dm.EdgeApplyResultList([]))
+    time_series: TimeSeriesList = field(default_factory=lambda: TimeSeriesList([]))


 # Arbitrary types are allowed to be able to use the TimeSeries class
@@ -76,6 +76,66 @@ def _repr_html_(self) -> str:
         return self.to_pandas().to_frame("value")._repr_html_()  # type: ignore[operator]


+class DataRecordGraphQL(Core):
+    last_updated_time: Optional[datetime.datetime] = Field(None, alias="lastUpdatedTime")
+    created_time: Optional[datetime.datetime] = Field(None, alias="createdTime")
+
+
+class GraphQLCore(Core, ABC):
+    view_id: ClassVar[dm.ViewId]
+    space: Optional[str] = None
+    external_id: Optional[str] = Field(None, alias="externalId")
+    data_record: Optional[DataRecordGraphQL] = Field(None, alias="dataRecord")
+
+
+class GraphQLList(UserList):
+    def __init__(self, nodes: Collection[GraphQLCore] | None = None):
+        super().__init__(nodes or [])
+
+    # The dunder implementations are to get proper type hints
+    def __iter__(self) -> Iterator[GraphQLCore]:
+        return super().__iter__()
+
+    @overload
+    def __getitem__(self, item: int) -> GraphQLCore: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> GraphQLList: ...
+
+    def __getitem__(self, item: int | slice) -> GraphQLCore | GraphQLList:
+        if isinstance(item, slice):
+            return self.__class__(self.data[item])
+        elif isinstance(item, int):
+            return self.data[item]
+        else:
+            raise TypeError(f"Expected int or slice, got {type(item)}")
+
+    def dump(self) -> list[dict[str, Any]]:
+        return [node.model_dump() for node in self.data]
+
+    def to_pandas(self) -> pd.DataFrame:
+        """
+        Convert the list of nodes to a pandas.DataFrame.
+
+        Returns:
+            A pandas.DataFrame with the nodes as rows.
+ """ + df = pd.DataFrame(self.dump()) + if df.empty: + df = pd.DataFrame(columns=GraphQLCore.model_fields) + # Reorder columns to have the most relevant first + id_columns = ["space", "external_id"] + end_columns = ["data_record"] + fixed_columns = set(id_columns + end_columns) + columns = ( + id_columns + [col for col in df if col not in fixed_columns] + [col for col in end_columns if col in df] + ) + return df[columns] + + def _repr_html_(self) -> str: + return self.to_pandas()._repr_html_() # type: ignore[operator] + + class DomainModelCore(Core): space: str external_id: str = Field(min_length=1, max_length=255, alias="externalId") @@ -199,14 +259,17 @@ class DataRecordWriteList(_DataRecordListCore[DataRecordWrite]): _INSTANCE = DataRecordWrite -class DomainModelWrite(DomainModelCore, extra=Extra.forbid, populate_by_name=True): +class DomainModelWrite(DomainModelCore, extra=Extra.ignore, populate_by_name=True): external_id_factory: ClassVar[Optional[Callable[[type[DomainModelWrite], dict], str]]] = None data_record: DataRecordWrite = Field(default_factory=DataRecordWrite) def to_instances_write( - self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False + self, + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, + write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: - return self._to_instances_write(set(), view_by_read_class, write_none) + return self._to_instances_write(set(), view_by_read_class, write_none, allow_version_increase) def to_instances_apply( self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False @@ -224,6 +287,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: raise NotImplementedError() @@ -340,12 +404,15 @@ def data_records(self) -> DataRecordWriteList: return DataRecordWriteList([node.data_record for node in self]) def to_instances_write( - self, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False + self, + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, + write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: cache: set[tuple[str, str]] = set() domains = ResourcesWrite() for node in self: - result = node._to_instances_write(cache, view_by_read_class, write_none) + result = node._to_instances_write(cache, view_by_read_class, write_none, allow_version_increase) domains.extend(result) return domains @@ -425,24 +492,30 @@ def _to_instances_write( edge_type: dm.DirectRelationReference, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: raise NotImplementedError() @classmethod def create_edge( - cls, start_node: DomainModelWrite | str, end_node: DomainModelWrite | str, edge_type: dm.DirectRelationReference + cls, + start_node: DomainModelWrite | str | dm.NodeId, + end_node: DomainModelWrite | str | dm.NodeId, + edge_type: dm.DirectRelationReference, ) -> dm.EdgeApply: - if isinstance(start_node, DomainModelWrite): - space = start_node.space - elif isinstance(start_node, DomainModelWrite): + if isinstance(start_node, (DomainModelWrite, dm.NodeId)): space = start_node.space + elif isinstance(end_node, (DomainModelWrite, dm.NodeId)): + space = end_node.space 
else: - raise TypeError(f"Either pass in a start or end node of type {DomainRelationWrite.__name__}") + space = DEFAULT_INSTANCE_SPACE if isinstance(end_node, str): end_ref = dm.DirectRelationReference(space, end_node) elif isinstance(end_node, DomainModelWrite): end_ref = end_node.as_direct_reference() + elif isinstance(end_node, dm.NodeId): + end_ref = dm.DirectRelationReference(end_node.space, end_node.external_id) else: raise TypeError(f"Expected str or subclass of {DomainRelationWrite.__name__}, got {type(end_node)}") @@ -450,6 +523,8 @@ def create_edge( start_ref = dm.DirectRelationReference(space, start_node) elif isinstance(start_node, DomainModelWrite): start_ref = start_node.as_direct_reference() + elif isinstance(start_node, dm.NodeId): + start_ref = dm.DirectRelationReference(start_node.space, start_node.external_id) else: raise TypeError(f"Expected str or subclass of {DomainRelationWrite.__name__}, got {type(start_node)}") @@ -465,11 +540,12 @@ def create_edge( def from_edge_to_resources( cls, cache: set[tuple[str, str]], - start_node: DomainModelWrite | str, - end_node: DomainModelWrite | str, + start_node: DomainModelWrite | str | dm.NodeId, + end_node: DomainModelWrite | str | dm.NodeId, edge_type: dm.DirectRelationReference, view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None = None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() edge = DomainRelationWrite.create_edge(start_node, end_node, edge_type) @@ -479,10 +555,20 @@ def from_edge_to_resources( cache.add((edge.space, edge.external_id)) if isinstance(end_node, DomainModelWrite): - other_resources = end_node._to_instances_write(cache, view_by_read_class, write_none) + other_resources = end_node._to_instances_write( + cache, + view_by_read_class, + write_none, + allow_version_increase, + ) resources.extend(other_resources) if isinstance(start_node, DomainModelWrite): - other_resources = start_node._to_instances_write(cache, view_by_read_class, write_none) + other_resources = start_node._to_instances_write( + cache, + view_by_read_class, + write_none, + allow_version_increase, + ) resources.extend(other_resources) return resources diff --git a/cognite/powerops/client/_generated/v1/data_classes/_custom_bid_matrix.py b/cognite/powerops/client/_generated/v1/data_classes/_custom_bid_matrix.py index d21aebde7..f41faac7d 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_custom_bid_matrix.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_custom_bid_matrix.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_matrix import BidMatrix, BidMatrixWrite if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_method_custom import BidMethodCustom, BidMethodCustomWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_method_custom import BidMethodCustom, BidMethodCustomGraphQL, BidMethodCustomWrite __all__ = [ @@ -48,6 +52,90 @@ } +class CustomBidMatrixGraphQL(GraphQLCore): + """This represents the reading version of custom bid matrix, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. 
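`create_edge`, completed just above, now also accepts `dm.NodeId` for either endpoint, and falls back to `DEFAULT_INSTANCE_SPACE` when neither endpoint carries a space instead of raising. A short sketch of the new call shapes (identifiers invented):

```python
from cognite.client import data_modeling as dm

from cognite.powerops.client._generated.v1.data_classes._core import DomainRelationWrite

edge = DomainRelationWrite.create_edge(
    start_node=dm.NodeId("my_space", "matrix_1"),  # NodeId is now a valid endpoint
    end_node=dm.NodeId("my_space", "alert_1"),
    edge_type=dm.DirectRelationReference("sp_powerops_types", "calculationIssue"),
)
# With two plain-string endpoints, the edge is now placed in
# DEFAULT_INSTANCE_SPACE rather than raising a TypeError as before.
```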
+ + Args: + space: The space where the node is located. + external_id: The external id of the custom bid matrix. + data_record: The data record of the custom bid matrix node. + resource_cost: The resource cost field. + matrix: The matrix field. + asset_type: The asset type field. + asset_id: The asset id field. + is_processed: Whether the bid matrix has been processed by the bid matrix processor or not + alerts: The alert field. + method: The method field. + """ + + view_id = dm.ViewId("sp_powerops_models", "CustomBidMatrix", "1") + resource_cost: Optional[str] = Field(None, alias="resourceCost") + matrix: Union[str, None] = None + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + is_processed: Optional[bool] = Field(None, alias="isProcessed") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + method: Optional[BidMethodCustomGraphQL] = Field(None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "method", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> CustomBidMatrix: + """Convert this GraphQL format of custom bid matrix to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return CustomBidMatrix( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + ) + + def as_write(self) -> CustomBidMatrixWrite: + """Convert this GraphQL format of custom bid matrix to the writing format.""" + return CustomBidMatrixWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + ) + + class CustomBidMatrix(BidMatrix): """This represents the reading version of custom bid matrix. 
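The `as_read`/`as_write` pattern above repeats on every `*GraphQL` class. A hedged round trip (payload invented; this assumes the shared `Core` config allows population by alias and field name, as the generated write classes do). Note that instance versions are not returned over GraphQL, so both conversions stamp a placeholder version of 0:

```python
from cognite.powerops.client._generated.v1.data_classes._custom_bid_matrix import CustomBidMatrixGraphQL

payload = {
    "space": "my_space",
    "externalId": "matrix_42",
    "isProcessed": True,
    "createdTime": "2024-03-19T12:00:00+00:00",
    "lastUpdatedTime": "2024-03-20T12:00:00+00:00",
}
gql = CustomBidMatrixGraphQL.model_validate(payload)
# parse_data_record lifted the two top-level timestamps into gql.data_record
read = gql.as_read()    # DataRecord(version=0, ...)
write = gql.as_write()  # DataRecordWrite(existing_version=0)
```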
@@ -120,6 +208,7 @@ def _to_instances_write(
         cache: set[tuple[str, str]],
         view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None,
         write_none: bool = False,
+        allow_version_increase: bool = False,
     ) -> ResourcesWrite:
         resources = ResourcesWrite()
         if self.as_tuple_id() in cache:
@@ -156,7 +245,7 @@ def _to_instances_write(
         this_node = dm.NodeApply(
             space=self.space,
             external_id=self.external_id,
-            existing_version=self.data_record.existing_version,
+            existing_version=None if allow_version_increase else self.data_record.existing_version,
             type=self.node_type,
             sources=[
                 dm.NodeOrEdgeData(
@@ -171,7 +260,13 @@ def _to_instances_write(
         edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue")
         for alert in self.alerts or []:
             other_resources = DomainRelationWrite.from_edge_to_resources(
-                cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class
+                cache,
+                start_node=self,
+                end_node=alert,
+                edge_type=edge_type,
+                view_by_read_class=view_by_read_class,
+                write_none=write_none,
+                allow_version_increase=allow_version_increase,
             )
             resources.extend(other_resources)

diff --git a/cognite/powerops/client/_generated/v1/data_classes/_generator.py b/cognite/powerops/client/_generated/v1/data_classes/_generator.py
index 3a2fb7672..b695b0d74 100644
--- a/cognite/powerops/client/_generated/v1/data_classes/_generator.py
+++ b/cognite/powerops/client/_generated/v1/data_classes/_generator.py
@@ -6,9 +6,12 @@
 from cognite.client import data_modeling as dm
 from cognite.client.data_classes import TimeSeries as CogniteTimeSeries
 from pydantic import Field
+from pydantic import field_validator, model_validator
 
 from ._core import (
     DEFAULT_INSTANCE_SPACE,
+    DataRecord,
+    DataRecordGraphQL,
     DataRecordWrite,
     DomainModel,
     DomainModelCore,
@@ -16,13 +19,22 @@
     DomainModelWriteList,
     DomainModelList,
     DomainRelationWrite,
+    GraphQLCore,
     ResourcesWrite,
     TimeSeries,
 )
 
 if TYPE_CHECKING:
-    from ._generator_efficiency_curve import GeneratorEfficiencyCurve, GeneratorEfficiencyCurveWrite
-    from ._turbine_efficiency_curve import TurbineEfficiencyCurve, TurbineEfficiencyCurveWrite
+    from ._generator_efficiency_curve import (
+        GeneratorEfficiencyCurve,
+        GeneratorEfficiencyCurveGraphQL,
+        GeneratorEfficiencyCurveWrite,
+    )
+    from ._turbine_efficiency_curve import (
+        TurbineEfficiencyCurve,
+        TurbineEfficiencyCurveGraphQL,
+        TurbineEfficiencyCurveWrite,
+    )
 
 
 __all__ = [
@@ -54,6 +66,118 @@
 }
+
+class GeneratorGraphQL(GraphQLCore):
+    """This represents the reading version of generator, used
+    when data is retrieved from CDF using GraphQL.
+
+    It is used when retrieving data from CDF using GraphQL.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the generator.
+        data_record: The data record of the generator node.
+        name: Name for the Asset
+        display_name: Display name for the Asset.
+        ordering: The ordering of the asset
+        p_min: The p min field.
+        penstock: The penstock field.
+        start_cost: The start cost field.
+        start_stop_cost: The start stop cost field.
+        is_available_time_series: The is available time series field.
+        efficiency_curve: The efficiency curve field.
+        turbine_curves: The turbine efficiency curves connected to the generator.
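The `existing_version` change in these hunks is what makes the new `allow_version_increase` flag effective end to end: sending `existing_version=None` tells CDF to increment the version instead of rejecting the write on a conflict. Sketch (the `bid_matrix` write object is assumed to already exist):

```python
# Retry pattern for "A version conflict caused the ingest to fail":
resources = bid_matrix.to_instances_write(allow_version_increase=True)
# Every dm.NodeApply in the result now has existing_version=None, so CDF
# bumps the version server-side instead of failing the ingest.
```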
+ """ + + view_id = dm.ViewId("sp_powerops_models", "Generator", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + ordering: Optional[int] = None + p_min: Optional[float] = Field(None, alias="pMin") + penstock: Optional[int] = None + start_cost: Optional[float] = Field(None, alias="startCost") + start_stop_cost: Union[TimeSeries, str, None] = Field(None, alias="startStopCost") + is_available_time_series: Union[TimeSeries, str, None] = Field(None, alias="isAvailableTimeSeries") + efficiency_curve: Optional[GeneratorEfficiencyCurveGraphQL] = Field(None, repr=False, alias="efficiencyCurve") + turbine_curves: Optional[list[TurbineEfficiencyCurveGraphQL]] = Field( + default=None, repr=False, alias="turbineCurves" + ) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("efficiency_curve", "turbine_curves", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> Generator: + """Convert this GraphQL format of generator to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Generator( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + p_min=self.p_min, + penstock=self.penstock, + start_cost=self.start_cost, + start_stop_cost=self.start_stop_cost, + is_available_time_series=self.is_available_time_series, + efficiency_curve=( + self.efficiency_curve.as_read() + if isinstance(self.efficiency_curve, GraphQLCore) + else self.efficiency_curve + ), + turbine_curves=[ + turbine_curve.as_read() if isinstance(turbine_curve, GraphQLCore) else turbine_curve + for turbine_curve in self.turbine_curves or [] + ], + ) + + def as_write(self) -> GeneratorWrite: + """Convert this GraphQL format of generator to the writing format.""" + return GeneratorWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + p_min=self.p_min, + penstock=self.penstock, + start_cost=self.start_cost, + start_stop_cost=self.start_stop_cost, + is_available_time_series=self.is_available_time_series, + efficiency_curve=( + self.efficiency_curve.as_write() + if isinstance(self.efficiency_curve, DomainModel) + else self.efficiency_curve + ), + turbine_curves=[ + turbine_curve.as_write() if isinstance(turbine_curve, DomainModel) else turbine_curve + for turbine_curve in self.turbine_curves or [] + ], + ) + + class Generator(DomainModel): """This represents the reading version of generator. 
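`GeneratorGraphQL.parse_graphql` above unwraps the `{"items": [...]}` connection wrapper that GraphQL uses for relation fields, so nested objects validate directly into their `*GraphQL` classes. Sketch (payload invented):

```python
from cognite.powerops.client._generated.v1.data_classes._generator import GeneratorGraphQL

payload = {
    "space": "my_space",
    "externalId": "gen_1",
    "name": "Generator 1",
    "turbineCurves": {"items": [{"space": "my_space", "externalId": "curve_1"}]},
}
gen = GeneratorGraphQL.model_validate(payload)
# The validator flattened {"items": [...]} to a plain list, so this is now
# a list[TurbineEfficiencyCurveGraphQL] with one element:
gen.turbine_curves
```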
@@ -88,7 +212,7 @@ class Generator(DomainModel): efficiency_curve: Union[GeneratorEfficiencyCurve, str, dm.NodeId, None] = Field( None, repr=False, alias="efficiencyCurve" ) - turbine_curves: Union[list[TurbineEfficiencyCurve], list[str], None] = Field( + turbine_curves: Union[list[TurbineEfficiencyCurve], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="turbineCurves" ) @@ -161,7 +285,7 @@ class GeneratorWrite(DomainModelWrite): efficiency_curve: Union[GeneratorEfficiencyCurveWrite, str, dm.NodeId, None] = Field( None, repr=False, alias="efficiencyCurve" ) - turbine_curves: Union[list[TurbineEfficiencyCurveWrite], list[str], None] = Field( + turbine_curves: Union[list[TurbineEfficiencyCurveWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="turbineCurves" ) @@ -170,6 +294,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -223,7 +348,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -243,6 +368,8 @@ def _to_instances_write( end_node=turbine_curve, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_generator_efficiency_curve.py b/cognite/powerops/client/_generated/v1/data_classes/_generator_efficiency_curve.py index 50e10525e..a792f0f66 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_generator_efficiency_curve.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_generator_efficiency_curve.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -37,6 +41,66 @@ } +class GeneratorEfficiencyCurveGraphQL(GraphQLCore): + """This represents the reading version of generator efficiency curve, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the generator efficiency curve. + data_record: The data record of the generator efficiency curve node. 
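`as_read` on the GraphQL classes refuses to convert when no data record came back, since the read classes require one; `as_write` has no such requirement. A tiny sketch using the class defined just below:

```python
from cognite.powerops.client._generated.v1.data_classes._generator_efficiency_curve import (
    GeneratorEfficiencyCurveGraphQL,
)

curve = GeneratorEfficiencyCurveGraphQL.model_validate({"space": "my_space", "externalId": "curve_1"})
curve.as_write()  # fine: the write format does not need a data record
curve.as_read()   # raises ValueError - no data record came back from the query
```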
+ ref: The reference value + power: The generator power values + efficiency: The generator efficiency values + """ + + view_id = dm.ViewId("sp_powerops_models", "GeneratorEfficiencyCurve", "1") + ref: Optional[float] = None + power: Optional[list[float]] = None + efficiency: Optional[list[float]] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> GeneratorEfficiencyCurve: + """Convert this GraphQL format of generator efficiency curve to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return GeneratorEfficiencyCurve( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + ref=self.ref, + power=self.power, + efficiency=self.efficiency, + ) + + def as_write(self) -> GeneratorEfficiencyCurveWrite: + """Convert this GraphQL format of generator efficiency curve to the writing format.""" + return GeneratorEfficiencyCurveWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + ref=self.ref, + power=self.power, + efficiency=self.efficiency, + ) + + class GeneratorEfficiencyCurve(DomainModel): """This represents the reading version of generator efficiency curve. @@ -107,6 +171,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -131,7 +196,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_mapping.py b/cognite/powerops/client/_generated/v1/data_classes/_mapping.py index 9e68c576c..61ce568d5 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_mapping.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_mapping.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,6 +19,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) @@ -45,6 +49,74 @@ } +class MappingGraphQL(GraphQLCore): + """This represents the reading version of mapping, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the mapping. + data_record: The data record of the mapping node. 
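The generator efficiency curve fields defined earlier in this hunk are parallel arrays: `power[i]` pairs with `efficiency[i]`. Both are optional in the GraphQL reading form, so guard before zipping. Sketch (values invented):

```python
from cognite.powerops.client._generated.v1.data_classes._generator_efficiency_curve import (
    GeneratorEfficiencyCurveGraphQL,
)

curve = GeneratorEfficiencyCurveGraphQL.model_validate({
    "space": "my_space",
    "externalId": "curve_1",
    "ref": 0.0,
    "power": [10.0, 20.0, 30.0],
    "efficiency": [80.0, 92.0, 88.0],
})
pairs = list(zip(curve.power or [], curve.efficiency or []))
# [(10.0, 80.0), (20.0, 92.0), (30.0, 88.0)]
```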
+ shop_path: The key in shop file to map to + timeseries: The time series to map to + transformations: The transformations to apply to the time series + retrieve: How to retrieve time series data + aggregation: How to aggregate time series data + """ + + view_id = dm.ViewId("sp_powerops_models", "Mapping", "1") + shop_path: Optional[str] = Field(None, alias="shopPath") + timeseries: Union[TimeSeries, str, None] = None + transformations: Optional[list[dict]] = None + retrieve: Optional[str] = None + aggregation: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> Mapping: + """Convert this GraphQL format of mapping to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Mapping( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + shop_path=self.shop_path, + timeseries=self.timeseries, + transformations=self.transformations, + retrieve=self.retrieve, + aggregation=self.aggregation, + ) + + def as_write(self) -> MappingWrite: + """Convert this GraphQL format of mapping to the writing format.""" + return MappingWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + shop_path=self.shop_path, + timeseries=self.timeseries, + transformations=self.transformations, + retrieve=self.retrieve, + aggregation=self.aggregation, + ) + + class Mapping(DomainModel): """This represents the reading version of mapping. @@ -121,6 +193,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -152,7 +225,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_market_configuration.py b/cognite/powerops/client/_generated/v1/data_classes/_market_configuration.py index 3b73279a7..3a59ca964 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_market_configuration.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_market_configuration.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,6 +18,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -49,6 +53,90 @@ } +class MarketConfigurationGraphQL(GraphQLCore): + """This represents the reading version of market configuration, used + when data is retrieved from CDF using GraphQL. 
+ + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the market configuration. + data_record: The data record of the market configuration node. + name: The name of the market + max_price: The highest price allowed + min_price: The lowest price allowed + time_zone: The time zone field. + price_unit: Unit of measurement for the price ('EUR/MWh') + price_steps: The maximum number of price steps + tick_size: 'Granularity' of the price; tick size = 0.1 means that prices must be 'rounded to nearest 0.1' (i. e. 66.43 is not allowed, but 66.4 is) + time_unit: The time unit ('1h') + trade_lot: 'Granularity' of the volumes; trade lot = 0.2 means that volumes must be 'rounded to nearest 0.2' (i. e. 66.5 is not allowed, but 66.4 is) + """ + + view_id = dm.ViewId("sp_powerops_models", "MarketConfiguration", "1") + name: Optional[str] = None + max_price: Optional[float] = Field(None, alias="maxPrice") + min_price: Optional[float] = Field(None, alias="minPrice") + time_zone: Optional[str] = Field(None, alias="timeZone") + price_unit: Optional[str] = Field(None, alias="priceUnit") + price_steps: Optional[int] = Field(None, alias="priceSteps") + tick_size: Optional[float] = Field(None, alias="tickSize") + time_unit: Optional[str] = Field(None, alias="timeUnit") + trade_lot: Optional[float] = Field(None, alias="tradeLot") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> MarketConfiguration: + """Convert this GraphQL format of market configuration to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return MarketConfiguration( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + max_price=self.max_price, + min_price=self.min_price, + time_zone=self.time_zone, + price_unit=self.price_unit, + price_steps=self.price_steps, + tick_size=self.tick_size, + time_unit=self.time_unit, + trade_lot=self.trade_lot, + ) + + def as_write(self) -> MarketConfigurationWrite: + """Convert this GraphQL format of market configuration to the writing format.""" + return MarketConfigurationWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + max_price=self.max_price, + min_price=self.min_price, + time_zone=self.time_zone, + price_unit=self.price_unit, + price_steps=self.price_steps, + tick_size=self.tick_size, + time_unit=self.time_unit, + trade_lot=self.trade_lot, + ) + + class MarketConfiguration(DomainModel): """This represents the reading version of market configuration. 
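The `tick_size`/`trade_lot` docstrings above both describe a rounding granularity. A small helper illustrating the rule they state (my own sketch, not part of the SDK):

```python
def on_grid(value: float, step: float, tol: float = 1e-9) -> bool:
    """True if `value` is a multiple of `step` within float tolerance."""
    ratio = value / step
    return abs(ratio - round(ratio)) < tol

on_grid(66.4, 0.1)   # True  - allowed price for tick_size=0.1
on_grid(66.43, 0.1)  # False - rejected, finer than the tick size
```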
@@ -149,6 +237,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -191,7 +280,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_model_template.py b/cognite/powerops/client/_generated/v1/data_classes/_model_template.py index 2af566ec0..ded1c7e99 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_model_template.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_model_template.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,12 +18,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._mapping import Mapping, MappingWrite - from ._watercourse_shop import WatercourseShop, WatercourseShopWrite + from ._mapping import Mapping, MappingGraphQL, MappingWrite + from ._watercourse_shop import WatercourseShop, WatercourseShopGraphQL, WatercourseShopWrite __all__ = [ @@ -47,6 +51,96 @@ } +class ModelTemplateGraphQL(GraphQLCore, protected_namespaces=()): + """This represents the reading version of model template, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the model template. + data_record: The data record of the model template node. 
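`ModelTemplateGraphQL` below is declared with `protected_namespaces=()`, which silences pydantic's protected `model_` namespace checks for its model-related fields, and its `version` property is exposed on the attribute side as `version_` while the alias keeps the wire name intact. Sketch (values invented):

```python
from cognite.powerops.client._generated.v1.data_classes._model_template import ModelTemplateGraphQL

template = ModelTemplateGraphQL.model_validate(
    {"space": "my_space", "externalId": "template_1", "version": "7", "shopVersion": "15.3"}
)
template.version_      # "7"   - trailing underscore on the attribute only
template.shop_version  # "15.3"
```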
+ version_: The version of the model + shop_version: The version of SHOP to run + watercourse: The watercourse to run the model for + model: The shop model file to use as template before applying base mapping + cog_shop_files_config: Configuration for in what order to load the various files into pyshop + extra_files: Extra files related to a model template + base_mappings: The base mappings for the model + """ + + view_id = dm.ViewId("sp_powerops_models", "ModelTemplate", "1") + version_: Optional[str] = Field(None, alias="version") + shop_version: Optional[str] = Field(None, alias="shopVersion") + watercourse: Optional[WatercourseShopGraphQL] = Field(None, repr=False) + model: Union[str, None] = None + cog_shop_files_config: Optional[list[dict]] = Field(None, alias="cogShopFilesConfig") + extra_files: Optional[list[str]] = Field(None, alias="extraFiles") + base_mappings: Optional[list[MappingGraphQL]] = Field(default=None, repr=False, alias="baseMappings") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("watercourse", "base_mappings", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> ModelTemplate: + """Convert this GraphQL format of model template to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return ModelTemplate( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + version_=self.version_, + shop_version=self.shop_version, + watercourse=self.watercourse.as_read() if isinstance(self.watercourse, GraphQLCore) else self.watercourse, + model=self.model, + cog_shop_files_config=self.cog_shop_files_config, + extra_files=self.extra_files, + base_mappings=[ + base_mapping.as_read() if isinstance(base_mapping, GraphQLCore) else base_mapping + for base_mapping in self.base_mappings or [] + ], + ) + + def as_write(self) -> ModelTemplateWrite: + """Convert this GraphQL format of model template to the writing format.""" + return ModelTemplateWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + version_=self.version_, + shop_version=self.shop_version, + watercourse=self.watercourse.as_write() if isinstance(self.watercourse, DomainModel) else self.watercourse, + model=self.model, + cog_shop_files_config=self.cog_shop_files_config, + extra_files=self.extra_files, + base_mappings=[ + base_mapping.as_write() if isinstance(base_mapping, DomainModel) else base_mapping + for base_mapping in self.base_mappings or [] + ], + ) + + class ModelTemplate(DomainModel, protected_namespaces=()): """This represents the reading version of model template. 
@@ -75,7 +169,9 @@ class ModelTemplate(DomainModel, protected_namespaces=()): model: Union[str, None] = None cog_shop_files_config: Optional[list[dict]] = Field(None, alias="cogShopFilesConfig") extra_files: Optional[list[str]] = Field(None, alias="extraFiles") - base_mappings: Union[list[Mapping], list[str], None] = Field(default=None, repr=False, alias="baseMappings") + base_mappings: Union[list[Mapping], list[str], list[dm.NodeId], None] = Field( + default=None, repr=False, alias="baseMappings" + ) def as_write(self) -> ModelTemplateWrite: """Convert this read version of model template to the writing version.""" @@ -133,13 +229,16 @@ class ModelTemplateWrite(DomainModelWrite, protected_namespaces=()): model: Union[str, None] = None cog_shop_files_config: Optional[list[dict]] = Field(None, alias="cogShopFilesConfig") extra_files: Optional[list[str]] = Field(None, alias="extraFiles") - base_mappings: Union[list[MappingWrite], list[str], None] = Field(default=None, repr=False, alias="baseMappings") + base_mappings: Union[list[MappingWrite], list[str], list[dm.NodeId], None] = Field( + default=None, repr=False, alias="baseMappings" + ) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -176,7 +275,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -196,6 +295,8 @@ def _to_instances_write( end_node=base_mapping, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix.py b/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix.py index e50a86bee..b83c5653a 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,14 +18,19 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_matrix import BidMatrix, BidMatrixWrite if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_method_shop_multi_scenario import BidMethodSHOPMultiScenario, BidMethodSHOPMultiScenarioWrite - from ._price_prod_case import PriceProdCase, PriceProdCaseWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_method_shop_multi_scenario import ( + BidMethodSHOPMultiScenario, + BidMethodSHOPMultiScenarioGraphQL, + BidMethodSHOPMultiScenarioWrite, + ) + from ._price_prod_case import PriceProdCase, PriceProdCaseGraphQL, PriceProdCaseWrite __all__ = [ @@ -49,6 +57,100 @@ } +class MultiScenarioMatrixGraphQL(GraphQLCore): + """This represents the reading version of multi scenario matrix, used + when data is retrieved from CDF using GraphQL. 
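`base_mappings` (like the other relation lists in this changeset) is widened to accept `dm.NodeId` references in addition to `str` external ids and full write objects. Sketch of the three styles (names invented):

```python
from cognite.client import data_modeling as dm

by_external_id = ["mapping_1"]                     # str: space inherited from the template node
by_node_id = [dm.NodeId("my_space", "mapping_1")]  # NodeId: explicit space, no object needed
# ...or full MappingWrite objects, which are written together with the template.
# Any of these can be passed as ModelTemplateWrite(..., base_mappings=...).
```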
+ + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the multi scenario matrix. + data_record: The data record of the multi scenario matrix node. + resource_cost: The resource cost field. + matrix: The matrix field. + asset_type: The asset type field. + asset_id: The asset id field. + is_processed: Whether the bid matrix has been processed by the bid matrix processor or not + alerts: The alert field. + method: The method field. + scenario_results: An array of price/prod pairs, one for each scenario/case - this is needed for the frontend + """ + + view_id = dm.ViewId("sp_powerops_models", "MultiScenarioMatrix", "1") + resource_cost: Optional[str] = Field(None, alias="resourceCost") + matrix: Union[str, None] = None + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + is_processed: Optional[bool] = Field(None, alias="isProcessed") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + method: Optional[BidMethodSHOPMultiScenarioGraphQL] = Field(None, repr=False) + scenario_results: Optional[list[PriceProdCaseGraphQL]] = Field(default=None, repr=False, alias="scenarioResults") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "method", "scenario_results", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> MultiScenarioMatrix: + """Convert this GraphQL format of multi scenario matrix to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return MultiScenarioMatrix( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + scenario_results=[ + scenario_result.as_read() if isinstance(scenario_result, GraphQLCore) else scenario_result + for scenario_result in self.scenario_results or [] + ], + ) + + def as_write(self) -> MultiScenarioMatrixWrite: + """Convert this GraphQL format of multi scenario matrix to the writing format.""" + return MultiScenarioMatrixWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + scenario_results=[ + scenario_result.as_write() if 
isinstance(scenario_result, DomainModel) else scenario_result + for scenario_result in self.scenario_results or [] + ], + ) + + class MultiScenarioMatrix(BidMatrix): """This represents the reading version of multi scenario matrix. @@ -72,7 +174,7 @@ class MultiScenarioMatrix(BidMatrix): "sp_powerops_types", "DayAheadMultiScenarioMatrix" ) method: Union[BidMethodSHOPMultiScenario, str, dm.NodeId, None] = Field(None, repr=False) - scenario_results: Union[list[PriceProdCase], list[str], None] = Field( + scenario_results: Union[list[PriceProdCase], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="scenarioResults" ) @@ -128,7 +230,7 @@ class MultiScenarioMatrixWrite(BidMatrixWrite): "sp_powerops_types", "DayAheadMultiScenarioMatrix" ) method: Union[BidMethodSHOPMultiScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) - scenario_results: Union[list[PriceProdCaseWrite], list[str], None] = Field( + scenario_results: Union[list[PriceProdCaseWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="scenarioResults" ) @@ -137,6 +239,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -173,7 +276,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -188,7 +291,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) @@ -200,6 +309,8 @@ def _to_instances_write( end_node=scenario_result, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix_raw.py b/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix_raw.py index 14c6953a3..ac9036e3d 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix_raw.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix_raw.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,14 +18,19 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._bid_matrix_raw import BidMatrixRaw, BidMatrixRawWrite if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_method_shop_multi_scenario import BidMethodSHOPMultiScenario, BidMethodSHOPMultiScenarioWrite - from ._shop_result_price_prod import SHOPResultPriceProd, SHOPResultPriceProdWrite + 
from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_method_shop_multi_scenario import ( + BidMethodSHOPMultiScenario, + BidMethodSHOPMultiScenarioGraphQL, + BidMethodSHOPMultiScenarioWrite, + ) + from ._shop_result_price_prod import SHOPResultPriceProd, SHOPResultPriceProdGraphQL, SHOPResultPriceProdWrite __all__ = [ @@ -49,6 +57,100 @@ } +class MultiScenarioMatrixRawGraphQL(GraphQLCore): + """This represents the reading version of multi scenario matrix raw, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the multi scenario matrix raw. + data_record: The data record of the multi scenario matrix raw node. + resource_cost: The resource cost field. + matrix: The matrix field. + asset_type: The asset type field. + asset_id: The asset id field. + is_processed: Whether the bid matrix has been processed by the bid matrix processor or not + alerts: The alert field. + method: The method field. + shop_results: An array of results, one for each scenario. + """ + + view_id = dm.ViewId("sp_powerops_models", "MultiScenarioMatrixRaw", "1") + resource_cost: Optional[str] = Field(None, alias="resourceCost") + matrix: Union[str, None] = None + asset_type: Optional[str] = Field(None, alias="assetType") + asset_id: Optional[str] = Field(None, alias="assetId") + is_processed: Optional[bool] = Field(None, alias="isProcessed") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + method: Optional[BidMethodSHOPMultiScenarioGraphQL] = Field(None, repr=False) + shop_results: Optional[list[SHOPResultPriceProdGraphQL]] = Field(default=None, repr=False, alias="shopResults") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "method", "shop_results", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> MultiScenarioMatrixRaw: + """Convert this GraphQL format of multi scenario matrix raw to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return MultiScenarioMatrixRaw( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + method=self.method.as_read() if isinstance(self.method, GraphQLCore) else self.method, + shop_results=[ + shop_result.as_read() if isinstance(shop_result, GraphQLCore) else shop_result + for shop_result in self.shop_results or [] + ], + ) + + def as_write(self) -> MultiScenarioMatrixRawWrite: + """Convert this GraphQL format of multi scenario matrix raw to the writing format.""" + return MultiScenarioMatrixRawWrite( + space=self.space, + 
external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + resource_cost=self.resource_cost, + matrix=self.matrix, + asset_type=self.asset_type, + asset_id=self.asset_id, + is_processed=self.is_processed, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, + shop_results=[ + shop_result.as_write() if isinstance(shop_result, DomainModel) else shop_result + for shop_result in self.shop_results or [] + ], + ) + + class MultiScenarioMatrixRaw(BidMatrixRaw): """This represents the reading version of multi scenario matrix raw. @@ -72,7 +174,7 @@ class MultiScenarioMatrixRaw(BidMatrixRaw): "sp_powerops_types", "DayAheadMultiScenarioMatrix" ) method: Union[BidMethodSHOPMultiScenario, str, dm.NodeId, None] = Field(None, repr=False) - shop_results: Union[list[SHOPResultPriceProd], list[str], None] = Field( + shop_results: Union[list[SHOPResultPriceProd], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="shopResults" ) @@ -128,7 +230,7 @@ class MultiScenarioMatrixRawWrite(BidMatrixRawWrite): "sp_powerops_types", "DayAheadMultiScenarioMatrix" ) method: Union[BidMethodSHOPMultiScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) - shop_results: Union[list[SHOPResultPriceProdWrite], list[str], None] = Field( + shop_results: Union[list[SHOPResultPriceProdWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="shopResults" ) @@ -137,6 +239,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -173,7 +276,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -188,14 +291,26 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) edge_type = dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.shopResults") for shop_result in self.shop_results or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=shop_result, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=shop_result, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_partial_post_processing_input.py b/cognite/powerops/client/_generated/v1/data_classes/_partial_post_processing_input.py index 8853b8f35..239966782 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_partial_post_processing_input.py +++ 
b/cognite/powerops/client/_generated/v1/data_classes/_partial_post_processing_input.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,12 +18,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._bid_matrix_raw import BidMatrixRaw, BidMatrixRawWrite - from ._market_configuration import MarketConfiguration, MarketConfigurationWrite + from ._bid_matrix_raw import BidMatrixRaw, BidMatrixRawGraphQL, BidMatrixRawWrite + from ._market_configuration import MarketConfiguration, MarketConfigurationGraphQL, MarketConfigurationWrite __all__ = [ @@ -46,6 +50,106 @@ } +class PartialPostProcessingInputGraphQL(GraphQLCore): + """This represents the reading version of partial post processing input, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the partial post processing input. + data_record: The data record of the partial post processing input node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + market_config: The market config field. + partial_bid_matrices_raw: The partial bid matrices that needs post processing. + """ + + view_id = dm.ViewId("sp_powerops_models", "PartialPostProcessingInput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + market_config: Optional[MarketConfigurationGraphQL] = Field(None, repr=False, alias="marketConfig") + partial_bid_matrices_raw: Optional[list[BidMatrixRawGraphQL]] = Field( + default=None, repr=False, alias="partialBidMatricesRaw" + ) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("market_config", "partial_bid_matrices_raw", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> PartialPostProcessingInput: + """Convert this GraphQL format of partial post processing input to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PartialPostProcessingInput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + market_config=( + self.market_config.as_read() if 
isinstance(self.market_config, GraphQLCore) else self.market_config + ), + partial_bid_matrices_raw=[ + ( + partial_bid_matrices_raw.as_read() + if isinstance(partial_bid_matrices_raw, GraphQLCore) + else partial_bid_matrices_raw + ) + for partial_bid_matrices_raw in self.partial_bid_matrices_raw or [] + ], + ) + + def as_write(self) -> PartialPostProcessingInputWrite: + """Convert this GraphQL format of partial post processing input to the writing format.""" + return PartialPostProcessingInputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + market_config=( + self.market_config.as_write() if isinstance(self.market_config, DomainModel) else self.market_config + ), + partial_bid_matrices_raw=[ + ( + partial_bid_matrices_raw.as_write() + if isinstance(partial_bid_matrices_raw, DomainModel) + else partial_bid_matrices_raw + ) + for partial_bid_matrices_raw in self.partial_bid_matrices_raw or [] + ], + ) + + class PartialPostProcessingInput(DomainModel): """This represents the reading version of partial post processing input. @@ -72,7 +176,7 @@ class PartialPostProcessingInput(DomainModel): function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") market_config: Union[MarketConfiguration, str, dm.NodeId, None] = Field(None, repr=False, alias="marketConfig") - partial_bid_matrices_raw: Union[list[BidMatrixRaw], list[str], None] = Field( + partial_bid_matrices_raw: Union[list[BidMatrixRaw], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="partialBidMatricesRaw" ) @@ -135,7 +239,7 @@ class PartialPostProcessingInputWrite(DomainModelWrite): function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") market_config: Union[MarketConfigurationWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="marketConfig") - partial_bid_matrices_raw: Union[list[BidMatrixRawWrite], list[str], None] = Field( + partial_bid_matrices_raw: Union[list[BidMatrixRawWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="partialBidMatricesRaw" ) @@ -144,6 +248,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -179,7 +284,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -199,6 +304,8 @@ def _to_instances_write( end_node=partial_bid_matrices_raw, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_partial_post_processing_output.py b/cognite/powerops/client/_generated/v1/data_classes/_partial_post_processing_output.py index bebbb8458..b5503b5f8 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_partial_post_processing_output.py +++ 
b/cognite/powerops/client/_generated/v1/data_classes/_partial_post_processing_output.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,18 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_matrix import BidMatrix, BidMatrixWrite - from ._partial_post_processing_input import PartialPostProcessingInput, PartialPostProcessingInputWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_matrix import BidMatrix, BidMatrixGraphQL, BidMatrixWrite + from ._partial_post_processing_input import ( + PartialPostProcessingInput, + PartialPostProcessingInputGraphQL, + PartialPostProcessingInputWrite, + ) __all__ = [ @@ -47,6 +55,96 @@ } +class PartialPostProcessingOutputGraphQL(GraphQLCore): + """This represents the reading version of partial post processing output, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the partial post processing output. + data_record: The data record of the partial post processing output node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + alerts: An array of calculation level Alerts. + input_: The input field. + partial_matrices: The processed partial bid matrices that are used to calculate the total bid matrix. 
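The function-execution fields above (`process_id`, `process_step`, `function_call_id`, ...) make it easy to pick one run's output out of a mixed GraphQL result. Sketch; `result` is assumed to be a `GraphQLList` from a query that selected `__typename`:

```python
from cognite.powerops.client._generated.v1.data_classes._partial_post_processing_output import (
    PartialPostProcessingOutputGraphQL,
)

outputs = [
    item
    for item in result
    if isinstance(item, PartialPostProcessingOutputGraphQL)
    and item.function_call_id == "call_123"
]
```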
+ """ + + view_id = dm.ViewId("sp_powerops_models", "PartialPostProcessingOutput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + input_: Optional[PartialPostProcessingInputGraphQL] = Field(None, repr=False, alias="input") + partial_matrices: Optional[list[BidMatrixGraphQL]] = Field(default=None, repr=False, alias="partialMatrices") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "input_", "partial_matrices", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> PartialPostProcessingOutput: + """Convert this GraphQL format of partial post processing output to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PartialPostProcessingOutput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + input_=self.input_.as_read() if isinstance(self.input_, GraphQLCore) else self.input_, + partial_matrices=[ + partial_matrice.as_read() if isinstance(partial_matrice, GraphQLCore) else partial_matrice + for partial_matrice in self.partial_matrices or [] + ], + ) + + def as_write(self) -> PartialPostProcessingOutputWrite: + """Convert this GraphQL format of partial post processing output to the writing format.""" + return PartialPostProcessingOutputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + input_=self.input_.as_write() if isinstance(self.input_, DomainModel) else self.input_, + partial_matrices=[ + partial_matrice.as_write() if isinstance(partial_matrice, DomainModel) else partial_matrice + for partial_matrice in self.partial_matrices or [] + ], + ) + + class PartialPostProcessingOutput(DomainModel): """This represents the reading version of partial post processing output. 
@@ -73,9 +171,11 @@ class PartialPostProcessingOutput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) input_: Union[PartialPostProcessingInput, str, dm.NodeId, None] = Field(None, repr=False, alias="input") - partial_matrices: Union[list[BidMatrix], list[str], None] = Field(default=None, repr=False, alias="partialMatrices") + partial_matrices: Union[list[BidMatrix], list[str], list[dm.NodeId], None] = Field( + default=None, repr=False, alias="partialMatrices" + ) def as_write(self) -> PartialPostProcessingOutputWrite: """Convert this read version of partial post processing output to the writing version.""" @@ -131,9 +231,9 @@ class PartialPostProcessingOutputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) input_: Union[PartialPostProcessingInputWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="input") - partial_matrices: Union[list[BidMatrixWrite], list[str], None] = Field( + partial_matrices: Union[list[BidMatrixWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="partialMatrices" ) @@ -142,6 +242,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -175,7 +276,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -190,7 +291,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) @@ -202,6 +309,8 @@ def _to_instances_write( end_node=partial_matrice, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_plant.py b/cognite/powerops/client/_generated/v1/data_classes/_plant.py index 346360402..579e25476 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_plant.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_plant.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, 
model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,14 +19,15 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) if TYPE_CHECKING: - from ._generator import Generator, GeneratorWrite - from ._reservoir import Reservoir, ReservoirWrite - from ._watercourse import Watercourse, WatercourseWrite + from ._generator import Generator, GeneratorGraphQL, GeneratorWrite + from ._reservoir import Reservoir, ReservoirGraphQL, ReservoirWrite + from ._watercourse import Watercourse, WatercourseGraphQL, WatercourseWrite __all__ = [ @@ -88,6 +92,152 @@ } +class PlantGraphQL(GraphQLCore): + """This represents the reading version of plant, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the plant. + data_record: The data record of the plant node. + name: Name for the Asset + display_name: Display name for the Asset. + ordering: The ordering of the asset + head_loss_factor: The head loss factor field. + outlet_level: The outlet level field. + p_max: The p max field. + p_min: The p min field. + penstock_head_loss_factors: The penstock head loss factor field. + watercourse: The watercourse field. + connection_losses: The connection loss field. + p_max_time_series: The p max time series field. + p_min_time_series: The p min time series field. + water_value_time_series: The water value time series field. + feeding_fee_time_series: The feeding fee time series field. + outlet_level_time_series: The outlet level time series field. + inlet_level_time_series: The inlet level time series field. + head_direct_time_series: The head direct time series field. + inlet_reservoir: The inlet reservoir field. + generators: The generator field. 
+ """ + + view_id = dm.ViewId("sp_powerops_models", "Plant", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + ordering: Optional[int] = None + head_loss_factor: Optional[float] = Field(None, alias="headLossFactor") + outlet_level: Optional[float] = Field(None, alias="outletLevel") + p_max: Optional[float] = Field(None, alias="pMax") + p_min: Optional[float] = Field(None, alias="pMin") + penstock_head_loss_factors: Optional[dict] = Field(None, alias="penstockHeadLossFactors") + watercourse: Optional[WatercourseGraphQL] = Field(None, repr=False) + connection_losses: Optional[float] = Field(None, alias="connectionLosses") + p_max_time_series: Union[TimeSeries, str, None] = Field(None, alias="pMaxTimeSeries") + p_min_time_series: Union[TimeSeries, str, None] = Field(None, alias="pMinTimeSeries") + water_value_time_series: Union[TimeSeries, str, None] = Field(None, alias="waterValueTimeSeries") + feeding_fee_time_series: Union[TimeSeries, str, None] = Field(None, alias="feedingFeeTimeSeries") + outlet_level_time_series: Union[TimeSeries, str, None] = Field(None, alias="outletLevelTimeSeries") + inlet_level_time_series: Union[TimeSeries, str, None] = Field(None, alias="inletLevelTimeSeries") + head_direct_time_series: Union[TimeSeries, str, None] = Field(None, alias="headDirectTimeSeries") + inlet_reservoir: Optional[ReservoirGraphQL] = Field(None, repr=False, alias="inletReservoir") + generators: Optional[list[GeneratorGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("watercourse", "inlet_reservoir", "generators", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> Plant: + """Convert this GraphQL format of plant to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Plant( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + head_loss_factor=self.head_loss_factor, + outlet_level=self.outlet_level, + p_max=self.p_max, + p_min=self.p_min, + penstock_head_loss_factors=self.penstock_head_loss_factors, + watercourse=self.watercourse.as_read() if isinstance(self.watercourse, GraphQLCore) else self.watercourse, + connection_losses=self.connection_losses, + p_max_time_series=self.p_max_time_series, + p_min_time_series=self.p_min_time_series, + water_value_time_series=self.water_value_time_series, + feeding_fee_time_series=self.feeding_fee_time_series, + outlet_level_time_series=self.outlet_level_time_series, + inlet_level_time_series=self.inlet_level_time_series, + head_direct_time_series=self.head_direct_time_series, + inlet_reservoir=( + self.inlet_reservoir.as_read() + if isinstance(self.inlet_reservoir, GraphQLCore) + else self.inlet_reservoir + ), + generators=[ + generator.as_read() if 
isinstance(generator, GraphQLCore) else generator + for generator in self.generators or [] + ], + ) + + def as_write(self) -> PlantWrite: + """Convert this GraphQL format of plant to the writing format.""" + return PlantWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + head_loss_factor=self.head_loss_factor, + outlet_level=self.outlet_level, + p_max=self.p_max, + p_min=self.p_min, + penstock_head_loss_factors=self.penstock_head_loss_factors, + watercourse=self.watercourse.as_write() if isinstance(self.watercourse, DomainModel) else self.watercourse, + connection_losses=self.connection_losses, + p_max_time_series=self.p_max_time_series, + p_min_time_series=self.p_min_time_series, + water_value_time_series=self.water_value_time_series, + feeding_fee_time_series=self.feeding_fee_time_series, + outlet_level_time_series=self.outlet_level_time_series, + inlet_level_time_series=self.inlet_level_time_series, + head_direct_time_series=self.head_direct_time_series, + inlet_reservoir=( + self.inlet_reservoir.as_write() + if isinstance(self.inlet_reservoir, DomainModel) + else self.inlet_reservoir + ), + generators=[ + generator.as_write() if isinstance(generator, DomainModel) else generator + for generator in self.generators or [] + ], + ) + + class Plant(DomainModel): """This represents the reading version of plant. @@ -138,7 +288,7 @@ class Plant(DomainModel): inlet_level_time_series: Union[TimeSeries, str, None] = Field(None, alias="inletLevelTimeSeries") head_direct_time_series: Union[TimeSeries, str, None] = Field(None, alias="headDirectTimeSeries") inlet_reservoir: Union[Reservoir, str, dm.NodeId, None] = Field(None, repr=False, alias="inletReservoir") - generators: Union[list[Generator], list[str], None] = Field(default=None, repr=False) + generators: Union[list[Generator], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> PlantWrite: """Convert this read version of plant to the writing version.""" @@ -234,13 +384,14 @@ class PlantWrite(DomainModelWrite): inlet_level_time_series: Union[TimeSeries, str, None] = Field(None, alias="inletLevelTimeSeries") head_direct_time_series: Union[TimeSeries, str, None] = Field(None, alias="headDirectTimeSeries") inlet_reservoir: Union[ReservoirWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="inletReservoir") - generators: Union[list[GeneratorWrite], list[str], None] = Field(default=None, repr=False) + generators: Union[list[GeneratorWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -337,7 +488,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -352,7 +503,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "isSubAssetOf") for generator in self.generators or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=generator, edge_type=edge_type, 
view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=generator, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_plant_shop.py b/cognite/powerops/client/_generated/v1/data_classes/_plant_shop.py index f19ac0c43..5e3155357 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_plant_shop.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_plant_shop.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,6 +18,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -41,6 +45,66 @@ } +class PlantShopGraphQL(GraphQLCore): + """This represents the reading version of plant shop, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the plant shop. + data_record: The data record of the plant shop node. + name: Name for the Asset + display_name: Display name for the Asset. + ordering: The ordering of the asset + """ + + view_id = dm.ViewId("sp_powerops_models", "PlantShop", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + ordering: Optional[int] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> PlantShop: + """Convert this GraphQL format of plant shop to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PlantShop( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + ) + + def as_write(self) -> PlantShopWrite: + """Convert this GraphQL format of plant shop to the writing format.""" + return PlantShopWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + ) + + class PlantShop(DomainModel): """This represents the reading version of plant shop. 
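# --------------------------------------------------------------------------
# A sketch of the version handling introduced in the hunks below (values are
# made up): when `allow_version_increase` is set, the node is applied without
# `existing_version`, so CDF bumps the version instead of rejecting the write
# with a version conflict.
from cognite.client import data_modeling as dm

existing_version = 3  # hypothetical value from the data record
allow_version_increase = True

node = dm.NodeApply(
    space="sp_powerops_instances",  # hypothetical instance space
    external_id="plant_shop:1",
    existing_version=None if allow_version_increase else existing_version,
)
# --------------------------------------------------------------------------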
@@ -107,6 +171,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -129,7 +194,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_input.py b/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_input.py index 898aa8886..ef08c61dc 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_input.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_input.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,11 +19,12 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._scenario import Scenario, ScenarioWrite + from ._scenario import Scenario, ScenarioGraphQL, ScenarioWrite __all__ = [ @@ -50,6 +54,90 @@ } +class PreprocessorInputGraphQL(GraphQLCore): + """This represents the reading version of preprocessor input, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the preprocessor input. + data_record: The data record of the preprocessor input node. + process_id: The process associated with the function execution + process_step: This is the step in the process. 
+ function_name: The name of the function + function_call_id: The function call id + scenario: The scenario to run shop with + shop_start: Start date of bid period + shop_end: End date of bid period + """ + + view_id = dm.ViewId("sp_powerops_models", "PreprocessorInput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + scenario: Optional[ScenarioGraphQL] = Field(None, repr=False) + shop_start: Optional[datetime.date] = Field(None, alias="shopStart") + shop_end: Optional[datetime.date] = Field(None, alias="shopEnd") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("scenario", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> PreprocessorInput: + """Convert this GraphQL format of preprocessor input to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PreprocessorInput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + scenario=self.scenario.as_read() if isinstance(self.scenario, GraphQLCore) else self.scenario, + shop_start=self.shop_start, + shop_end=self.shop_end, + ) + + def as_write(self) -> PreprocessorInputWrite: + """Convert this GraphQL format of preprocessor input to the writing format.""" + return PreprocessorInputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + scenario=self.scenario.as_write() if isinstance(self.scenario, DomainModel) else self.scenario, + shop_start=self.shop_start, + shop_end=self.shop_end, + ) + + class PreprocessorInput(DomainModel): """This represents the reading version of preprocessor input. 
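# --------------------------------------------------------------------------
# A standalone mirror of the `parse_graphql` field validator above, showing
# what it does with GraphQL "connection" payloads (example data is made up):
def parse_graphql(value):
    if not isinstance(value, dict):
        return value
    if "items" in value:
        return value["items"]  # unwrap {"items": [...]} before validation
    return value

assert parse_graphql({"items": [{"name": "scenario_1"}]}) == [{"name": "scenario_1"}]
assert parse_graphql("scenario_1") == "scenario_1"  # references pass through
# --------------------------------------------------------------------------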
@@ -140,6 +228,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -179,7 +268,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_output.py b/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_output.py index 8c95ffbe9..56c0d0827 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_output.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_output.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._case import Case, CaseWrite - from ._preprocessor_input import PreprocessorInput, PreprocessorInputWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._case import Case, CaseGraphQL, CaseWrite + from ._preprocessor_input import PreprocessorInput, PreprocessorInputGraphQL, PreprocessorInputWrite __all__ = [ @@ -47,6 +51,90 @@ } +class PreprocessorOutputGraphQL(GraphQLCore): + """This represents the reading version of preprocessor output, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the preprocessor output. + data_record: The data record of the preprocessor output node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + alerts: An array of calculation level Alerts. 
+ case: The Case to trigger shop with + input_: The prepped and processed scenario to send to shop trigger + """ + + view_id = dm.ViewId("sp_powerops_models", "PreprocessorOutput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + case: Optional[CaseGraphQL] = Field(None, repr=False) + input_: Optional[PreprocessorInputGraphQL] = Field(None, repr=False, alias="input") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "case", "input_", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> PreprocessorOutput: + """Convert this GraphQL format of preprocessor output to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PreprocessorOutput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + case=self.case.as_read() if isinstance(self.case, GraphQLCore) else self.case, + input_=self.input_.as_read() if isinstance(self.input_, GraphQLCore) else self.input_, + ) + + def as_write(self) -> PreprocessorOutputWrite: + """Convert this GraphQL format of preprocessor output to the writing format.""" + return PreprocessorOutputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + case=self.case.as_write() if isinstance(self.case, DomainModel) else self.case, + input_=self.input_.as_write() if isinstance(self.input_, DomainModel) else self.input_, + ) + + class PreprocessorOutput(DomainModel): """This represents the reading version of preprocessor output. 
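# --------------------------------------------------------------------------
# A sketch of the conversion rule used by `as_read`/`as_write` above (the
# helper below is an editorial stand-in, not SDK code): only resolved GraphQL
# objects are converted; plain references (external-id strings, dm.NodeId)
# pass through untouched.
from cognite.client import data_modeling as dm

def convert(value):
    # stand-in for: value.as_read() if isinstance(value, GraphQLCore) else value
    return value.as_read() if hasattr(value, "as_read") else value

assert convert("alert:1") == "alert:1"
ref = dm.NodeId("sp_powerops_instances", "alert:1")  # hypothetical reference
assert convert(ref) is ref
# --------------------------------------------------------------------------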
@@ -73,7 +161,7 @@ class PreprocessorOutput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) case: Union[Case, str, dm.NodeId, None] = Field(None, repr=False) input_: Union[PreprocessorInput, str, dm.NodeId, None] = Field(None, repr=False, alias="input") @@ -128,7 +216,7 @@ class PreprocessorOutputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) case: Union[CaseWrite, str, dm.NodeId, None] = Field(None, repr=False) input_: Union[PreprocessorInputWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="input") @@ -137,6 +225,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -176,7 +265,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -191,7 +280,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_price_area.py b/cognite/powerops/client/_generated/v1/data_classes/_price_area.py index 67f27cb8f..c03fd1ffb 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_price_area.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_price_area.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -39,6 +43,62 @@ } +class PriceAreaGraphQL(GraphQLCore): + """This represents the reading version of price area, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the price area. + data_record: The data record of the price area node. 
+ name: The name of the price area + timezone: The timezone of the price area + """ + + view_id = dm.ViewId("sp_powerops_models", "PriceArea", "1") + name: Optional[str] = None + timezone: Optional[str] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> PriceArea: + """Convert this GraphQL format of price area to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PriceArea( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + timezone=self.timezone, + ) + + def as_write(self) -> PriceAreaWrite: + """Convert this GraphQL format of price area to the writing format.""" + return PriceAreaWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + timezone=self.timezone, + ) + + class PriceArea(DomainModel): """This represents the reading version of price area. @@ -100,6 +160,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -119,7 +180,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_price_area_afrr.py b/cognite/powerops/client/_generated/v1/data_classes/_price_area_afrr.py index 649626d15..de6d9e688 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_price_area_afrr.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_price_area_afrr.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,6 +19,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) @@ -76,6 +80,98 @@ } +class PriceAreaAFRRGraphQL(GraphQLCore): + """This represents the reading version of price area afrr, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the price area afrr. + data_record: The data record of the price area afrr node. + name: The name of the price area + timezone: The timezone of the price area + capacity_price_up: The capacity price up field. + capacity_price_down: The capacity price down field. 
+ activation_price_up: The mFRR activation price (TBC) + activation_price_down: The mFRR activation price (TBC) + relative_activation: Value between -1 (100 % activation down) and 1 (100 % activation up) + total_capacity_allocation_up: The total capacity allocation up field. + total_capacity_allocation_down: The total capacity allocation down field. + own_capacity_allocation_up: The own capacity allocation up field. + own_capacity_allocation_down: The own capacity allocation down field. + """ + + view_id = dm.ViewId("sp_powerops_models", "PriceAreaAFRR", "1") + name: Optional[str] = None + timezone: Optional[str] = None + capacity_price_up: Union[TimeSeries, str, None] = Field(None, alias="capacityPriceUp") + capacity_price_down: Union[TimeSeries, str, None] = Field(None, alias="capacityPriceDown") + activation_price_up: Union[TimeSeries, str, None] = Field(None, alias="activationPriceUp") + activation_price_down: Union[TimeSeries, str, None] = Field(None, alias="activationPriceDown") + relative_activation: Union[TimeSeries, str, None] = Field(None, alias="relativeActivation") + total_capacity_allocation_up: Union[TimeSeries, str, None] = Field(None, alias="totalCapacityAllocationUp") + total_capacity_allocation_down: Union[TimeSeries, str, None] = Field(None, alias="totalCapacityAllocationDown") + own_capacity_allocation_up: Union[TimeSeries, str, None] = Field(None, alias="ownCapacityAllocationUp") + own_capacity_allocation_down: Union[TimeSeries, str, None] = Field(None, alias="ownCapacityAllocationDown") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> PriceAreaAFRR: + """Convert this GraphQL format of price area afrr to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PriceAreaAFRR( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + timezone=self.timezone, + capacity_price_up=self.capacity_price_up, + capacity_price_down=self.capacity_price_down, + activation_price_up=self.activation_price_up, + activation_price_down=self.activation_price_down, + relative_activation=self.relative_activation, + total_capacity_allocation_up=self.total_capacity_allocation_up, + total_capacity_allocation_down=self.total_capacity_allocation_down, + own_capacity_allocation_up=self.own_capacity_allocation_up, + own_capacity_allocation_down=self.own_capacity_allocation_down, + ) + + def as_write(self) -> PriceAreaAFRRWrite: + """Convert this GraphQL format of price area afrr to the writing format.""" + return PriceAreaAFRRWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + timezone=self.timezone, + capacity_price_up=self.capacity_price_up, + capacity_price_down=self.capacity_price_down, + activation_price_up=self.activation_price_up, + activation_price_down=self.activation_price_down, + relative_activation=self.relative_activation, + total_capacity_allocation_up=self.total_capacity_allocation_up, + 
total_capacity_allocation_down=self.total_capacity_allocation_down, + own_capacity_allocation_up=self.own_capacity_allocation_up, + own_capacity_allocation_down=self.own_capacity_allocation_down, + ) + + class PriceAreaAFRR(PriceArea): """This represents the reading version of price area afrr. @@ -176,6 +272,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -251,7 +348,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_price_area_asset.py b/cognite/powerops/client/_generated/v1/data_classes/_price_area_asset.py index e321e02a8..1dae25829 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_price_area_asset.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_price_area_asset.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) from ._price_area import PriceArea, PriceAreaWrite if TYPE_CHECKING: - from ._plant import Plant, PlantWrite - from ._watercourse import Watercourse, WatercourseWrite + from ._plant import Plant, PlantGraphQL, PlantWrite + from ._watercourse import Watercourse, WatercourseGraphQL, WatercourseWrite __all__ = [ @@ -45,6 +49,84 @@ } +class PriceAreaAssetGraphQL(GraphQLCore): + """This represents the reading version of price area asset, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the price area asset. + data_record: The data record of the price area asset node. + name: The name of the price area + timezone: The timezone of the price area + plants: An array of associated plants. + watercourses: An array of associated watercourses. 
+ """ + + view_id = dm.ViewId("sp_powerops_models", "PriceAreaAsset", "1") + name: Optional[str] = None + timezone: Optional[str] = None + plants: Optional[list[PlantGraphQL]] = Field(default=None, repr=False) + watercourses: Optional[list[WatercourseGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("plants", "watercourses", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> PriceAreaAsset: + """Convert this GraphQL format of price area asset to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PriceAreaAsset( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + timezone=self.timezone, + plants=[plant.as_read() if isinstance(plant, GraphQLCore) else plant for plant in self.plants or []], + watercourses=[ + watercourse.as_read() if isinstance(watercourse, GraphQLCore) else watercourse + for watercourse in self.watercourses or [] + ], + ) + + def as_write(self) -> PriceAreaAssetWrite: + """Convert this GraphQL format of price area asset to the writing format.""" + return PriceAreaAssetWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + timezone=self.timezone, + plants=[plant.as_write() if isinstance(plant, DomainModel) else plant for plant in self.plants or []], + watercourses=[ + watercourse.as_write() if isinstance(watercourse, DomainModel) else watercourse + for watercourse in self.watercourses or [] + ], + ) + + class PriceAreaAsset(PriceArea): """This represents the reading version of price area asset. 
@@ -61,8 +143,8 @@ class PriceAreaAsset(PriceArea): """ node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference("sp_powerops_types", "PriceArea") - plants: Union[list[Plant], list[str], None] = Field(default=None, repr=False) - watercourses: Union[list[Watercourse], list[str], None] = Field(default=None, repr=False) + plants: Union[list[Plant], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) + watercourses: Union[list[Watercourse], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> PriceAreaAssetWrite: """Convert this read version of price area asset to the writing version.""" @@ -105,14 +187,15 @@ class PriceAreaAssetWrite(PriceAreaWrite): """ node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference("sp_powerops_types", "PriceArea") - plants: Union[list[PlantWrite], list[str], None] = Field(default=None, repr=False) - watercourses: Union[list[WatercourseWrite], list[str], None] = Field(default=None, repr=False) + plants: Union[list[PlantWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) + watercourses: Union[list[WatercourseWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -134,7 +217,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -149,14 +232,26 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "isPlantOf") for plant in self.plants or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=plant, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=plant, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) edge_type = dm.DirectRelationReference("sp_powerops_types", "isWatercourseOf") for watercourse in self.watercourses or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=watercourse, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=watercourse, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_price_prod_case.py b/cognite/powerops/client/_generated/v1/data_classes/_price_prod_case.py index d6cc8fcb3..3959e0e20 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_price_prod_case.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_price_prod_case.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, 
DomainModel, DomainModelCore, @@ -16,12 +19,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) if TYPE_CHECKING: - from ._case import Case, CaseWrite + from ._case import Case, CaseGraphQL, CaseWrite __all__ = [ @@ -45,6 +49,74 @@ } +class PriceProdCaseGraphQL(GraphQLCore): + """This represents the reading version of price prod case, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the price prod case. + data_record: The data record of the price prod case node. + price: The price field. + production: The production field. + case: The case field. + """ + + view_id = dm.ViewId("sp_powerops_models", "PriceProdCase", "1") + price: Union[TimeSeries, str, None] = None + production: Union[TimeSeries, str, None] = None + case: Optional[CaseGraphQL] = Field(None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("case", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> PriceProdCase: + """Convert this GraphQL format of price prod case to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return PriceProdCase( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + price=self.price, + production=self.production, + case=self.case.as_read() if isinstance(self.case, GraphQLCore) else self.case, + ) + + def as_write(self) -> PriceProdCaseWrite: + """Convert this GraphQL format of price prod case to the writing format.""" + return PriceProdCaseWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + price=self.price, + production=self.production, + case=self.case.as_write() if isinstance(self.case, DomainModel) else self.case, + ) + + class PriceProdCase(DomainModel): """This represents the reading version of price prod case. 
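# --------------------------------------------------------------------------
# A usage sketch for the class above (the payload is made up): time series
# properties such as `price`/`production` are carried either as an external-id
# string or a TimeSeries object, and `as_read` passes them through unchanged.
from cognite.powerops.client._generated.v1.data_classes._price_prod_case import (
    PriceProdCaseGraphQL,
)

gql = PriceProdCaseGraphQL.model_validate(
    {
        "space": "sp_powerops_instances",  # hypothetical instance space
        "externalId": "price_prod_case:1",
        "createdTime": "2024-03-21T00:00:00+00:00",
        "lastUpdatedTime": "2024-03-21T00:00:00+00:00",
        "price": "ts_price_no2",  # external-id reference to a time series
        "production": "ts_production_no2",
    }
)
assert gql.as_read().price == "ts_price_no2"
# --------------------------------------------------------------------------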
@@ -115,6 +187,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -148,7 +221,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_reservoir.py b/cognite/powerops/client/_generated/v1/data_classes/_reservoir.py index 7c4136063..b2fdcf645 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_reservoir.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_reservoir.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,6 +18,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -41,6 +45,66 @@ } +class ReservoirGraphQL(GraphQLCore): + """This represents the reading version of reservoir, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the reservoir. + data_record: The data record of the reservoir node. + name: Name for the Asset + display_name: Display name for the Asset. + ordering: The ordering of the asset + """ + + view_id = dm.ViewId("sp_powerops_models", "Reservoir", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + ordering: Optional[int] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> Reservoir: + """Convert this GraphQL format of reservoir to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Reservoir( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + ) + + def as_write(self) -> ReservoirWrite: + """Convert this GraphQL format of reservoir to the writing format.""" + return ReservoirWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + ) + + class Reservoir(DomainModel): """This represents the reading version of reservoir. 
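# --------------------------------------------------------------------------
# A sketch of the data-record guard above (the payload is made up): a GraphQL
# item queried without `createdTime`/`lastUpdatedTime` has no data record, so
# `as_read` refuses to convert it, while `as_write` still succeeds.
from cognite.powerops.client._generated.v1.data_classes._reservoir import ReservoirGraphQL

gql = ReservoirGraphQL.model_validate(
    {"space": "sp_powerops_instances", "externalId": "reservoir:1", "name": "Reservoir A"}
)
try:
    gql.as_read()
except ValueError as err:
    print(err)  # "This object cannot be converted to a read format ..."
write = gql.as_write()  # fine; uses DataRecordWrite(existing_version=0)
# --------------------------------------------------------------------------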
@@ -107,6 +171,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -129,7 +194,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_scenario.py b/cognite/powerops/client/_generated/v1/data_classes/_scenario.py index 0ba16982f..a57cebe59 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_scenario.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_scenario.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._commands import Commands, CommandsWrite - from ._mapping import Mapping, MappingWrite - from ._model_template import ModelTemplate, ModelTemplateWrite + from ._commands import Commands, CommandsGraphQL, CommandsWrite + from ._mapping import Mapping, MappingGraphQL, MappingWrite + from ._model_template import ModelTemplate, ModelTemplateGraphQL, ModelTemplateWrite __all__ = [ @@ -45,6 +49,92 @@ } +class ScenarioGraphQL(GraphQLCore, protected_namespaces=()): + """This represents the reading version of scenario, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the scenario. + data_record: The data record of the scenario node. 
+ name: The name of the scenario to run + model_template: The model template to use when running the scenario + commands: The commands to run + source: The source of the scenario + mappings_override: An array of base mappings to override in shop model file + """ + + view_id = dm.ViewId("sp_powerops_models", "Scenario", "1") + name: Optional[str] = None + model_template: Optional[ModelTemplateGraphQL] = Field(None, repr=False, alias="modelTemplate") + commands: Optional[CommandsGraphQL] = Field(None, repr=False) + source: Optional[str] = None + mappings_override: Optional[list[MappingGraphQL]] = Field(default=None, repr=False, alias="mappingsOverride") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("model_template", "commands", "mappings_override", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> Scenario: + """Convert this GraphQL format of scenario to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Scenario( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + model_template=( + self.model_template.as_read() if isinstance(self.model_template, GraphQLCore) else self.model_template + ), + commands=self.commands.as_read() if isinstance(self.commands, GraphQLCore) else self.commands, + source=self.source, + mappings_override=[ + mappings_override.as_read() if isinstance(mappings_override, GraphQLCore) else mappings_override + for mappings_override in self.mappings_override or [] + ], + ) + + def as_write(self) -> ScenarioWrite: + """Convert this GraphQL format of scenario to the writing format.""" + return ScenarioWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + model_template=( + self.model_template.as_write() if isinstance(self.model_template, DomainModel) else self.model_template + ), + commands=self.commands.as_write() if isinstance(self.commands, DomainModel) else self.commands, + source=self.source, + mappings_override=[ + mappings_override.as_write() if isinstance(mappings_override, DomainModel) else mappings_override + for mappings_override in self.mappings_override or [] + ], + ) + + class Scenario(DomainModel, protected_namespaces=()): """This represents the reading version of scenario. 
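# --------------------------------------------------------------------------
# A sketch of the widened `mappingsOverride` union in the hunks below (ids
# are made up, and the required constructor arguments are assumed): override
# mappings can now be referenced by `dm.NodeId` when writing a scenario, and
# the edges created for them inherit `write_none`/`allow_version_increase`
# from the triggering call.
from cognite.client import data_modeling as dm
from cognite.powerops.client._generated.v1.data_classes._scenario import ScenarioWrite

scenario = ScenarioWrite(
    external_id="scenario:price_up",  # hypothetical instance
    name="price_up",
    mappings_override=[dm.NodeId("sp_powerops_instances", "mapping:inflow")],
)
# --------------------------------------------------------------------------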
@@ -67,7 +157,9 @@ class Scenario(DomainModel, protected_namespaces=()): model_template: Union[ModelTemplate, str, dm.NodeId, None] = Field(None, repr=False, alias="modelTemplate") commands: Union[Commands, str, dm.NodeId, None] = Field(None, repr=False) source: Optional[str] = None - mappings_override: Union[list[Mapping], list[str], None] = Field(default=None, repr=False, alias="mappingsOverride") + mappings_override: Union[list[Mapping], list[str], list[dm.NodeId], None] = Field( + default=None, repr=False, alias="mappingsOverride" + ) def as_write(self) -> ScenarioWrite: """Convert this read version of scenario to the writing version.""" @@ -119,7 +211,7 @@ class ScenarioWrite(DomainModelWrite, protected_namespaces=()): model_template: Union[ModelTemplateWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="modelTemplate") commands: Union[CommandsWrite, str, dm.NodeId, None] = Field(None, repr=False) source: Optional[str] = None - mappings_override: Union[list[MappingWrite], list[str], None] = Field( + mappings_override: Union[list[MappingWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="mappingsOverride" ) @@ -128,6 +220,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -161,7 +254,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -181,6 +274,8 @@ def _to_instances_write( end_node=mappings_override, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_input.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_input.py index 80a9ca68b..aad1b5f52 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_input.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_input.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,13 +19,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._market_configuration import MarketConfiguration, MarketConfigurationWrite - from ._plant_shop import PlantShop, PlantShopWrite - from ._shop_result_price_prod import SHOPResultPriceProd, SHOPResultPriceProdWrite + from ._market_configuration import MarketConfiguration, MarketConfigurationGraphQL, MarketConfigurationWrite + from ._plant_shop import PlantShop, PlantShopGraphQL, PlantShopWrite + from ._shop_result_price_prod import SHOPResultPriceProd, SHOPResultPriceProdGraphQL, SHOPResultPriceProdWrite __all__ = [ @@ -52,6 +56,122 @@ } +class ShopPartialBidCalculationInputGraphQL(GraphQLCore): + """This represents the reading version of shop partial bid calculation input, used + when data is 
retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the shop partial bid calculation input. + data_record: The data record of the shop partial bid calculation input node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + plant: The plant to calculate the partial bid for. Extract price/prod timeseries from Shop Results + market_configuration: The market configuration to be used to generate the partial bid matrix + step_enabled: Whether the step is enabled or not + bid_date: The bid date + shop_result_price_prod: An array of shop results with price/prod timeseries pairs for all plants included in the respective shop scenario + """ + + view_id = dm.ViewId("sp_powerops_models", "ShopPartialBidCalculationInput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + plant: Optional[PlantShopGraphQL] = Field(None, repr=False) + market_configuration: Optional[MarketConfigurationGraphQL] = Field(None, repr=False, alias="marketConfiguration") + step_enabled: Optional[bool] = Field(None, alias="stepEnabled") + bid_date: Optional[datetime.date] = Field(None, alias="bidDate") + shop_result_price_prod: Optional[list[SHOPResultPriceProdGraphQL]] = Field( + default=None, repr=False, alias="shopResultPriceProd" + ) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("plant", "market_configuration", "shop_result_price_prod", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> ShopPartialBidCalculationInput: + """Convert this GraphQL format of shop partial bid calculation input to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return ShopPartialBidCalculationInput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + plant=self.plant.as_read() if isinstance(self.plant, GraphQLCore) else self.plant, + market_configuration=( + self.market_configuration.as_read() + if isinstance(self.market_configuration, GraphQLCore) + else self.market_configuration + ), + step_enabled=self.step_enabled, + bid_date=self.bid_date, + shop_result_price_prod=[ + ( + shop_result_price_prod.as_read() + if isinstance(shop_result_price_prod, GraphQLCore) + else shop_result_price_prod + ) + for shop_result_price_prod in self.shop_result_price_prod or [] + ], + ) + + def 
as_write(self) -> ShopPartialBidCalculationInputWrite: + """Convert this GraphQL format of shop partial bid calculation input to the writing format.""" + return ShopPartialBidCalculationInputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + plant=self.plant.as_write() if isinstance(self.plant, DomainModel) else self.plant, + market_configuration=( + self.market_configuration.as_write() + if isinstance(self.market_configuration, DomainModel) + else self.market_configuration + ), + step_enabled=self.step_enabled, + bid_date=self.bid_date, + shop_result_price_prod=[ + ( + shop_result_price_prod.as_write() + if isinstance(shop_result_price_prod, DomainModel) + else shop_result_price_prod + ) + for shop_result_price_prod in self.shop_result_price_prod or [] + ], + ) + + class ShopPartialBidCalculationInput(DomainModel): """This represents the reading version of shop partial bid calculation input. @@ -86,7 +206,7 @@ class ShopPartialBidCalculationInput(DomainModel): ) step_enabled: Optional[bool] = Field(None, alias="stepEnabled") bid_date: Optional[datetime.date] = Field(None, alias="bidDate") - shop_result_price_prod: Union[list[SHOPResultPriceProd], list[str], None] = Field( + shop_result_price_prod: Union[list[SHOPResultPriceProd], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="shopResultPriceProd" ) @@ -162,7 +282,7 @@ class ShopPartialBidCalculationInputWrite(DomainModelWrite): ) step_enabled: Optional[bool] = Field(None, alias="stepEnabled") bid_date: Optional[datetime.date] = Field(None, alias="bidDate") - shop_result_price_prod: Union[list[SHOPResultPriceProdWrite], list[str], None] = Field( + shop_result_price_prod: Union[list[SHOPResultPriceProdWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="shopResultPriceProd" ) @@ -171,6 +291,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -220,7 +341,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -240,6 +361,8 @@ def _to_instances_write( end_node=shop_result_price_prod, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_output.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_output.py index 2242c4d3c..f91f5b0ff 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_output.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_output.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, 
DomainModelCore, @@ -15,13 +18,22 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._multi_scenario_matrix_raw import MultiScenarioMatrixRaw, MultiScenarioMatrixRawWrite - from ._shop_partial_bid_calculation_input import ShopPartialBidCalculationInput, ShopPartialBidCalculationInputWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._multi_scenario_matrix_raw import ( + MultiScenarioMatrixRaw, + MultiScenarioMatrixRawGraphQL, + MultiScenarioMatrixRawWrite, + ) + from ._shop_partial_bid_calculation_input import ( + ShopPartialBidCalculationInput, + ShopPartialBidCalculationInputGraphQL, + ShopPartialBidCalculationInputWrite, + ) __all__ = [ @@ -47,6 +59,94 @@ } +class ShopPartialBidCalculationOutputGraphQL(GraphQLCore): + """This represents the reading version of shop partial bid calculation output, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the shop partial bid calculation output. + data_record: The data record of the shop partial bid calculation output node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + alerts: An array of calculation level Alerts. + bid_matrix_raw: The bid matrix that is calculated by the partial bid calculation function + input_: The previous step in the process. + """ + + view_id = dm.ViewId("sp_powerops_models", "ShopPartialBidCalculationOutput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + bid_matrix_raw: Optional[MultiScenarioMatrixRawGraphQL] = Field(None, repr=False, alias="bidMatrixRaw") + input_: Optional[ShopPartialBidCalculationInputGraphQL] = Field(None, repr=False, alias="input") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "bid_matrix_raw", "input_", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> ShopPartialBidCalculationOutput: + """Convert this GraphQL format of shop partial bid calculation output to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return ShopPartialBidCalculationOutput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + 
alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + bid_matrix_raw=( + self.bid_matrix_raw.as_read() if isinstance(self.bid_matrix_raw, GraphQLCore) else self.bid_matrix_raw + ), + input_=self.input_.as_read() if isinstance(self.input_, GraphQLCore) else self.input_, + ) + + def as_write(self) -> ShopPartialBidCalculationOutputWrite: + """Convert this GraphQL format of shop partial bid calculation output to the writing format.""" + return ShopPartialBidCalculationOutputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + bid_matrix_raw=( + self.bid_matrix_raw.as_write() if isinstance(self.bid_matrix_raw, DomainModel) else self.bid_matrix_raw + ), + input_=self.input_.as_write() if isinstance(self.input_, DomainModel) else self.input_, + ) + + class ShopPartialBidCalculationOutput(DomainModel): """This represents the reading version of shop partial bid calculation output. @@ -73,7 +173,7 @@ class ShopPartialBidCalculationOutput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) bid_matrix_raw: Union[MultiScenarioMatrixRaw, str, dm.NodeId, None] = Field(None, repr=False, alias="bidMatrixRaw") input_: Union[ShopPartialBidCalculationInput, str, dm.NodeId, None] = Field(None, repr=False, alias="input") @@ -130,7 +230,7 @@ class ShopPartialBidCalculationOutputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) bid_matrix_raw: Union[MultiScenarioMatrixRawWrite, str, dm.NodeId, None] = Field( None, repr=False, alias="bidMatrixRaw" ) @@ -141,6 +241,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -182,7 +283,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -197,7 +298,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff 
--git a/cognite/powerops/client/_generated/v1/data_classes/_shop_result.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_result.py index 46ba75f00..47669d8cc 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_shop_result.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_result.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,13 +19,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._case import Case, CaseWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._case import Case, CaseGraphQL, CaseWrite __all__ = [ @@ -54,6 +58,94 @@ } +class SHOPResultGraphQL(GraphQLCore): + """This represents the reading version of shop result, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the shop result. + data_record: The data record of the shop result node. + case: The case that was used to produce this result + output_timeseries: A general placeholder for all timeseries that stem from a shop run + objective_sequence: The sequence of the objective function + pre_run: The pre-run data for the SHOP run + post_run: The post-run data for the SHOP run + shop_messages: The messages from the SHOP run + cplex_logs: The logs from CPLEX + alerts: An array of calculation level Alerts. 
+ """ + + view_id = dm.ViewId("sp_powerops_models", "SHOPResult", "1") + case: Optional[CaseGraphQL] = Field(None, repr=False) + output_timeseries: Union[list[TimeSeries], list[str], None] = Field(None, alias="outputTimeseries") + objective_sequence: Union[str, None] = Field(None, alias="objectiveSequence") + pre_run: Union[str, None] = Field(None, alias="preRun") + post_run: Union[str, None] = Field(None, alias="postRun") + shop_messages: Union[str, None] = Field(None, alias="shopMessages") + cplex_logs: Union[str, None] = Field(None, alias="cplexLogs") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("case", "alerts", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> SHOPResult: + """Convert this GraphQL format of shop result to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return SHOPResult( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + case=self.case.as_read() if isinstance(self.case, GraphQLCore) else self.case, + output_timeseries=self.output_timeseries, + objective_sequence=self.objective_sequence, + pre_run=self.pre_run, + post_run=self.post_run, + shop_messages=self.shop_messages, + cplex_logs=self.cplex_logs, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + ) + + def as_write(self) -> SHOPResultWrite: + """Convert this GraphQL format of shop result to the writing format.""" + return SHOPResultWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + case=self.case.as_write() if isinstance(self.case, DomainModel) else self.case, + output_timeseries=self.output_timeseries, + objective_sequence=self.objective_sequence, + pre_run=self.pre_run, + post_run=self.post_run, + shop_messages=self.shop_messages, + cplex_logs=self.cplex_logs, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + ) + + class SHOPResult(DomainModel): """This represents the reading version of shop result. 
@@ -82,7 +174,7 @@ class SHOPResult(DomainModel): post_run: Union[str, None] = Field(None, alias="postRun") shop_messages: Union[str, None] = Field(None, alias="shopMessages") cplex_logs: Union[str, None] = Field(None, alias="cplexLogs") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> SHOPResultWrite: """Convert this read version of shop result to the writing version.""" @@ -138,13 +230,14 @@ class SHOPResultWrite(DomainModelWrite): post_run: Union[str, None] = Field(None, alias="postRun") shop_messages: Union[str, None] = Field(None, alias="shopMessages") cplex_logs: Union[str, None] = Field(None, alias="cplexLogs") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -184,7 +277,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -199,7 +292,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_shop_result_price_prod.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_result_price_prod.py index a3b0aafdc..925287208 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_shop_result_price_prod.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_result_price_prod.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,15 +19,16 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) from ._shop_result import SHOPResult, SHOPResultWrite if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._case import Case, CaseWrite - from ._shop_time_series import SHOPTimeSeries, SHOPTimeSeriesWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._case import Case, CaseGraphQL, CaseWrite + from ._shop_time_series import SHOPTimeSeries, SHOPTimeSeriesGraphQL, SHOPTimeSeriesWrite __all__ = [ @@ -56,6 +60,118 @@ } +class SHOPResultPriceProdGraphQL(GraphQLCore): + """This represents the reading version of shop result price prod, used + when data is retrieved from CDF using GraphQL. 
+ + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the shop result price prod. + data_record: The data record of the shop result price prod node. + case: The case that was used to produce this result + output_timeseries: A general placeholder for all timeseries that stem from a shop run + objective_sequence: The sequence of the objective function + pre_run: The pre-run data for the SHOP run + post_run: The post-run data for the SHOP run + shop_messages: The messages from the SHOP run + cplex_logs: The logs from CPLEX + alerts: An array of calculation level Alerts. + price_timeseries: The market price timeseries from the Shop run + production_timeseries: The production timeseries wrapped as a ShopTimeSeries object containing properties related to their names and types in the resulting output shop file + """ + + view_id = dm.ViewId("sp_powerops_models", "SHOPResultPriceProd", "1") + case: Optional[CaseGraphQL] = Field(None, repr=False) + output_timeseries: Union[list[TimeSeries], list[str], None] = Field(None, alias="outputTimeseries") + objective_sequence: Union[str, None] = Field(None, alias="objectiveSequence") + pre_run: Union[str, None] = Field(None, alias="preRun") + post_run: Union[str, None] = Field(None, alias="postRun") + shop_messages: Union[str, None] = Field(None, alias="shopMessages") + cplex_logs: Union[str, None] = Field(None, alias="cplexLogs") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + price_timeseries: Optional[SHOPTimeSeriesGraphQL] = Field(None, repr=False, alias="priceTimeseries") + production_timeseries: Optional[list[SHOPTimeSeriesGraphQL]] = Field( + default=None, repr=False, alias="productionTimeseries" + ) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("case", "alerts", "price_timeseries", "production_timeseries", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> SHOPResultPriceProd: + """Convert this GraphQL format of shop result price prod to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return SHOPResultPriceProd( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + case=self.case.as_read() if isinstance(self.case, GraphQLCore) else self.case, + output_timeseries=self.output_timeseries, + objective_sequence=self.objective_sequence, + pre_run=self.pre_run, + post_run=self.post_run, + shop_messages=self.shop_messages, + cplex_logs=self.cplex_logs, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + price_timeseries=( + self.price_timeseries.as_read() + if isinstance(self.price_timeseries, GraphQLCore) + else self.price_timeseries + ), + production_timeseries=[ + production_timesery.as_read() if isinstance(production_timesery, GraphQLCore) else 
production_timesery + for production_timesery in self.production_timeseries or [] + ], + ) + + def as_write(self) -> SHOPResultPriceProdWrite: + """Convert this GraphQL format of shop result price prod to the writing format.""" + return SHOPResultPriceProdWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + case=self.case.as_write() if isinstance(self.case, DomainModel) else self.case, + output_timeseries=self.output_timeseries, + objective_sequence=self.objective_sequence, + pre_run=self.pre_run, + post_run=self.post_run, + shop_messages=self.shop_messages, + cplex_logs=self.cplex_logs, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + price_timeseries=( + self.price_timeseries.as_write() + if isinstance(self.price_timeseries, DomainModel) + else self.price_timeseries + ), + production_timeseries=[ + production_timesery.as_write() if isinstance(production_timesery, DomainModel) else production_timesery + for production_timesery in self.production_timeseries or [] + ], + ) + + class SHOPResultPriceProd(SHOPResult): """This represents the reading version of shop result price prod. @@ -81,7 +197,7 @@ class SHOPResultPriceProd(SHOPResult): "sp_powerops_types", "SHOPResultPriceProd" ) price_timeseries: Union[SHOPTimeSeries, str, dm.NodeId, None] = Field(None, repr=False, alias="priceTimeseries") - production_timeseries: Union[list[SHOPTimeSeries], list[str], None] = Field( + production_timeseries: Union[list[SHOPTimeSeries], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="productionTimeseries" ) @@ -147,7 +263,7 @@ class SHOPResultPriceProdWrite(SHOPResultWrite): price_timeseries: Union[SHOPTimeSeriesWrite, str, dm.NodeId, None] = Field( None, repr=False, alias="priceTimeseries" ) - production_timeseries: Union[list[SHOPTimeSeriesWrite], list[str], None] = Field( + production_timeseries: Union[list[SHOPTimeSeriesWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="productionTimeseries" ) @@ -156,6 +272,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -207,7 +324,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -222,7 +339,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) @@ -234,6 +357,8 @@ def _to_instances_write( end_node=production_timesery, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git 
a/cognite/powerops/client/_generated/v1/data_classes/_shop_time_series.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_time_series.py index 5053f6630..76f0bd7f1 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_shop_time_series.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_time_series.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,6 +19,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) @@ -44,6 +48,70 @@ } +class SHOPTimeSeriesGraphQL(GraphQLCore): + """This represents the reading version of shop time series, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the shop time series. + data_record: The data record of the shop time series node. + object_type: The type of the object + object_name: The name of the object + attribute_name: The name of the attribute + timeseries: Timeseries object from output of SHOP stored as a timeseries in cdf + """ + + view_id = dm.ViewId("sp_powerops_models", "SHOPTimeSeries", "1") + object_type: Optional[str] = Field(None, alias="objectType") + object_name: Optional[str] = Field(None, alias="objectName") + attribute_name: Optional[str] = Field(None, alias="attributeName") + timeseries: Union[TimeSeries, str, None] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> SHOPTimeSeries: + """Convert this GraphQL format of shop time series to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return SHOPTimeSeries( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + object_type=self.object_type, + object_name=self.object_name, + attribute_name=self.attribute_name, + timeseries=self.timeseries, + ) + + def as_write(self) -> SHOPTimeSeriesWrite: + """Convert this GraphQL format of shop time series to the writing format.""" + return SHOPTimeSeriesWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + object_type=self.object_type, + object_name=self.object_name, + attribute_name=self.attribute_name, + timeseries=self.timeseries, + ) + + class SHOPTimeSeries(DomainModel): """This represents the reading version of shop time series. 
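as_read() deliberately fails when the query did not select createdTime/lastUpdatedTime, because then no DataRecordGraphQL gets built; as_write() has no such requirement since it always starts from existing_version=0. A sketch of both paths (ids illustrative):

from cognite.powerops.client._generated.v1 import data_classes

ts = data_classes.SHOPTimeSeriesGraphQL.model_validate(
    {"space": "sp_powerops_instances", "externalId": "shop_ts_1", "objectType": "plant"}
)
try:
    ts.as_read()  # no createdTime/lastUpdatedTime selected -> no data record
except ValueError as err:
    print(err)
ts.as_write()  # fine: the write format does not need a data record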
@@ -119,6 +187,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -149,7 +218,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_shop_trigger_input.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_trigger_input.py index 6edff1f2e..4a16bdbe3 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_shop_trigger_input.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_trigger_input.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,11 +18,12 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._scenario import Scenario, ScenarioWrite + from ._scenario import Scenario, ScenarioGraphQL, ScenarioWrite __all__ = [ @@ -46,6 +50,86 @@ } +class SHOPTriggerInputGraphQL(GraphQLCore): + """This represents the reading version of shop trigger input, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the shop trigger input. + data_record: The data record of the shop trigger input node. + process_id: The process associated with the function execution + process_step: This is the step in the process. 
+ function_name: The name of the function + function_call_id: The function call id + cog_shop_tag: Optionally specify cogshop tag to trigger + scenario: The scenario that is used in the shop run + """ + + view_id = dm.ViewId("sp_powerops_models", "SHOPTriggerInput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + cog_shop_tag: Optional[str] = Field(None, alias="cogShopTag") + scenario: Optional[ScenarioGraphQL] = Field(None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("scenario", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> SHOPTriggerInput: + """Convert this GraphQL format of shop trigger input to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return SHOPTriggerInput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + cog_shop_tag=self.cog_shop_tag, + scenario=self.scenario.as_read() if isinstance(self.scenario, GraphQLCore) else self.scenario, + ) + + def as_write(self) -> SHOPTriggerInputWrite: + """Convert this GraphQL format of shop trigger input to the writing format.""" + return SHOPTriggerInputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + cog_shop_tag=self.cog_shop_tag, + scenario=self.scenario.as_write() if isinstance(self.scenario, DomainModel) else self.scenario, + ) + + class SHOPTriggerInput(DomainModel): """This represents the reading version of shop trigger input. 
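Nested objects convert recursively: calling as_read() on a SHOPTriggerInputGraphQL also converts an embedded ScenarioGraphQL through the isinstance(..., GraphQLCore) check, while bare identifiers would pass through untouched. A sketch under the assumption that the query selected the timestamps on both levels (all values illustrative):

from cognite.powerops.client._generated.v1 import data_classes

raw = {
    "space": "sp_powerops_instances",
    "externalId": "shop_trigger_input_1",
    "processId": "proc_1",
    "processStep": 1,
    "functionName": "shop_trigger",
    "functionCallId": "call_1",
    "createdTime": "2024-03-21T00:00:00Z",
    "lastUpdatedTime": "2024-03-21T00:00:00Z",
    "scenario": {
        "space": "sp_powerops_instances",
        "externalId": "scenario_day_ahead",
        "name": "day-ahead",
        "createdTime": "2024-03-21T00:00:00Z",
        "lastUpdatedTime": "2024-03-21T00:00:00Z",
    },
}
trigger = data_classes.SHOPTriggerInputGraphQL.model_validate(raw)
read = trigger.as_read()  # SHOPTriggerInput; read.scenario is a Scenario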
@@ -131,6 +215,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -167,7 +252,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_shop_trigger_output.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_trigger_output.py index 73b5f1c0c..1e6ee7409 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_shop_trigger_output.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_trigger_output.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,14 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._shop_result import SHOPResult, SHOPResultWrite - from ._shop_trigger_input import SHOPTriggerInput, SHOPTriggerInputWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._shop_result import SHOPResult, SHOPResultGraphQL, SHOPResultWrite + from ._shop_trigger_input import SHOPTriggerInput, SHOPTriggerInputGraphQL, SHOPTriggerInputWrite __all__ = [ @@ -47,6 +51,90 @@ } +class SHOPTriggerOutputGraphQL(GraphQLCore): + """This represents the reading version of shop trigger output, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the shop trigger output. + data_record: The data record of the shop trigger output node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + alerts: An array of calculation level Alerts. + shop_result: The shop result field. 
+ input_: The prepped and processed scenario to send to shop trigger + """ + + view_id = dm.ViewId("sp_powerops_models", "SHOPTriggerOutput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + shop_result: Optional[SHOPResultGraphQL] = Field(None, repr=False, alias="shopResult") + input_: Optional[SHOPTriggerInputGraphQL] = Field(None, repr=False, alias="input") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "shop_result", "input_", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> SHOPTriggerOutput: + """Convert this GraphQL format of shop trigger output to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return SHOPTriggerOutput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + shop_result=self.shop_result.as_read() if isinstance(self.shop_result, GraphQLCore) else self.shop_result, + input_=self.input_.as_read() if isinstance(self.input_, GraphQLCore) else self.input_, + ) + + def as_write(self) -> SHOPTriggerOutputWrite: + """Convert this GraphQL format of shop trigger output to the writing format.""" + return SHOPTriggerOutputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + shop_result=self.shop_result.as_write() if isinstance(self.shop_result, DomainModel) else self.shop_result, + input_=self.input_.as_write() if isinstance(self.input_, DomainModel) else self.input_, + ) + + class SHOPTriggerOutput(DomainModel): """This represents the reading version of shop trigger output. 
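The Union widenings in the hunks below (list[dm.NodeId] joining list[str]) let edge end nodes be addressed by a NodeId, which carries its own space, instead of only by external-id strings resolved against the default instance space. A sketch (ids and spaces illustrative):

from cognite.client import data_modeling as dm
from cognite.powerops.client._generated.v1 import data_classes

output = data_classes.SHOPTriggerOutputWrite(
    space="sp_powerops_instances",
    external_id="shop_trigger_output_1",
    process_id="proc_1",
    process_step=1,
    function_name="shop_trigger",
    function_call_id="call_1",
    # an alert living outside the default space, referenced by NodeId:
    alerts=[dm.NodeId("sp_powerops_other", "alert_1")],
)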
@@ -73,7 +161,7 @@ class SHOPTriggerOutput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) shop_result: Union[SHOPResult, str, dm.NodeId, None] = Field(None, repr=False, alias="shopResult") input_: Union[SHOPTriggerInput, str, dm.NodeId, None] = Field(None, repr=False, alias="input") @@ -128,7 +216,7 @@ class SHOPTriggerOutputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) shop_result: Union[SHOPResultWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="shopResult") input_: Union[SHOPTriggerInputWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="input") @@ -137,6 +225,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -176,7 +265,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -191,7 +280,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_shop_input.py b/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_shop_input.py index 38c3c95d4..65732f85c 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_shop_input.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_shop_input.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,11 +19,12 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._bid_configuration_shop import BidConfigurationShop, BidConfigurationShopWrite + from ._bid_configuration_shop import BidConfigurationShop, BidConfigurationShopGraphQL, BidConfigurationShopWrite __all__ = [ @@ -51,6 +55,102 @@ } +class TaskDispatcherShopInputGraphQL(GraphQLCore): + """This represents the reading version of task dispatcher shop input, used + when data is retrieved from CDF using GraphQL. 
+ + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the task dispatcher shop input. + data_record: The data record of the task dispatcher shop input node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + bid_configuration: The bid configuration field. + bid_date: The bid date + shop_start: The shop start date + shop_end: The shop end date + """ + + view_id = dm.ViewId("sp_powerops_models", "TaskDispatcherShopInput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + bid_configuration: Optional[BidConfigurationShopGraphQL] = Field(None, repr=False, alias="bidConfiguration") + bid_date: Optional[datetime.date] = Field(None, alias="bidDate") + shop_start: Optional[datetime.date] = Field(None, alias="shopStart") + shop_end: Optional[datetime.date] = Field(None, alias="shopEnd") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("bid_configuration", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> TaskDispatcherShopInput: + """Convert this GraphQL format of task dispatcher shop input to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return TaskDispatcherShopInput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + bid_configuration=( + self.bid_configuration.as_read() + if isinstance(self.bid_configuration, GraphQLCore) + else self.bid_configuration + ), + bid_date=self.bid_date, + shop_start=self.shop_start, + shop_end=self.shop_end, + ) + + def as_write(self) -> TaskDispatcherShopInputWrite: + """Convert this GraphQL format of task dispatcher shop input to the writing format.""" + return TaskDispatcherShopInputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + bid_configuration=( + self.bid_configuration.as_write() + if isinstance(self.bid_configuration, DomainModel) + else self.bid_configuration + ), + bid_date=self.bid_date, + shop_start=self.shop_start, + shop_end=self.shop_end, + ) + + class TaskDispatcherShopInput(DomainModel): """This represents the reading version of task dispatcher shop input. 
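Every _to_instances_write hunk in this diff applies the same rule: when allow_version_increase is set, the node is applied with existing_version=None, so CDF increments the stored version instead of rejecting a write whose version is stale. A reduced sketch of just that rule (the helper and its arguments are hypothetical):

from typing import Optional

from cognite.client import data_modeling as dm

def build_node(stored_version: Optional[int], allow_version_increase: bool) -> dm.NodeApply:
    # existing_version=None disables optimistic locking for this apply call;
    # passing the stored version makes CDF reject writes against stale data.
    return dm.NodeApply(
        space="sp_powerops_instances",           # illustrative
        external_id="task_dispatcher_input_1",   # illustrative
        existing_version=None if allow_version_increase else stored_version,
    )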
@@ -154,6 +254,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -200,7 +301,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_shop_output.py b/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_shop_output.py index 731572bec..93489a6d9 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_shop_output.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_shop_output.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,14 +18,23 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._preprocessor_input import PreprocessorInput, PreprocessorInputWrite - from ._shop_partial_bid_calculation_input import ShopPartialBidCalculationInput, ShopPartialBidCalculationInputWrite - from ._task_dispatcher_shop_input import TaskDispatcherShopInput, TaskDispatcherShopInputWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._preprocessor_input import PreprocessorInput, PreprocessorInputGraphQL, PreprocessorInputWrite + from ._shop_partial_bid_calculation_input import ( + ShopPartialBidCalculationInput, + ShopPartialBidCalculationInputGraphQL, + ShopPartialBidCalculationInputWrite, + ) + from ._task_dispatcher_shop_input import ( + TaskDispatcherShopInput, + TaskDispatcherShopInputGraphQL, + TaskDispatcherShopInputWrite, + ) __all__ = [ @@ -48,6 +60,126 @@ } + +class TaskDispatcherShopOutputGraphQL(GraphQLCore): + """This represents the reading version of task dispatcher shop output, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the task dispatcher shop output. + data_record: The data record of the task dispatcher shop output node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + alerts: An array of calculation level Alerts. + input_: The previous step in the process. + partial_bid_calculations: An array of partial bid calculations/inputs to the partial bid calculation function. + preprocessor_calculations: An array of preprocessor calculations/inputs to preprocessor function. 
+ """ + + view_id = dm.ViewId("sp_powerops_models", "TaskDispatcherShopOutput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + input_: Optional[TaskDispatcherShopInputGraphQL] = Field(None, repr=False, alias="input") + partial_bid_calculations: Optional[list[ShopPartialBidCalculationInputGraphQL]] = Field( + default=None, repr=False, alias="partialBidCalculations" + ) + preprocessor_calculations: Optional[list[PreprocessorInputGraphQL]] = Field( + default=None, repr=False, alias="preprocessorCalculations" + ) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "input_", "partial_bid_calculations", "preprocessor_calculations", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> TaskDispatcherShopOutput: + """Convert this GraphQL format of task dispatcher shop output to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return TaskDispatcherShopOutput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + input_=self.input_.as_read() if isinstance(self.input_, GraphQLCore) else self.input_, + partial_bid_calculations=[ + ( + partial_bid_calculation.as_read() + if isinstance(partial_bid_calculation, GraphQLCore) + else partial_bid_calculation + ) + for partial_bid_calculation in self.partial_bid_calculations or [] + ], + preprocessor_calculations=[ + ( + preprocessor_calculation.as_read() + if isinstance(preprocessor_calculation, GraphQLCore) + else preprocessor_calculation + ) + for preprocessor_calculation in self.preprocessor_calculations or [] + ], + ) + + def as_write(self) -> TaskDispatcherShopOutputWrite: + """Convert this GraphQL format of task dispatcher shop output to the writing format.""" + return TaskDispatcherShopOutputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + input_=self.input_.as_write() if isinstance(self.input_, DomainModel) else self.input_, + partial_bid_calculations=[ + ( + partial_bid_calculation.as_write() + if isinstance(partial_bid_calculation, DomainModel) + else partial_bid_calculation + ) + 
for partial_bid_calculation in self.partial_bid_calculations or [] + ], + preprocessor_calculations=[ + ( + preprocessor_calculation.as_write() + if isinstance(preprocessor_calculation, DomainModel) + else preprocessor_calculation + ) + for preprocessor_calculation in self.preprocessor_calculations or [] + ], + ) + + class TaskDispatcherShopOutput(DomainModel): """This represents the reading version of task dispatcher shop output. @@ -75,12 +207,12 @@ class TaskDispatcherShopOutput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) input_: Union[TaskDispatcherShopInput, str, dm.NodeId, None] = Field(None, repr=False, alias="input") - partial_bid_calculations: Union[list[ShopPartialBidCalculationInput], list[str], None] = Field( + partial_bid_calculations: Union[list[ShopPartialBidCalculationInput], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="partialBidCalculations" ) - preprocessor_calculations: Union[list[PreprocessorInput], list[str], None] = Field( + preprocessor_calculations: Union[list[PreprocessorInput], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="preprocessorCalculations" ) @@ -151,12 +283,12 @@ class TaskDispatcherShopOutputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) input_: Union[TaskDispatcherShopInputWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="input") - partial_bid_calculations: Union[list[ShopPartialBidCalculationInputWrite], list[str], None] = Field( - default=None, repr=False, alias="partialBidCalculations" + partial_bid_calculations: Union[list[ShopPartialBidCalculationInputWrite], list[str], list[dm.NodeId], None] = ( + Field(default=None, repr=False, alias="partialBidCalculations") ) - preprocessor_calculations: Union[list[PreprocessorInputWrite], list[str], None] = Field( + preprocessor_calculations: Union[list[PreprocessorInputWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="preprocessorCalculations" ) @@ -165,6 +297,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -198,7 +331,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -213,7 +346,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + 
edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) @@ -225,6 +364,8 @@ def _to_instances_write( end_node=partial_bid_calculation, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) @@ -236,6 +377,8 @@ def _to_instances_write( end_node=preprocessor_calculation, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_water_input.py b/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_water_input.py index 9bf1fcf8e..a183ef2d1 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_water_input.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_water_input.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,11 +19,16 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._bid_configuration_water import BidConfigurationWater, BidConfigurationWaterWrite + from ._bid_configuration_water import ( + BidConfigurationWater, + BidConfigurationWaterGraphQL, + BidConfigurationWaterWrite, + ) __all__ = [ @@ -47,6 +55,94 @@ } +class TaskDispatcherWaterInputGraphQL(GraphQLCore): + """This represents the reading version of task dispatcher water input, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the task dispatcher water input. + data_record: The data record of the task dispatcher water input node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + bid_configuration: The bid configuration field. 
+ bid_date: The bid date + """ + + view_id = dm.ViewId("sp_powerops_models", "TaskDispatcherWaterInput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + bid_configuration: Optional[BidConfigurationWaterGraphQL] = Field(None, repr=False, alias="bidConfiguration") + bid_date: Optional[datetime.date] = Field(None, alias="bidDate") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("bid_configuration", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> TaskDispatcherWaterInput: + """Convert this GraphQL format of task dispatcher water input to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return TaskDispatcherWaterInput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + bid_configuration=( + self.bid_configuration.as_read() + if isinstance(self.bid_configuration, GraphQLCore) + else self.bid_configuration + ), + bid_date=self.bid_date, + ) + + def as_write(self) -> TaskDispatcherWaterInputWrite: + """Convert this GraphQL format of task dispatcher water input to the writing format.""" + return TaskDispatcherWaterInputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + bid_configuration=( + self.bid_configuration.as_write() + if isinstance(self.bid_configuration, DomainModel) + else self.bid_configuration + ), + bid_date=self.bid_date, + ) + + class TaskDispatcherWaterInput(DomainModel): """This represents the reading version of task dispatcher water input. 
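The `parse_data_record` hook that appears in each generated `*GraphQL` class above is a pydantic `mode="before"` model validator: GraphQL returns `createdTime`/`lastUpdatedTime` as flat keys on each item, and the validator folds them into a nested `dataRecord` before normal field validation runs. A minimal, self-contained sketch of the pattern (stub models, not the generated SDK classes):

```python
import datetime
from typing import Any, Optional

from pydantic import BaseModel, Field, model_validator


class DataRecordStub(BaseModel):
    created_time: Optional[datetime.datetime] = None
    last_updated_time: Optional[datetime.datetime] = None


class NodeStub(BaseModel):
    external_id: str = Field(alias="externalId")
    data_record: Optional[DataRecordStub] = Field(None, alias="dataRecord")

    @model_validator(mode="before")
    @classmethod
    def parse_data_record(cls, values: Any) -> Any:
        # GraphQL items carry createdTime/lastUpdatedTime at the top level;
        # fold them into the nested data record before field validation.
        if not isinstance(values, dict):
            return values
        if "lastUpdatedTime" in values or "createdTime" in values:
            values["dataRecord"] = DataRecordStub(
                created_time=values.pop("createdTime", None),
                last_updated_time=values.pop("lastUpdatedTime", None),
            )
        return values


node = NodeStub.model_validate(
    {"externalId": "task_1", "createdTime": "2024-03-20T12:00:00Z"}
)
print(node.data_record.created_time)  # 2024-03-20 12:00:00+00:00
```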
@@ -140,6 +236,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -180,7 +277,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_water_output.py b/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_water_output.py index 9b23a542b..8d0192cd6 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_water_output.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_task_dispatcher_water_output.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,14 +18,20 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._task_dispatcher_water_input import TaskDispatcherWaterInput, TaskDispatcherWaterInputWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._task_dispatcher_water_input import ( + TaskDispatcherWaterInput, + TaskDispatcherWaterInputGraphQL, + TaskDispatcherWaterInputWrite, + ) from ._water_partial_bid_calculation_input import ( WaterPartialBidCalculationInput, + WaterPartialBidCalculationInputGraphQL, WaterPartialBidCalculationInputWrite, ) @@ -50,6 +59,106 @@ } +class TaskDispatcherWaterOutputGraphQL(GraphQLCore): + """This represents the reading version of task dispatcher water output, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the task dispatcher water output. + data_record: The data record of the task dispatcher water output node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + alerts: An array of calculation level Alerts. + input_: The previous step in the process. + bid_calculation_tasks: An array of bid calculation tasks. 
+ """ + + view_id = dm.ViewId("sp_powerops_models", "TaskDispatcherWaterOutput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + input_: Optional[TaskDispatcherWaterInputGraphQL] = Field(None, repr=False, alias="input") + bid_calculation_tasks: Optional[list[WaterPartialBidCalculationInputGraphQL]] = Field( + default=None, repr=False, alias="bidCalculationTasks" + ) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "input_", "bid_calculation_tasks", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> TaskDispatcherWaterOutput: + """Convert this GraphQL format of task dispatcher water output to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return TaskDispatcherWaterOutput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + input_=self.input_.as_read() if isinstance(self.input_, GraphQLCore) else self.input_, + bid_calculation_tasks=[ + ( + bid_calculation_task.as_read() + if isinstance(bid_calculation_task, GraphQLCore) + else bid_calculation_task + ) + for bid_calculation_task in self.bid_calculation_tasks or [] + ], + ) + + def as_write(self) -> TaskDispatcherWaterOutputWrite: + """Convert this GraphQL format of task dispatcher water output to the writing format.""" + return TaskDispatcherWaterOutputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + input_=self.input_.as_write() if isinstance(self.input_, DomainModel) else self.input_, + bid_calculation_tasks=[ + ( + bid_calculation_task.as_write() + if isinstance(bid_calculation_task, DomainModel) + else bid_calculation_task + ) + for bid_calculation_task in self.bid_calculation_tasks or [] + ], + ) + + class TaskDispatcherWaterOutput(DomainModel): """This represents the reading version of task dispatcher water output. 
@@ -76,9 +185,9 @@ class TaskDispatcherWaterOutput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) input_: Union[TaskDispatcherWaterInput, str, dm.NodeId, None] = Field(None, repr=False, alias="input") - bid_calculation_tasks: Union[list[WaterPartialBidCalculationInput], list[str], None] = Field( + bid_calculation_tasks: Union[list[WaterPartialBidCalculationInput], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="bidCalculationTasks" ) @@ -140,9 +249,9 @@ class TaskDispatcherWaterOutputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) input_: Union[TaskDispatcherWaterInputWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="input") - bid_calculation_tasks: Union[list[WaterPartialBidCalculationInputWrite], list[str], None] = Field( + bid_calculation_tasks: Union[list[WaterPartialBidCalculationInputWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="bidCalculationTasks" ) @@ -151,6 +260,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -184,7 +294,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -199,7 +309,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) @@ -211,6 +327,8 @@ def _to_instances_write( end_node=bid_calculation_task, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_total_bid_matrix_calculation_input.py b/cognite/powerops/client/_generated/v1/data_classes/_total_bid_matrix_calculation_input.py index 2fbd3890b..7a14b6e08 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_total_bid_matrix_calculation_input.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_total_bid_matrix_calculation_input.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( 
DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,11 +18,12 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._bid_matrix import BidMatrix, BidMatrixWrite + from ._bid_matrix import BidMatrix, BidMatrixGraphQL, BidMatrixWrite __all__ = [ @@ -45,6 +49,88 @@ } +class TotalBidMatrixCalculationInputGraphQL(GraphQLCore): + """This represents the reading version of total bid matrix calculation input, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the total bid matrix calculation input. + data_record: The data record of the total bid matrix calculation input node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + partial_bid_matrices: The partial bid matrices that are used to calculate the total bid matrix. + """ + + view_id = dm.ViewId("sp_powerops_models", "TotalBidMatrixCalculationInput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + partial_bid_matrices: Optional[list[BidMatrixGraphQL]] = Field(default=None, repr=False, alias="partialBidMatrices") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("partial_bid_matrices", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> TotalBidMatrixCalculationInput: + """Convert this GraphQL format of total bid matrix calculation input to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return TotalBidMatrixCalculationInput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + partial_bid_matrices=[ + partial_bid_matrice.as_read() if isinstance(partial_bid_matrice, GraphQLCore) else partial_bid_matrice + for partial_bid_matrice in self.partial_bid_matrices or [] + ], + ) + + def as_write(self) -> TotalBidMatrixCalculationInputWrite: + """Convert this GraphQL format of total bid matrix calculation input to the writing format.""" + return TotalBidMatrixCalculationInputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + partial_bid_matrices=[ 
+ partial_bid_matrice.as_write() if isinstance(partial_bid_matrice, DomainModel) else partial_bid_matrice + for partial_bid_matrice in self.partial_bid_matrices or [] + ], + ) + + class TotalBidMatrixCalculationInput(DomainModel): """This represents the reading version of total bid matrix calculation input. @@ -69,7 +155,7 @@ class TotalBidMatrixCalculationInput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - partial_bid_matrices: Union[list[BidMatrix], list[str], None] = Field( + partial_bid_matrices: Union[list[BidMatrix], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="partialBidMatrices" ) @@ -123,7 +209,7 @@ class TotalBidMatrixCalculationInputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - partial_bid_matrices: Union[list[BidMatrixWrite], list[str], None] = Field( + partial_bid_matrices: Union[list[BidMatrixWrite], list[str], list[dm.NodeId], None] = Field( default=None, repr=False, alias="partialBidMatrices" ) @@ -132,6 +218,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -159,7 +246,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -179,6 +266,8 @@ def _to_instances_write( end_node=partial_bid_matrice, edge_type=edge_type, view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_total_bid_matrix_calculation_output.py b/cognite/powerops/client/_generated/v1/data_classes/_total_bid_matrix_calculation_output.py index fd4f73955..40315443d 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_total_bid_matrix_calculation_output.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_total_bid_matrix_calculation_output.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,13 +18,18 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_document_day_ahead import BidDocumentDayAhead, BidDocumentDayAheadWrite - from ._total_bid_matrix_calculation_input import TotalBidMatrixCalculationInput, TotalBidMatrixCalculationInputWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_document_day_ahead import BidDocumentDayAhead, BidDocumentDayAheadGraphQL, BidDocumentDayAheadWrite + from ._total_bid_matrix_calculation_input import ( + TotalBidMatrixCalculationInput, + TotalBidMatrixCalculationInputGraphQL, + TotalBidMatrixCalculationInputWrite, + ) __all__ = [ @@ -47,6 +55,94 @@ } +class 
TotalBidMatrixCalculationOutputGraphQL(GraphQLCore): + """This represents the reading version of total bid matrix calculation output, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the total bid matrix calculation output. + data_record: The data record of the total bid matrix calculation output node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + alerts: An array of calculation level Alerts. + bid_document: The bid document field. + input_: The previous step in the process. + """ + + view_id = dm.ViewId("sp_powerops_models", "TotalBidMatrixCalculationOutput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + bid_document: Optional[BidDocumentDayAheadGraphQL] = Field(None, repr=False, alias="bidDocument") + input_: Optional[TotalBidMatrixCalculationInputGraphQL] = Field(None, repr=False, alias="input") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "bid_document", "input_", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> TotalBidMatrixCalculationOutput: + """Convert this GraphQL format of total bid matrix calculation output to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return TotalBidMatrixCalculationOutput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + bid_document=( + self.bid_document.as_read() if isinstance(self.bid_document, GraphQLCore) else self.bid_document + ), + input_=self.input_.as_read() if isinstance(self.input_, GraphQLCore) else self.input_, + ) + + def as_write(self) -> TotalBidMatrixCalculationOutputWrite: + """Convert this GraphQL format of total bid matrix calculation output to the writing format.""" + return TotalBidMatrixCalculationOutputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts 
or []], + bid_document=( + self.bid_document.as_write() if isinstance(self.bid_document, DomainModel) else self.bid_document + ), + input_=self.input_.as_write() if isinstance(self.input_, DomainModel) else self.input_, + ) + + class TotalBidMatrixCalculationOutput(DomainModel): """This represents the reading version of total bid matrix calculation output. @@ -73,7 +169,7 @@ class TotalBidMatrixCalculationOutput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) bid_document: Union[BidDocumentDayAhead, str, dm.NodeId, None] = Field(None, repr=False, alias="bidDocument") input_: Union[TotalBidMatrixCalculationInput, str, dm.NodeId, None] = Field(None, repr=False, alias="input") @@ -130,7 +226,7 @@ class TotalBidMatrixCalculationOutputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) bid_document: Union[BidDocumentDayAheadWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="bidDocument") input_: Union[TotalBidMatrixCalculationInputWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="input") @@ -139,6 +235,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -180,7 +277,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -195,7 +292,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_turbine_efficiency_curve.py b/cognite/powerops/client/_generated/v1/data_classes/_turbine_efficiency_curve.py index b73c85924..f151e6f1b 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_turbine_efficiency_curve.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_turbine_efficiency_curve.py @@ -4,9 +4,12 @@ from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -14,6 +17,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) 
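The `existing_version=None if allow_version_increase else self.data_record.existing_version` change, repeated in every `_to_instances_write` above and below, is the mechanism behind the new upsert flag: passing `None` drops the optimistic-locking precondition on the node apply, so the write succeeds even when the stored instance has already moved to a newer version. A toy illustration of the conditional (not SDK code):

```python
from typing import Optional


def resolve_existing_version(
    existing_version: Optional[int], allow_version_increase: bool
) -> Optional[int]:
    # A concrete version makes CDF reject the ingest on a version
    # mismatch; None skips the check and lets the version increase.
    return None if allow_version_increase else existing_version


print(resolve_existing_version(3, allow_version_increase=False))  # 3
print(resolve_existing_version(3, allow_version_increase=True))   # None
```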
@@ -37,6 +41,66 @@ } +class TurbineEfficiencyCurveGraphQL(GraphQLCore): + """This represents the reading version of turbine efficiency curve, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the turbine efficiency curve. + data_record: The data record of the turbine efficiency curve node. + head: The reference head values + flow: The flow values + efficiency: The turbine efficiency values + """ + + view_id = dm.ViewId("sp_powerops_models", "TurbineEfficiencyCurve", "1") + head: Optional[float] = None + flow: Optional[list[float]] = None + efficiency: Optional[list[float]] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> TurbineEfficiencyCurve: + """Convert this GraphQL format of turbine efficiency curve to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return TurbineEfficiencyCurve( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + head=self.head, + flow=self.flow, + efficiency=self.efficiency, + ) + + def as_write(self) -> TurbineEfficiencyCurveWrite: + """Convert this GraphQL format of turbine efficiency curve to the writing format.""" + return TurbineEfficiencyCurveWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + head=self.head, + flow=self.flow, + efficiency=self.efficiency, + ) + + class TurbineEfficiencyCurve(DomainModel): """This represents the reading version of turbine efficiency curve. 
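As a usage sketch of the class added above: populate the GraphQL model from one response item, then convert with `as_read`/`as_write`. The import path and the exact response keys (`space`, `externalId`, ...) are assumptions here, not verified against a live data model:

```python
from cognite.powerops.client._generated.v1.data_classes import (  # path assumed
    TurbineEfficiencyCurveGraphQL,
)

# Simplified stand-in for one item of a GraphQL response.
item = {
    "space": "sp_powerops_instance",   # hypothetical instance space
    "externalId": "turbine_curve_42",  # hypothetical external id
    "createdTime": "2024-03-20T12:00:00Z",
    "lastUpdatedTime": "2024-03-21T08:30:00Z",
    "head": 120.0,
    "flow": [10.0, 20.0, 30.0],
    "efficiency": [0.80, 0.90, 0.85],
}

gql = TurbineEfficiencyCurveGraphQL.model_validate(item)
read = gql.as_read()    # reading format; version is a placeholder 0
write = gql.as_write()  # writing format; existing_version=0
print(read.head, write.efficiency)
```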
@@ -107,6 +171,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -131,7 +196,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_water_partial_bid_calculation_input.py b/cognite/powerops/client/_generated/v1/data_classes/_water_partial_bid_calculation_input.py index ba8636dbe..1c0083771 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_water_partial_bid_calculation_input.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_water_partial_bid_calculation_input.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,11 +18,12 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._bid_calculation_task import BidCalculationTask, BidCalculationTaskWrite + from ._bid_calculation_task import BidCalculationTask, BidCalculationTaskGraphQL, BidCalculationTaskWrite __all__ = [ @@ -45,6 +49,90 @@ } +class WaterPartialBidCalculationInputGraphQL(GraphQLCore): + """This represents the reading version of water partial bid calculation input, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the water partial bid calculation input. + data_record: The data record of the water partial bid calculation input node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + calculation_task: The calculation task field. 
+ """ + + view_id = dm.ViewId("sp_powerops_models", "WaterPartialBidCalculationInput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + calculation_task: Optional[BidCalculationTaskGraphQL] = Field(None, repr=False, alias="calculationTask") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("calculation_task", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> WaterPartialBidCalculationInput: + """Convert this GraphQL format of water partial bid calculation input to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return WaterPartialBidCalculationInput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + calculation_task=( + self.calculation_task.as_read() + if isinstance(self.calculation_task, GraphQLCore) + else self.calculation_task + ), + ) + + def as_write(self) -> WaterPartialBidCalculationInputWrite: + """Convert this GraphQL format of water partial bid calculation input to the writing format.""" + return WaterPartialBidCalculationInputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + calculation_task=( + self.calculation_task.as_write() + if isinstance(self.calculation_task, DomainModel) + else self.calculation_task + ), + ) + + class WaterPartialBidCalculationInput(DomainModel): """This represents the reading version of water partial bid calculation input. 
@@ -131,6 +219,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -168,7 +257,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_water_partial_bid_calculation_output.py b/cognite/powerops/client/_generated/v1/data_classes/_water_partial_bid_calculation_output.py index d751db931..40f25e07f 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_water_partial_bid_calculation_output.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_water_partial_bid_calculation_output.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,14 +18,16 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite - from ._bid_matrix_raw import BidMatrixRaw, BidMatrixRawWrite + from ._alert import Alert, AlertGraphQL, AlertWrite + from ._bid_matrix_raw import BidMatrixRaw, BidMatrixRawGraphQL, BidMatrixRawWrite from ._water_partial_bid_calculation_input import ( WaterPartialBidCalculationInput, + WaterPartialBidCalculationInputGraphQL, WaterPartialBidCalculationInputWrite, ) @@ -50,6 +55,98 @@ } +class WaterPartialBidCalculationOutputGraphQL(GraphQLCore): + """This represents the reading version of water partial bid calculation output, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the water partial bid calculation output. + data_record: The data record of the water partial bid calculation output node. + process_id: The process associated with the function execution + process_step: This is the step in the process. + function_name: The name of the function + function_call_id: The function call id + alerts: An array of calculation level Alerts. + raw_partial_matrix: The raw partial matrix field. + input_: The input field. 
+ """ + + view_id = dm.ViewId("sp_powerops_models", "WaterPartialBidCalculationOutput", "1") + process_id: Optional[str] = Field(None, alias="processId") + process_step: Optional[int] = Field(None, alias="processStep") + function_name: Optional[str] = Field(None, alias="functionName") + function_call_id: Optional[str] = Field(None, alias="functionCallId") + alerts: Optional[list[AlertGraphQL]] = Field(default=None, repr=False) + raw_partial_matrix: Optional[BidMatrixRawGraphQL] = Field(None, repr=False, alias="rawPartialMatrix") + input_: Optional[WaterPartialBidCalculationInputGraphQL] = Field(None, repr=False, alias="input") + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("alerts", "raw_partial_matrix", "input_", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> WaterPartialBidCalculationOutput: + """Convert this GraphQL format of water partial bid calculation output to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return WaterPartialBidCalculationOutput( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_read() if isinstance(alert, GraphQLCore) else alert for alert in self.alerts or []], + raw_partial_matrix=( + self.raw_partial_matrix.as_read() + if isinstance(self.raw_partial_matrix, GraphQLCore) + else self.raw_partial_matrix + ), + input_=self.input_.as_read() if isinstance(self.input_, GraphQLCore) else self.input_, + ) + + def as_write(self) -> WaterPartialBidCalculationOutputWrite: + """Convert this GraphQL format of water partial bid calculation output to the writing format.""" + return WaterPartialBidCalculationOutputWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + process_id=self.process_id, + process_step=self.process_step, + function_name=self.function_name, + function_call_id=self.function_call_id, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + raw_partial_matrix=( + self.raw_partial_matrix.as_write() + if isinstance(self.raw_partial_matrix, DomainModel) + else self.raw_partial_matrix + ), + input_=self.input_.as_write() if isinstance(self.input_, DomainModel) else self.input_, + ) + + class WaterPartialBidCalculationOutput(DomainModel): """This represents the reading version of water partial bid calculation output. 
@@ -76,7 +173,7 @@ class WaterPartialBidCalculationOutput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[Alert], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) raw_partial_matrix: Union[BidMatrixRaw, str, dm.NodeId, None] = Field(None, repr=False, alias="rawPartialMatrix") input_: Union[WaterPartialBidCalculationInput, str, dm.NodeId, None] = Field(None, repr=False, alias="input") @@ -135,7 +232,7 @@ class WaterPartialBidCalculationOutputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) + alerts: Union[list[AlertWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) raw_partial_matrix: Union[BidMatrixRawWrite, str, dm.NodeId, None] = Field( None, repr=False, alias="rawPartialMatrix" ) @@ -146,6 +243,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -189,7 +287,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -204,7 +302,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") for alert in self.alerts or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=alert, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_watercourse.py b/cognite/powerops/client/_generated/v1/data_classes/_watercourse.py index d6750f8ad..fa4e0aa11 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_watercourse.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_watercourse.py @@ -6,9 +6,12 @@ from cognite.client import data_modeling as dm from cognite.client.data_classes import TimeSeries as CogniteTimeSeries from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -16,12 +19,13 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, TimeSeries, ) if TYPE_CHECKING: - from ._plant import Plant, PlantWrite + from ._plant import Plant, PlantGraphQL, PlantWrite __all__ = [ @@ -48,6 +52,86 @@ } +class WatercourseGraphQL(GraphQLCore): + """This represents the reading version of watercourse, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. 
+ external_id: The external id of the watercourse. + data_record: The data record of the watercourse node. + name: Name for the Asset + display_name: Display name for the Asset. + ordering: The ordering of the asset + production_obligation: The production obligation for the Watercourse. + penalty_limit: The penalty limit for the watercourse (used by SHOP). + plants: The plants that are connected to the Watercourse. + """ + + view_id = dm.ViewId("sp_powerops_models", "Watercourse", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + ordering: Optional[int] = None + production_obligation: Union[list[TimeSeries], list[str], None] = Field(None, alias="productionObligation") + penalty_limit: Optional[float] = Field(None, alias="penaltyLimit") + plants: Optional[list[PlantGraphQL]] = Field(default=None, repr=False) + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + @field_validator("plants", mode="before") + def parse_graphql(cls, value: Any) -> Any: + if not isinstance(value, dict): + return value + if "items" in value: + return value["items"] + return value + + def as_read(self) -> Watercourse: + """Convert this GraphQL format of watercourse to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return Watercourse( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + production_obligation=self.production_obligation, + penalty_limit=self.penalty_limit, + plants=[plant.as_read() if isinstance(plant, GraphQLCore) else plant for plant in self.plants or []], + ) + + def as_write(self) -> WatercourseWrite: + """Convert this GraphQL format of watercourse to the writing format.""" + return WatercourseWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + production_obligation=self.production_obligation, + penalty_limit=self.penalty_limit, + plants=[plant.as_write() if isinstance(plant, DomainModel) else plant for plant in self.plants or []], + ) + + class Watercourse(DomainModel): """This represents the reading version of watercourse. 
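For the `plants` relation above, `_to_instances_write` hands each watercourse-to-plant pair to `DomainRelationWrite.from_edge_to_resources` with the `isSubAssetOf` edge type. A hedged sketch of the kind of edge that plausibly produces, built directly with the cognite-sdk (the external-id scheme below is illustrative, not the SDK's):

```python
from cognite.client import data_modeling as dm

edge = dm.EdgeApply(
    space="sp_powerops_instance",          # hypothetical instance space
    external_id="watercourse_1:plant_1",   # illustrative edge id scheme
    type=dm.DirectRelationReference("sp_powerops_types", "isSubAssetOf"),
    start_node=dm.DirectRelationReference("sp_powerops_instance", "watercourse_1"),
    end_node=dm.DirectRelationReference("sp_powerops_instance", "plant_1"),
)
print(edge.dump())
```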
@@ -72,7 +156,7 @@ class Watercourse(DomainModel): ordering: Optional[int] = None production_obligation: Union[list[TimeSeries], list[str], None] = Field(None, alias="productionObligation") penalty_limit: Optional[float] = Field(None, alias="penaltyLimit") - plants: Union[list[Plant], list[str], None] = Field(default=None, repr=False) + plants: Union[list[Plant], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def as_write(self) -> WatercourseWrite: """Convert this read version of watercourse to the writing version.""" @@ -122,13 +206,14 @@ class WatercourseWrite(DomainModelWrite): ordering: Optional[int] = None production_obligation: Union[list[TimeSeries], list[str], None] = Field(None, alias="productionObligation") penalty_limit: Optional[float] = Field(None, alias="penaltyLimit") - plants: Union[list[PlantWrite], list[str], None] = Field(default=None, repr=False) + plants: Union[list[PlantWrite], list[str], list[dm.NodeId], None] = Field(default=None, repr=False) def _to_instances_write( self, cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -159,7 +244,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( @@ -174,7 +259,13 @@ def _to_instances_write( edge_type = dm.DirectRelationReference("sp_powerops_types", "isSubAssetOf") for plant in self.plants or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=plant, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=plant, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + write_none=write_none, + allow_version_increase=allow_version_increase, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_watercourse_shop.py b/cognite/powerops/client/_generated/v1/data_classes/_watercourse_shop.py index af943ed85..1b1018754 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_watercourse_shop.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_watercourse_shop.py @@ -5,9 +5,12 @@ from cognite.client import data_modeling as dm from pydantic import Field +from pydantic import field_validator, model_validator from ._core import ( DEFAULT_INSTANCE_SPACE, + DataRecord, + DataRecordGraphQL, DataRecordWrite, DomainModel, DomainModelCore, @@ -15,6 +18,7 @@ DomainModelWriteList, DomainModelList, DomainRelationWrite, + GraphQLCore, ResourcesWrite, ) @@ -41,6 +45,66 @@ } +class WatercourseShopGraphQL(GraphQLCore): + """This represents the reading version of watercourse shop, used + when data is retrieved from CDF using GraphQL. + + It is used when retrieving data from CDF using GraphQL. + + Args: + space: The space where the node is located. + external_id: The external id of the watercourse shop. + data_record: The data record of the watercourse shop node. + name: Name for the Asset + display_name: Display name for the Asset. 
+ ordering: The ordering of the asset + """ + + view_id = dm.ViewId("sp_powerops_models", "WatercourseShop", "1") + name: Optional[str] = None + display_name: Optional[str] = Field(None, alias="displayName") + ordering: Optional[int] = None + + @model_validator(mode="before") + def parse_data_record(cls, values: Any) -> Any: + if not isinstance(values, dict): + return values + if "lastUpdatedTime" in values or "createdTime" in values: + values["dataRecord"] = DataRecordGraphQL( + created_time=values.pop("createdTime", None), + last_updated_time=values.pop("lastUpdatedTime", None), + ) + return values + + def as_read(self) -> WatercourseShop: + """Convert this GraphQL format of watercourse shop to the reading format.""" + if self.data_record is None: + raise ValueError("This object cannot be converted to a read format because it lacks a data record.") + return WatercourseShop( + space=self.space, + external_id=self.external_id, + data_record=DataRecord( + version=0, + last_updated_time=self.data_record.last_updated_time, + created_time=self.data_record.created_time, + ), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + ) + + def as_write(self) -> WatercourseShopWrite: + """Convert this GraphQL format of watercourse shop to the writing format.""" + return WatercourseShopWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=0), + name=self.name, + display_name=self.display_name, + ordering=self.ordering, + ) + + class WatercourseShop(DomainModel): """This represents the reading version of watercourse shop. @@ -111,6 +175,7 @@ def _to_instances_write( cache: set[tuple[str, str]], view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, write_none: bool = False, + allow_version_increase: bool = False, ) -> ResourcesWrite: resources = ResourcesWrite() if self.as_tuple_id() in cache: @@ -135,7 +200,7 @@ def _to_instances_write( this_node = dm.NodeApply( space=self.space, external_id=self.external_id, - existing_version=self.data_record.existing_version, + existing_version=None if allow_version_increase else self.data_record.existing_version, type=self.node_type, sources=[ dm.NodeOrEdgeData( diff --git a/poetry.lock b/poetry.lock index ee02563c3..d3a18b33a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -208,33 +208,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.2.0" +version = "24.3.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"}, - {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"}, - {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"}, - {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"}, - {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"}, - {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"}, - {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"}, - {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"}, - {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"}, - {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"}, - {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"}, - {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"}, - {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"}, - {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"}, - {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"}, - {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"}, - {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"}, - {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"}, - {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"}, - {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"}, - {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"}, - {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"}, + {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, + {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, + {file = 
"black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, + {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, + {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, + {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, + {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, + {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, + {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, + {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, + {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, + {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, + {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, + {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, + {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, + {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, + {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, + {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, + {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, + {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, + {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, + {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, ] [package.dependencies] @@ -496,13 +496,13 @@ python-json-logger = ">=2.0.7,<3.0.0" [[package]] name = "cognite-pygen" -version = "0.99.11" +version = "0.99.14" description = "Cognite Python SDK Generator" optional = false -python-versions = ">=3.9,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "cognite_pygen-0.99.11-py3-none-any.whl", hash = "sha256:b8232ca9015cd7d8451e118cd9f2cda4d53cd7b8289513252c5367f62c3a2a35"}, - {file = "cognite_pygen-0.99.11.tar.gz", hash = "sha256:77b3dcd302e059b74d3f24bc46f1b3a1737bf423a7a3700e4e12117115351a0c"}, + {file = "cognite_pygen-0.99.14-py3-none-any.whl", hash = 
"sha256:c4f55efa76d6255cc50bf298486ba5fc9d7903436cae0d70205d1c39b8713ae5"}, + {file = "cognite_pygen-0.99.14.tar.gz", hash = "sha256:0dcf0daead07f71be72e2a6eb768b23e98cc58ecb8d7824e511f0845de826aff"}, ] [package.dependencies] @@ -524,13 +524,13 @@ format = ["black (>=24.1.0)"] [[package]] name = "cognite-sdk" -version = "7.26.0" +version = "7.26.2" description = "Cognite Python SDK" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "cognite_sdk-7.26.0-py3-none-any.whl", hash = "sha256:38ae4c57e2e30383c882ddfcfb5fa1d4b702436ba4d785ae8fd0cdaa32044341"}, - {file = "cognite_sdk-7.26.0.tar.gz", hash = "sha256:de5b99fbd820a05465baa0aa6ea8d138ae158baf2607173ad42a2b8d0709aa2a"}, + {file = "cognite_sdk-7.26.2-py3-none-any.whl", hash = "sha256:4d74ce9702941f0705d91caf1895b45737d490b69c1689293ecd58d10a2b0dee"}, + {file = "cognite_sdk-7.26.2.tar.gz", hash = "sha256:389494c564395b0fa6b9834efd3bc4079380b3062004b95c50b2116671320115"}, ] [package.dependencies] @@ -553,7 +553,7 @@ sympy = ["sympy"] yaml = ["PyYAML (>=6.0,<7.0)"] [[package]] -name = "cognite_toolkit" +name = "cognite-toolkit" version = "0.1.1" description = "Official Cognite Data Fusion tool for project templates and configuration deployment" optional = false @@ -585,13 +585,13 @@ files = [ [[package]] name = "comm" -version = "0.2.1" +version = "0.2.2" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." optional = false python-versions = ">=3.8" files = [ - {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, - {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, ] [package.dependencies] @@ -665,63 +665,63 @@ test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] [[package]] name = "coverage" -version = "7.4.3" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, - {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, - {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, - {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, - {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, - {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, - {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, - {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, - {file = 
"coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, - {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, - {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, - {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, - {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, - {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, - {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = 
"coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = 
"coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = 
"coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.dependencies] @@ -965,53 +965,53 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "fonttools" -version = "4.49.0" +version = "4.50.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.49.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d970ecca0aac90d399e458f0b7a8a597e08f95de021f17785fb68e2dc0b99717"}, - {file = "fonttools-4.49.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac9a745b7609f489faa65e1dc842168c18530874a5f5b742ac3dd79e26bca8bc"}, - {file = "fonttools-4.49.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ba0e00620ca28d4ca11fc700806fd69144b463aa3275e1b36e56c7c09915559"}, - {file = "fonttools-4.49.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdee3ab220283057e7840d5fb768ad4c2ebe65bdba6f75d5d7bf47f4e0ed7d29"}, - {file = "fonttools-4.49.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ce7033cb61f2bb65d8849658d3786188afd80f53dad8366a7232654804529532"}, - {file = "fonttools-4.49.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:07bc5ea02bb7bc3aa40a1eb0481ce20e8d9b9642a9536cde0218290dd6085828"}, - {file = "fonttools-4.49.0-cp310-cp310-win32.whl", hash = "sha256:86eef6aab7fd7c6c8545f3ebd00fd1d6729ca1f63b0cb4d621bccb7d1d1c852b"}, - {file = "fonttools-4.49.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fac1b7eebfce75ea663e860e7c5b4a8831b858c17acd68263bc156125201abf"}, - {file = "fonttools-4.49.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:edc0cce355984bb3c1d1e89d6a661934d39586bb32191ebff98c600f8957c63e"}, - {file = "fonttools-4.49.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:83a0d9336de2cba86d886507dd6e0153df333ac787377325a39a2797ec529814"}, - {file = "fonttools-4.49.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36c8865bdb5cfeec88f5028e7e592370a0657b676c6f1d84a2108e0564f90e22"}, - {file = "fonttools-4.49.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33037d9e56e2562c710c8954d0f20d25b8386b397250d65581e544edc9d6b942"}, - {file = "fonttools-4.49.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8fb022d799b96df3eaa27263e9eea306bd3d437cc9aa981820850281a02b6c9a"}, - {file = "fonttools-4.49.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33c584c0ef7dc54f5dd4f84082eabd8d09d1871a3d8ca2986b0c0c98165f8e86"}, - {file = "fonttools-4.49.0-cp311-cp311-win32.whl", hash = "sha256:cbe61b158deb09cffdd8540dc4a948d6e8f4d5b4f3bf5cd7db09bd6a61fee64e"}, - {file = 
"fonttools-4.49.0-cp311-cp311-win_amd64.whl", hash = "sha256:fc11e5114f3f978d0cea7e9853627935b30d451742eeb4239a81a677bdee6bf6"}, - {file = "fonttools-4.49.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d647a0e697e5daa98c87993726da8281c7233d9d4ffe410812a4896c7c57c075"}, - {file = "fonttools-4.49.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f3bbe672df03563d1f3a691ae531f2e31f84061724c319652039e5a70927167e"}, - {file = "fonttools-4.49.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bebd91041dda0d511b0d303180ed36e31f4f54b106b1259b69fade68413aa7ff"}, - {file = "fonttools-4.49.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4145f91531fd43c50f9eb893faa08399816bb0b13c425667c48475c9f3a2b9b5"}, - {file = "fonttools-4.49.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ea329dafb9670ffbdf4dbc3b0e5c264104abcd8441d56de77f06967f032943cb"}, - {file = "fonttools-4.49.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c076a9e548521ecc13d944b1d261ff3d7825048c338722a4bd126d22316087b7"}, - {file = "fonttools-4.49.0-cp312-cp312-win32.whl", hash = "sha256:b607ea1e96768d13be26d2b400d10d3ebd1456343eb5eaddd2f47d1c4bd00880"}, - {file = "fonttools-4.49.0-cp312-cp312-win_amd64.whl", hash = "sha256:a974c49a981e187381b9cc2c07c6b902d0079b88ff01aed34695ec5360767034"}, - {file = "fonttools-4.49.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b85ec0bdd7bdaa5c1946398cbb541e90a6dfc51df76dfa88e0aaa41b335940cb"}, - {file = "fonttools-4.49.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:af20acbe198a8a790618ee42db192eb128afcdcc4e96d99993aca0b60d1faeb4"}, - {file = "fonttools-4.49.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d418b1fee41a1d14931f7ab4b92dc0bc323b490e41d7a333eec82c9f1780c75"}, - {file = "fonttools-4.49.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b44a52b8e6244b6548851b03b2b377a9702b88ddc21dcaf56a15a0393d425cb9"}, - {file = "fonttools-4.49.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7c7125068e04a70739dad11857a4d47626f2b0bd54de39e8622e89701836eabd"}, - {file = "fonttools-4.49.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29e89d0e1a7f18bc30f197cfadcbef5a13d99806447c7e245f5667579a808036"}, - {file = "fonttools-4.49.0-cp38-cp38-win32.whl", hash = "sha256:9d95fa0d22bf4f12d2fb7b07a46070cdfc19ef5a7b1c98bc172bfab5bf0d6844"}, - {file = "fonttools-4.49.0-cp38-cp38-win_amd64.whl", hash = "sha256:768947008b4dc552d02772e5ebd49e71430a466e2373008ce905f953afea755a"}, - {file = "fonttools-4.49.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:08877e355d3dde1c11973bb58d4acad1981e6d1140711230a4bfb40b2b937ccc"}, - {file = "fonttools-4.49.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fdb54b076f25d6b0f0298dc706acee5052de20c83530fa165b60d1f2e9cbe3cb"}, - {file = "fonttools-4.49.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0af65c720520710cc01c293f9c70bd69684365c6015cc3671db2b7d807fe51f2"}, - {file = "fonttools-4.49.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f255ce8ed7556658f6d23f6afd22a6d9bbc3edb9b96c96682124dc487e1bf42"}, - {file = "fonttools-4.49.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d00af0884c0e65f60dfaf9340e26658836b935052fdd0439952ae42e44fdd2be"}, - {file = "fonttools-4.49.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:263832fae27481d48dfafcc43174644b6706639661e242902ceb30553557e16c"}, - {file = 
"fonttools-4.49.0-cp39-cp39-win32.whl", hash = "sha256:0404faea044577a01bb82d47a8fa4bc7a54067fa7e324785dd65d200d6dd1133"}, - {file = "fonttools-4.49.0-cp39-cp39-win_amd64.whl", hash = "sha256:b050d362df50fc6e38ae3954d8c29bf2da52be384649ee8245fdb5186b620836"}, - {file = "fonttools-4.49.0-py3-none-any.whl", hash = "sha256:af281525e5dd7fa0b39fb1667b8d5ca0e2a9079967e14c4bfe90fd1cd13e0f18"}, - {file = "fonttools-4.49.0.tar.gz", hash = "sha256:ebf46e7f01b7af7861310417d7c49591a85d99146fc23a5ba82fdb28af156321"}, + {file = "fonttools-4.50.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effd303fb422f8ce06543a36ca69148471144c534cc25f30e5be752bc4f46736"}, + {file = "fonttools-4.50.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7913992ab836f621d06aabac118fc258b9947a775a607e1a737eb3a91c360335"}, + {file = "fonttools-4.50.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0a1c5bd2f63da4043b63888534b52c5a1fd7ae187c8ffc64cbb7ae475b9dab"}, + {file = "fonttools-4.50.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d40fc98540fa5360e7ecf2c56ddf3c6e7dd04929543618fd7b5cc76e66390562"}, + {file = "fonttools-4.50.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fff65fbb7afe137bac3113827855e0204482727bddd00a806034ab0d3951d0d"}, + {file = "fonttools-4.50.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1aeae3dd2ee719074a9372c89ad94f7c581903306d76befdaca2a559f802472"}, + {file = "fonttools-4.50.0-cp310-cp310-win32.whl", hash = "sha256:e9623afa319405da33b43c85cceb0585a6f5d3a1d7c604daf4f7e1dd55c03d1f"}, + {file = "fonttools-4.50.0-cp310-cp310-win_amd64.whl", hash = "sha256:778c5f43e7e654ef7fe0605e80894930bc3a7772e2f496238e57218610140f54"}, + {file = "fonttools-4.50.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3dfb102e7f63b78c832e4539969167ffcc0375b013080e6472350965a5fe8048"}, + {file = "fonttools-4.50.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e58fe34cb379ba3d01d5d319d67dd3ce7ca9a47ad044ea2b22635cd2d1247fc"}, + {file = "fonttools-4.50.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c673ab40d15a442a4e6eb09bf007c1dda47c84ac1e2eecbdf359adacb799c24"}, + {file = "fonttools-4.50.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b3ac35cdcd1a4c90c23a5200212c1bb74fa05833cc7c14291d7043a52ca2aaa"}, + {file = "fonttools-4.50.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8844e7a2c5f7ecf977e82eb6b3014f025c8b454e046d941ece05b768be5847ae"}, + {file = "fonttools-4.50.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f849bd3c5c2249b49c98eca5aaebb920d2bfd92b3c69e84ca9bddf133e9f83f0"}, + {file = "fonttools-4.50.0-cp311-cp311-win32.whl", hash = "sha256:39293ff231b36b035575e81c14626dfc14407a20de5262f9596c2cbb199c3625"}, + {file = "fonttools-4.50.0-cp311-cp311-win_amd64.whl", hash = "sha256:c33d5023523b44d3481624f840c8646656a1def7630ca562f222eb3ead16c438"}, + {file = "fonttools-4.50.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b4a886a6dbe60100ba1cd24de962f8cd18139bd32808da80de1fa9f9f27bf1dc"}, + {file = "fonttools-4.50.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b2ca1837bfbe5eafa11313dbc7edada79052709a1fffa10cea691210af4aa1fa"}, + {file = "fonttools-4.50.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0493dd97ac8977e48ffc1476b932b37c847cbb87fd68673dee5182004906828"}, + {file = "fonttools-4.50.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:77844e2f1b0889120b6c222fc49b2b75c3d88b930615e98893b899b9352a27ea"}, + {file = "fonttools-4.50.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3566bfb8c55ed9100afe1ba6f0f12265cd63a1387b9661eb6031a1578a28bad1"}, + {file = "fonttools-4.50.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:35e10ddbc129cf61775d58a14f2d44121178d89874d32cae1eac722e687d9019"}, + {file = "fonttools-4.50.0-cp312-cp312-win32.whl", hash = "sha256:cc8140baf9fa8f9b903f2b393a6c413a220fa990264b215bf48484f3d0bf8710"}, + {file = "fonttools-4.50.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ccc85fd96373ab73c59833b824d7a73846670a0cb1f3afbaee2b2c426a8f931"}, + {file = "fonttools-4.50.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e270a406219af37581d96c810172001ec536e29e5593aa40d4c01cca3e145aa6"}, + {file = "fonttools-4.50.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac2463de667233372e9e1c7e9de3d914b708437ef52a3199fdbf5a60184f190c"}, + {file = "fonttools-4.50.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47abd6669195abe87c22750dbcd366dc3a0648f1b7c93c2baa97429c4dc1506e"}, + {file = "fonttools-4.50.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:074841375e2e3d559aecc86e1224caf78e8b8417bb391e7d2506412538f21adc"}, + {file = "fonttools-4.50.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0743fd2191ad7ab43d78cd747215b12033ddee24fa1e088605a3efe80d6984de"}, + {file = "fonttools-4.50.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3d7080cce7be5ed65bee3496f09f79a82865a514863197ff4d4d177389e981b0"}, + {file = "fonttools-4.50.0-cp38-cp38-win32.whl", hash = "sha256:a467ba4e2eadc1d5cc1a11d355abb945f680473fbe30d15617e104c81f483045"}, + {file = "fonttools-4.50.0-cp38-cp38-win_amd64.whl", hash = "sha256:f77e048f805e00870659d6318fd89ef28ca4ee16a22b4c5e1905b735495fc422"}, + {file = "fonttools-4.50.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b6245eafd553c4e9a0708e93be51392bd2288c773523892fbd616d33fd2fda59"}, + {file = "fonttools-4.50.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a4062cc7e8de26f1603323ef3ae2171c9d29c8a9f5e067d555a2813cd5c7a7e0"}, + {file = "fonttools-4.50.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34692850dfd64ba06af61e5791a441f664cb7d21e7b544e8f385718430e8f8e4"}, + {file = "fonttools-4.50.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678dd95f26a67e02c50dcb5bf250f95231d455642afbc65a3b0bcdacd4e4dd38"}, + {file = "fonttools-4.50.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f2ce7b0b295fe64ac0a85aef46a0f2614995774bd7bc643b85679c0283287f9"}, + {file = "fonttools-4.50.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d346f4dc2221bfb7ab652d1e37d327578434ce559baf7113b0f55768437fe6a0"}, + {file = "fonttools-4.50.0-cp39-cp39-win32.whl", hash = "sha256:a51eeaf52ba3afd70bf489be20e52fdfafe6c03d652b02477c6ce23c995222f4"}, + {file = "fonttools-4.50.0-cp39-cp39-win_amd64.whl", hash = "sha256:8639be40d583e5d9da67795aa3eeeda0488fb577a1d42ae11a5036f18fb16d93"}, + {file = "fonttools-4.50.0-py3-none-any.whl", hash = "sha256:48fa36da06247aa8282766cfd63efff1bb24e55f020f29a335939ed3844d20d3"}, + {file = "fonttools-4.50.0.tar.gz", hash = "sha256:fa5cf61058c7dbb104c2ac4e782bf1b2016a8cf2f69de6e4dd6a865d2c969bb5"}, ] [package.extras] @@ -1089,13 +1089,13 @@ test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre [[package]] name = "griffe" -version = "0.41.0" +version = "0.42.1" description = "Signatures for entire Python 
programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = true python-versions = ">=3.8" files = [ - {file = "griffe-0.41.0-py3-none-any.whl", hash = "sha256:8aa7fc6eb00cb80af9c0198178c6b7110cb59fa2c5187bb13ea25eebbe4dd928"}, - {file = "griffe-0.41.0.tar.gz", hash = "sha256:850128c3198c18713eaf0a6cc8572e590a16b1965f72a4e871e66cf84740903f"}, + {file = "griffe-0.42.1-py3-none-any.whl", hash = "sha256:7e805e35617601355edcac0d3511cedc1ed0cb1f7645e2d336ae4b05bbae7b3b"}, + {file = "griffe-0.42.1.tar.gz", hash = "sha256:57046131384043ed078692b85d86b76568a686266cc036b9b56b704466f803ce"}, ] [package.dependencies] @@ -1184,32 +1184,32 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.0.2" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, + {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" -version = "6.1.2" +version = "6.3.2" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.1.2-py3-none-any.whl", hash = "sha256:9a0a862501dc38b68adebc82970140c9e4209fc99601782925178f8386339938"}, - {file = "importlib_resources-6.1.2.tar.gz", hash = "sha256:308abf8474e2dba5f867d279237cd4076482c3de7104a40b41426370e891549b"}, + {file = "importlib_resources-6.3.2-py3-none-any.whl", hash = "sha256:f41f4098b16cd140a97d256137cfd943d958219007990b2afb00439fc623f580"}, + {file = "importlib_resources-6.3.2.tar.gz", hash = "sha256:963eb79649252b0160c1afcfe5a1d3fe3ad66edd0a8b114beacffb70c0674223"}, ] [package.dependencies] @@ -1217,7 +1217,7 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +testing = ["jaraco.collections", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler 
(>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] [[package]] name = "inflect" @@ -1425,18 +1425,15 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "json5" -version = "0.9.17" +version = "0.9.24" description = "A Python implementation of the JSON5 data format." optional = false python-versions = ">=3.8" files = [ - {file = "json5-0.9.17-py2.py3-none-any.whl", hash = "sha256:f8ec1ecf985951d70f780f6f877c4baca6a47b6e61e02c4cd190138d10a7805a"}, - {file = "json5-0.9.17.tar.gz", hash = "sha256:717d99d657fa71b7094877b1d921b1cce40ab444389f6d770302563bb7dfd9ae"}, + {file = "json5-0.9.24-py3-none-any.whl", hash = "sha256:4ca101fd5c7cb47960c055ef8f4d0e31e15a7c6c48c3b6f1473fc83b6c462a13"}, + {file = "json5-0.9.24.tar.gz", hash = "sha256:0c638399421da959a20952782800e5c1a78c14e08e1dc9738fa10d8ec14d58c8"}, ] -[package.extras] -dev = ["hypothesis"] - [[package]] name = "jsonpointer" version = "2.4" @@ -1513,13 +1510,13 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "8.6.0" +version = "8.6.1" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, - {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, + {file = "jupyter_client-8.6.1-py3-none-any.whl", hash = "sha256:3b7bd22f058434e3b9a7ea4b1500ed47de2713872288c0d511d19926f99b459f"}, + {file = "jupyter_client-8.6.1.tar.gz", hash = "sha256:e842515e2bab8e19186d89fdfea7abd15e39dd581f94e399f00e2af5a1652d3f"}, ] [package.dependencies] @@ -1560,13 +1557,13 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.7.1" +version = "5.7.2" description = "Jupyter core package. A base package on which Jupyter projects rely." 
optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, - {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, ] [package.dependencies] @@ -1576,17 +1573,17 @@ traitlets = ">=5.3" [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" -version = "0.9.0" +version = "0.10.0" description = "Jupyter Event System library" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_events-0.9.0-py3-none-any.whl", hash = "sha256:d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf"}, - {file = "jupyter_events-0.9.0.tar.gz", hash = "sha256:81ad2e4bc710881ec274d31c6c50669d71bbaa5dd9d01e600b56faa85700d399"}, + {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, + {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, ] [package.dependencies] @@ -1605,13 +1602,13 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p [[package]] name = "jupyter-lsp" -version = "2.2.3" +version = "2.2.4" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter-lsp-2.2.3.tar.gz", hash = "sha256:33dbcbc5df24237ff5c8b696b04ff4689fcd316cb8d4957d620fe5504d7d2c3f"}, - {file = "jupyter_lsp-2.2.3-py3-none-any.whl", hash = "sha256:57dd90d0a2e2530831793550846168c81c952b49e187aa339e455027a5f0fd2e"}, + {file = "jupyter-lsp-2.2.4.tar.gz", hash = "sha256:5e50033149344065348e688608f3c6d654ef06d9856b67655bd7b6bac9ee2d59"}, + {file = "jupyter_lsp-2.2.4-py3-none-any.whl", hash = "sha256:da61cb63a16b6dff5eac55c2699cc36eac975645adee02c41bdfc03bf4802e77"}, ] [package.dependencies] @@ -1620,13 +1617,13 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.12.5" +version = "2.13.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, - {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, + {file = "jupyter_server-2.13.0-py3-none-any.whl", hash = "sha256:77b2b49c3831fbbfbdb5048cef4350d12946191f833a24e5f83e5f8f4803e97b"}, + {file = "jupyter_server-2.13.0.tar.gz", hash = "sha256:c80bfb049ea20053c3d9641c2add4848b38073bf79f1729cea1faed32fc1c78e"}, ] [package.dependencies] @@ -1652,17 +1649,17 @@ websocket-client = "*" [package.extras] docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] [[package]] name = "jupyter-server-terminals" -version = "0.5.2" +version = "0.5.3" description = "A Jupyter Server Extension Providing Terminals." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server_terminals-0.5.2-py3-none-any.whl", hash = "sha256:1b80c12765da979513c42c90215481bbc39bd8ae7c0350b4f85bc3eb58d0fa80"}, - {file = "jupyter_server_terminals-0.5.2.tar.gz", hash = "sha256:396b5ccc0881e550bf0ee7012c6ef1b53edbde69e67cab1d56e89711b46052e8"}, + {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, + {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, ] [package.dependencies] @@ -1675,13 +1672,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.1.2" +version = "4.1.5" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.1.2-py3-none-any.whl", hash = "sha256:aa88193f03cf4d3555f6712f04d74112b5eb85edd7d222c588c7603a26d33c5b"}, - {file = "jupyterlab-4.1.2.tar.gz", hash = "sha256:5d6348b3ed4085181499f621b7dfb6eb0b1f57f3586857aadfc8e3bf4c4885f9"}, + {file = "jupyterlab-4.1.5-py3-none-any.whl", hash = "sha256:3bc843382a25e1ab7bc31d9e39295a9f0463626692b7995597709c0ab236ab2c"}, + {file = "jupyterlab-4.1.5.tar.gz", hash = "sha256:c9ad75290cb10bfaff3624bf3fbb852319b4cce4c456613f8ebbaa98d03524db"}, ] [package.dependencies] @@ -1719,13 +1716,13 @@ files = [ [[package]] name = "jupyterlab-server" -version = "2.25.3" +version = "2.25.4" description = "A set of server components for JupyterLab and JupyterLab like applications." 
optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab_server-2.25.3-py3-none-any.whl", hash = "sha256:c48862519fded9b418c71645d85a49b2f0ec50d032ba8316738e9276046088c1"}, - {file = "jupyterlab_server-2.25.3.tar.gz", hash = "sha256:846f125a8a19656611df5b03e5912c8393cea6900859baa64fa515eb64a8dc40"}, + {file = "jupyterlab_server-2.25.4-py3-none-any.whl", hash = "sha256:eb645ecc8f9b24bac5decc7803b6d5363250e16ec5af814e516bc2c54dd88081"}, + {file = "jupyterlab_server-2.25.4.tar.gz", hash = "sha256:2098198e1e82e0db982440f9b5136175d73bea2cd42a6480aa6fd502cb23c4f9"}, ] [package.dependencies] @@ -1741,7 +1738,7 @@ requests = ">=2.31" [package.extras] docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] [[package]] name = "jupyterlab-widgets" @@ -1939,13 +1936,13 @@ dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptio [[package]] name = "markdown" -version = "3.5.2" +version = "3.6" description = "Python implementation of John Gruber's Markdown." 
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"},
-    {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"},
+    {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"},
+    {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"},
 ]

 [package.dependencies]
@@ -2212,13 +2209,13 @@ mkdocs = ">=1.1"

 [[package]]
 name = "mkdocs-git-authors-plugin"
-version = "0.7.2"
+version = "0.8.0"
 description = "Mkdocs plugin to display git authors of a page"
 optional = true
 python-versions = ">=3.7"
 files = [
-    {file = "mkdocs-git-authors-plugin-0.7.2.tar.gz", hash = "sha256:f541730e4cabdafa0ac758c94d28ba5e8ddca4c859e5de4c89f1226cb6ccd0ad"},
-    {file = "mkdocs_git_authors_plugin-0.7.2-py3-none-any.whl", hash = "sha256:c8a2784a867db79ad3b477a96ee96875d17b09192b6d3be71f08df25afff76c4"},
+    {file = "mkdocs-git-authors-plugin-0.8.0.tar.gz", hash = "sha256:10dfc57fb10d5c3aceb0e5cdea199ac3a7588979f26484eba46d935dc1044c26"},
+    {file = "mkdocs_git_authors_plugin-0.8.0-py3-none-any.whl", hash = "sha256:0614f4f87d31eabd0a0d607c9e0532608fc593997ebee282ec564ee6dc1c041e"},
 ]

 [package.dependencies]
@@ -2284,13 +2281,13 @@ pygments = ">2.12.0"

 [[package]]
 name = "mkdocs-material"
-version = "9.5.12"
+version = "9.5.14"
 description = "Documentation that simply works"
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "mkdocs_material-9.5.12-py3-none-any.whl", hash = "sha256:d6f0c269f015e48c76291cdc79efb70f7b33bbbf42d649cfe475522ebee61b1f"},
-    {file = "mkdocs_material-9.5.12.tar.gz", hash = "sha256:5f69cef6a8aaa4050b812f72b1094fda3d079b9a51cf27a247244c03ec455e97"},
+    {file = "mkdocs_material-9.5.14-py3-none-any.whl", hash = "sha256:a45244ac221fda46ecf8337f00ec0e5cb5348ab9ffb203ca2a0c313b0d4dbc27"},
+    {file = "mkdocs_material-9.5.14.tar.gz", hash = "sha256:2a1f8e67cda2587ab93ecea9ba42d0ca61d1d7b5fad8cf690eeaeb39dcd4b9af"},
 ]

 [package.dependencies]
@@ -2379,13 +2376,13 @@ files = [

 [[package]]
 name = "msal"
-version = "1.27.0"
+version = "1.28.0"
 description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect."
 optional = false
-python-versions = ">=2.7"
+python-versions = ">=3.7"
 files = [
-    {file = "msal-1.27.0-py2.py3-none-any.whl", hash = "sha256:572d07149b83e7343a85a3bcef8e581167b4ac76befcbbb6eef0c0e19643cdc0"},
-    {file = "msal-1.27.0.tar.gz", hash = "sha256:3109503c038ba6b307152b0e8d34f98113f2e7a78986e28d0baf5b5303afda52"},
+    {file = "msal-1.28.0-py3-none-any.whl", hash = "sha256:3064f80221a21cd535ad8c3fafbb3a3582cd9c7e9af0bb789ae14f726a0ca99b"},
+    {file = "msal-1.28.0.tar.gz", hash = "sha256:80bbabe34567cb734efd2ec1869b2d98195c927455369d8077b3c542088c5c9d"},
 ]

 [package.dependencies]
@@ -2398,18 +2395,38 @@ broker = ["pymsalruntime (>=0.13.2,<0.15)"]

 [[package]]
 name = "mypy"
-version = "1.8.0"
+version = "1.9.0"
 description = "Optional static typing for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"},
-    {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"},
-    {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"},
-    {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"},
-    {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"},
-    {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"},
-    {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"},
+    {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"},
+    {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"},
+    {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"},
+    {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"},
+    {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"},
+    {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"},
+    {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"},
+    {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"},
+    {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"},
+    {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"},
+    {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"},
+    {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"},
+    {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"},
+    {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"},
+    {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"},
+    {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"},
+    {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"},
+    {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"},
+    {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"},
+    {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"},
+    {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"},
+    {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"},
+    {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"},
+    {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"},
+    {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"},
+    {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"},
+    {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"},
 ]

 [package.dependencies]
@@ -2436,13 +2453,13 @@ files = [

 [[package]]
 name = "nbclient"
-version = "0.9.0"
+version = "0.10.0"
 description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor."
 optional = false
 python-versions = ">=3.8.0"
 files = [
-    {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"},
-    {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"},
+    {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"},
+    {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"},
 ]

 [package.dependencies]
@@ -2454,17 +2471,17 @@ traitlets = ">=5.4"

 [package.extras]
 dev = ["pre-commit"]
 docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"]
-test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"]
+test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"]

 [[package]]
 name = "nbconvert"
-version = "7.16.1"
+version = "7.16.2"
 description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "nbconvert-7.16.1-py3-none-any.whl", hash = "sha256:3188727dffadfdc9c6a1c7250729063d7bc78b355ad7aa023138afa030d1cd07"},
-    {file = "nbconvert-7.16.1.tar.gz", hash = "sha256:e79e6a074f49ba3ed29428ed86487bf51509d9aab613bd8522ac08f6d28fd7fd"},
+    {file = "nbconvert-7.16.2-py3-none-any.whl", hash = "sha256:0c01c23981a8de0220255706822c40b751438e32467d6a686e26be08ba784382"},
+    {file = "nbconvert-7.16.2.tar.gz", hash = "sha256:8310edd41e1c43947e4ecf16614c61469ebc024898eb808cce0999860fc9fb16"},
 ]

 [package.dependencies]
@@ -2496,13 +2513,13 @@ webpdf = ["playwright"]

 [[package]]
 name = "nbformat"
-version = "5.9.2"
+version = "5.10.3"
 description = "The Jupyter Notebook format"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"},
-    {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"},
+    {file = "nbformat-5.10.3-py3-none-any.whl", hash = "sha256:d9476ca28676799af85385f409b49d95e199951477a159a576ef2a675151e5e8"},
+    {file = "nbformat-5.10.3.tar.gz", hash = "sha256:60ed5e910ef7c6264b87d644f276b1b49e24011930deef54605188ddeb211685"},
 ]

 [package.dependencies]
@@ -2567,13 +2584,13 @@ setuptools = "*"

 [[package]]
 name = "notebook"
-version = "7.1.1"
+version = "7.1.2"
 description = "Jupyter Notebook - A web-based notebook environment for interactive computing"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "notebook-7.1.1-py3-none-any.whl", hash = "sha256:197d8e0595acabf4005851c8716e952a81b405f7aefb648067a761fbde267ce7"},
-    {file = "notebook-7.1.1.tar.gz", hash = "sha256:818e7420fa21f402e726afb9f02df7f3c10f294c02e383ed19852866c316108b"},
+    {file = "notebook-7.1.2-py3-none-any.whl", hash = "sha256:fc6c24b9aef18d0cd57157c9c47e95833b9b0bdc599652639acf0bdb61dc7d5f"},
+    {file = "notebook-7.1.2.tar.gz", hash = "sha256:efc2c80043909e0faa17fce9e9b37c059c03af0ec99a4d4db84cb21d9d2e936a"},
 ]

 [package.dependencies]
@@ -2693,13 +2710,13 @@ files = [

 [[package]]
 name = "packaging"
-version = "23.2"
+version = "24.0"
 description = "Core utilities for Python packages"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
-    {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+    {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
+    {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
 ]

 [[package]]
@@ -2787,13 +2804,13 @@ xml = ["lxml (>=4.9.2)"]

 [[package]]
 name = "pandas-stubs"
-version = "2.2.0.240218"
+version = "2.2.1.240316"
 description = "Type annotations for pandas"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "pandas_stubs-2.2.0.240218-py3-none-any.whl", hash = "sha256:e97478320add9b958391b15a56c5f1bf29da656d5b747d28bbe708454b3a1fe6"},
-    {file = "pandas_stubs-2.2.0.240218.tar.gz", hash = "sha256:63138c12eec715d66d48611bdd922f31cd7c78bcadd19384c3bd61fd3720a11a"},
+    {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"},
+    {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"},
 ]

 [package.dependencies]
@@ -2938,17 +2955,17 @@ xmp = ["defusedxml"]

 [[package]]
 name = "pkginfo"
-version = "1.9.6"
+version = "1.10.0"
 description = "Query metadata from sdists / bdists / installed packages."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "pkginfo-1.9.6-py3-none-any.whl", hash = "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546"},
-    {file = "pkginfo-1.9.6.tar.gz", hash = "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046"},
+    {file = "pkginfo-1.10.0-py3-none-any.whl", hash = "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097"},
+    {file = "pkginfo-1.10.0.tar.gz", hash = "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297"},
 ]

 [package.extras]
-testing = ["pytest", "pytest-cov"]
+testing = ["pytest", "pytest-cov", "wheel"]

 [[package]]
 name = "platformdirs"
@@ -3039,22 +3056,22 @@ wcwidth = "*"

 [[package]]
 name = "protobuf"
-version = "4.25.3"
+version = "5.26.0"
 description = ""
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"},
-    {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"},
-    {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"},
-    {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"},
-    {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"},
-    {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"},
-    {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"},
-    {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"},
-    {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"},
-    {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"},
-    {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
+    {file = "protobuf-5.26.0-cp310-abi3-win32.whl", hash = "sha256:f9ecc8eb6f18037e0cbf43256db0325d4723f429bca7ef5cd358b7c29d65f628"},
+    {file = "protobuf-5.26.0-cp310-abi3-win_amd64.whl", hash = "sha256:dfd29f6eb34107dccf289a93d44fb6b131e68888d090b784b691775ac84e8213"},
+    {file = "protobuf-5.26.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:7e47c57303466c867374a17b2b5e99c5a7c8b72a94118e2f28efb599f19b4069"},
+    {file = "protobuf-5.26.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e184175276edc222e2d5e314a72521e10049938a9a4961fe4bea9b25d073c03f"},
+    {file = "protobuf-5.26.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:6ee9d1aa02f951c5ce10bf8c6cfb7604133773038e33f913183c8b5201350600"},
+    {file = "protobuf-5.26.0-cp38-cp38-win32.whl", hash = "sha256:2c334550e1cb4efac5c8a3987384bf13a4334abaf5ab59e40479e7b70ecd6b19"},
+    {file = "protobuf-5.26.0-cp38-cp38-win_amd64.whl", hash = "sha256:8eef61a90631c21b06b4f492a27e199a269827f046de3bb68b61aa84fcf50905"},
+    {file = "protobuf-5.26.0-cp39-cp39-win32.whl", hash = "sha256:ca825f4eecb8c342d2ec581e6a5ad1ad1a47bededaecd768e0d3451ae4aaac2b"},
+    {file = "protobuf-5.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:efd4f5894c50bd76cbcfdd668cd941021333861ed0f441c78a83d8254a01cc9f"},
+    {file = "protobuf-5.26.0-py3-none-any.whl", hash = "sha256:a49b6c5359bf34fb7bf965bf21abfab4476e4527d822ab5289ee3bf73f291159"},
+    {file = "protobuf-5.26.0.tar.gz", hash = "sha256:82f5870d74c99addfe4152777bdf8168244b9cf0ac65f8eccf045ddfa9d80d9b"},
 ]

 [[package]]
@@ -3123,13 +3140,13 @@ files = [

 [[package]]
 name = "pydantic"
-version = "2.6.3"
+version = "2.6.4"
 description = "Data validation using Python type hints"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"},
-    {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"},
+    {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"},
+    {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"},
 ]

 [package.dependencies]
@@ -3287,13 +3304,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]

 [[package]]
 name = "pymdown-extensions"
-version = "10.7"
+version = "10.7.1"
 description = "Extension pack for Python Markdown."
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "pymdown_extensions-10.7-py3-none-any.whl", hash = "sha256:6ca215bc57bc12bf32b414887a68b810637d039124ed9b2e5bd3325cbb2c050c"},
-    {file = "pymdown_extensions-10.7.tar.gz", hash = "sha256:c0d64d5cf62566f59e6b2b690a4095c931107c250a8c8e1351c1de5f6b036deb"},
+    {file = "pymdown_extensions-10.7.1-py3-none-any.whl", hash = "sha256:f5cc7000d7ff0d1ce9395d216017fa4df3dde800afb1fb72d1c7d3fd35e710f4"},
+    {file = "pymdown_extensions-10.7.1.tar.gz", hash = "sha256:c70e146bdd83c744ffc766b4671999796aba18842b268510a329f7f64700d584"},
 ]

 [package.dependencies]
@@ -3305,13 +3322,13 @@ extra = ["pygments (>=2.12)"]

 [[package]]
 name = "pyparsing"
-version = "3.1.1"
+version = "3.1.2"
 description = "pyparsing module - Classes and methods to define and execute parsing grammars"
 optional = false
 python-versions = ">=3.6.8"
 files = [
-    {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"},
-    {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"},
+    {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
+    {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"},
 ]

 [package.extras]
@@ -3412,13 +3429,13 @@ num = ["numpy", "pandas"]

 [[package]]
 name = "python-dateutil"
-version = "2.9.0"
+version = "2.9.0.post0"
 description = "Extensions to the standard Python datetime module"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
 files = [
-    {file = "python-dateutil-2.9.0.tar.gz", hash = "sha256:78e73e19c63f5b20ffa567001531680d939dc042bf7850431877645523c66709"},
-    {file = "python_dateutil-2.9.0-py2.py3-none-any.whl", hash = "sha256:cbf2f1da5e6083ac2fbfd4da39a25f34312230110440f424a14c7558bb85d82e"},
+    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
 ]

 [package.dependencies]
@@ -3740,13 +3757,13 @@ md = ["cmarkgfm (>=0.8.0)"]

 [[package]]
 name = "referencing"
-version = "0.33.0"
+version = "0.34.0"
 description = "JSON Referencing + Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"},
-    {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"},
+    {file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"},
+    {file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"},
 ]

 [package.dependencies]
@@ -3878,13 +3895,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

 [[package]]
 name = "requests-oauthlib"
-version = "1.3.1"
+version = "1.4.0"
 description = "OAuthlib authentication support for Requests."
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 files = [
-    {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
-    {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"},
+    {file = "requests-oauthlib-1.4.0.tar.gz", hash = "sha256:acee623221e4a39abcbb919312c8ff04bd44e7e417087fb4bd5e2a2f53d5e79a"},
+    {file = "requests_oauthlib-1.4.0-py2.py3-none-any.whl", hash = "sha256:7a3130d94a17520169e38db6c8d75f2c974643788465ecc2e4b36d288bf13033"},
 ]

 [package.dependencies]
@@ -4106,13 +4123,13 @@ win32 = ["pywin32"]

 [[package]]
 name = "sentry-sdk"
-version = "1.40.6"
+version = "1.43.0"
 description = "Python client for Sentry (https://sentry.io)"
 optional = false
 python-versions = "*"
 files = [
-    {file = "sentry-sdk-1.40.6.tar.gz", hash = "sha256:f143f3fb4bb57c90abef6e2ad06b5f6f02b2ca13e4060ec5c0549c7a9ccce3fa"},
-    {file = "sentry_sdk-1.40.6-py2.py3-none-any.whl", hash = "sha256:becda09660df63e55f307570e9817c664392655a7328bbc414b507e9cb874c67"},
+    {file = "sentry-sdk-1.43.0.tar.gz", hash = "sha256:41df73af89d22921d8733714fb0fc5586c3461907e06688e6537d01a27e0e0f6"},
+    {file = "sentry_sdk-1.43.0-py2.py3-none-any.whl", hash = "sha256:8d768724839ca18d7b4c7463ef7528c40b7aa2bfbf7fe554d5f9a7c044acfd36"},
 ]

 [package.dependencies]
@@ -4126,6 +4143,7 @@ asyncpg = ["asyncpg (>=0.23)"]
 beam = ["apache-beam (>=2.12)"]
 bottle = ["bottle (>=0.12.13)"]
 celery = ["celery (>=3)"]
+celery-redbeat = ["celery-redbeat (>=2)"]
 chalice = ["chalice (>=1.16.0)"]
 clickhouse-driver = ["clickhouse-driver (>=0.2.0)"]
 django = ["django (>=1.8)"]
@@ -4136,9 +4154,10 @@ grpcio = ["grpcio (>=1.21.1)"]
 httpx = ["httpx (>=0.16.0)"]
 huey = ["huey (>=2)"]
 loguru = ["loguru (>=0.5)"]
+openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
 opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
 opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"]
-pure-eval = ["asttokens", "executing", "pure_eval"]
+pure-eval = ["asttokens", "executing", "pure-eval"]
 pymongo = ["pymongo (>=3.1)"]
 pyspark = ["pyspark (>=2.4.4)"]
 quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
@@ -4151,18 +4170,18 @@ tornado = ["tornado (>=5)"]

 [[package]]
 name = "setuptools"
-version = "69.1.1"
+version = "69.2.0"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"},
-    {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"},
+    {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"},
+    {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"},
 ]

 [package.extras]
 docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
 testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]

 [[package]]
@@ -4271,13 +4290,13 @@ widechars = ["wcwidth"]

 [[package]]
 name = "terminado"
-version = "0.18.0"
+version = "0.18.1"
 description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "terminado-0.18.0-py3-none-any.whl", hash = "sha256:87b0d96642d0fe5f5abd7783857b9cab167f221a39ff98e3b9619a788a3c0f2e"},
-    {file = "terminado-0.18.0.tar.gz", hash = "sha256:1ea08a89b835dd1b8c0c900d92848147cef2537243361b2e3f4dc15df9b6fded"},
+    {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"},
+    {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"},
 ]

 [package.dependencies]
@@ -4363,18 +4382,18 @@ files = [

 [[package]]
 name = "traitlets"
-version = "5.14.1"
+version = "5.14.2"
 description = "Traitlets Python configuration system"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"},
-    {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"},
+    {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"},
+    {file = "traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"},
 ]

 [package.extras]
 docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
-test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"]
+test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"]

 [[package]]
 name = "twine"
@@ -4424,13 +4443,13 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.

 [[package]]
 name = "types-python-dateutil"
-version = "2.8.19.20240106"
+version = "2.9.0.20240316"
 description = "Typing stubs for python-dateutil"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"},
-    {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"},
+    {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"},
+    {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"},
 ]

 [[package]]
@@ -4446,13 +4465,13 @@ files = [

 [[package]]
 name = "types-pyyaml"
-version = "6.0.12.12"
+version = "6.0.12.20240311"
 description = "Typing stubs for PyYAML"
 optional = false
-python-versions = "*"
+python-versions = ">=3.8"
 files = [
-    {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"},
-    {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"},
+    {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"},
+    {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"},
 ]

 [[package]]
@@ -4649,18 +4668,18 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]

 [[package]]
 name = "zipp"
-version = "3.17.0"
+version = "3.18.1"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
-    {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
+    {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"},
+    {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"},
 ]

 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]

 [extras]
 docs = ["mkdocs", "mkdocs-git-authors-plugin", "mkdocs-git-revision-date-localized-plugin", "mkdocs-gitbook", "mkdocs-glightbox", "mkdocs-jupyter", "mkdocs-material", "mkdocs-material-extensions", "mkdocstrings", "pymdown-extensions"]
@@ -4668,4 +4687,4 @@ docs = ["mkdocs", "mkdocs-git-authors-plugin", "mkdocs-git-revision-date-localiz
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<3.13"
-content-hash = "b86a611864d7e2d06db7d33b9ee3996381af870b37222e64378dcc44208f35a8"
+content-hash = "537de997283120c16932c41b60f771ffad25d677f8f8b24bab964e3577f61ce0"
diff --git a/pyproject.toml b/pyproject.toml
index 9aa47b9e5..2ae13f774 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "cognite-power-ops"
-version = "0.91.2"
+version = "0.91.3"
 description = "SDK for power markets operations on Cognite Data Fusion"
 readme = "README.md"
 authors = ["Cognite "]
@@ -69,7 +69,7 @@ StrEnum = {version = "^0.4.15", python = "<3.11"}
 tomli-w = "^1.0.0"
 tabulate = "^0.9"
 cognite-toolkit = {version="0.1.1"}
-cognite-pygen = {version = ">=0.99.10", extras = ["all"]}
+cognite-pygen = {version = ">=0.99.14", extras = ["all"]}
 mkdocs = {version="*", optional=true}
 mkdocs-jupyter = {version="*", optional=true}