From 05125aeb8f4c95dc4f28f66667ad2ce8ce16f69c Mon Sep 17 00:00:00 2001 From: Julia Graham <47179573+Juliamg@users.noreply.github.com> Date: Tue, 12 Mar 2024 12:56:51 +0100 Subject: [PATCH] changes to accommodate old cogshop model and improved scenario concept (#328) * changes to accommodate old cogshop model and improved scenario concept * erik comments * review comments * remove unused type * Update cognite/powerops/custom_modules/power_model_v1/data_models/containers/Case.container.yaml Co-authored-by: Erik Lien Johnsen * Update cognite/powerops/custom_modules/power_model_v1/data_models/containers/Case.container.yaml Co-authored-by: Erik Lien Johnsen * Update cognite/powerops/custom_modules/power_model_v1/data_models/containers/Case.container.yaml Co-authored-by: Erik Lien Johnsen * review comments on modelTemplate and Case * add missing view Commands to two datamodels * more missing views * last deployed version * fix write_read_instances script and regen pygen sdk * fix name mapping * remove type filter on SHOPResult view and add a TODO with deciding on correct filtering * changelog and version --------- Co-authored-by: Erik Lien Johnsen --- CHANGELOG.md | 9 + cognite/powerops/_version.py | 2 +- .../client/_generated/afrr_bid/_api_client.py | 4 +- .../client/_generated/assets/_api_client.py | 4 +- .../_generated/day_ahead_bid/_api_client.py | 4 +- .../v1/_api/bid_configuration_shop.py | 253 +++++++++ .../_generated/v1/_api/bid_method_custom.py | 17 - .../v1/_api/bid_method_custom_query.py | 25 - .../v1/_api/bid_method_day_ahead.py | 17 - .../v1/_api/bid_method_day_ahead_query.py | 25 - .../v1/_api/bid_method_shop_multi_scenario.py | 143 ++++- .../bid_method_shop_multi_scenario_query.py | 49 +- ...id_method_shop_multi_scenario_scenarios.py | 54 ++ .../v1/_api/bid_method_water_value.py | 17 - .../v1/_api/bid_method_water_value_query.py | 25 - .../client/_generated/v1/_api/case.py | 415 ++++++++++++++ .../client/_generated/v1/_api/case_query.py | 73 +++ 
.../client/_generated/v1/_api/commands.py | 396 +++++++++++++ .../_generated/v1/_api/commands_query.py | 48 ++ .../v1/_api/market_configuration.py | 34 ++ .../_generated/v1/_api/model_template.py | 102 ++-- .../v1/_api/multi_scenario_matrix.py | 24 +- .../v1/_api/multi_scenario_matrix_query.py | 20 +- .../multi_scenario_matrix_scenario_results.py | 54 ++ .../_generated/v1/_api/preprocessor_input.py | 103 +++- .../v1/_api/preprocessor_input_query.py | 20 +- .../_generated/v1/_api/preprocessor_output.py | 34 +- .../v1/_api/preprocessor_output_query.py | 28 +- .../_generated/v1/_api/price_prod_case.py | 362 ++++++++++++ .../v1/_api/price_prod_case_price.py | 501 ++++++++++++++++ .../v1/_api/price_prod_case_production.py | 503 +++++++++++++++++ .../v1/_api/price_prod_case_query.py | 73 +++ .../client/_generated/v1/_api/scenario.py | 239 +------- .../_generated/v1/_api/scenario_query.py | 26 + .../shop_partial_bid_calculation_input.py | 59 +- ...hop_partial_bid_calculation_input_query.py | 69 +-- ...alculation_input_shop_result_price_prod.py | 54 ++ .../client/_generated/v1/_api/shop_result.py | 48 +- .../v1/_api/shop_result_output_timeseries.py | 505 +++++++++++++++++ .../v1/_api/shop_result_price_prod.py | 427 ++++++++++++++ .../v1/_api/shop_result_price_prod_alerts.py | 54 ++ ...hop_result_price_prod_output_timeseries.py | 513 +++++++++++++++++ ...result_price_prod_production_timeseries.py | 54 ++ .../v1/_api/shop_result_price_prod_query.py | 195 +++++++ .../_generated/v1/_api/shop_result_query.py | 54 +- .../_generated/v1/_api/shop_time_series.py | 500 ++++++++++++++++ .../v1/_api/shop_time_series_query.py | 48 ++ .../v1/_api/shop_time_series_timeseries.py | 533 ++++++++++++++++++ .../client/_generated/v1/_api_client.py | 59 +- .../_generated/v1/data_classes/__init__.py | 135 +++-- .../data_classes/_bid_configuration_shop.py | 26 + .../v1/data_classes/_bid_method_custom.py | 52 +- .../v1/data_classes/_bid_method_day_ahead.py | 54 +- 
.../_bid_method_shop_multi_scenario.py | 129 +++-- .../data_classes/_bid_method_water_value.py | 52 +- .../_generated/v1/data_classes/_case.py | 312 ++++++++++ .../_generated/v1/data_classes/_commands.py | 183 ++++++ .../v1/data_classes/_market_configuration.py | 20 +- .../v1/data_classes/_model_template.py | 69 +-- .../v1/data_classes/_multi_scenario_matrix.py | 30 +- .../v1/data_classes/_preprocessor_input.py | 94 ++- .../v1/data_classes/_preprocessor_output.py | 47 +- .../v1/data_classes/_price_prod_case.py | 255 +++++++++ .../_generated/v1/data_classes/_scenario.py | 213 ++----- .../_shop_partial_bid_calculation_input.py | 67 ++- .../v1/data_classes/_shop_result.py | 176 +++--- .../data_classes/_shop_result_price_prod.py | 362 ++++++++++++ .../v1/data_classes/_shop_time_series.py | 249 ++++++++ .../data_models/all_PowerOps.datamodel.yaml | 32 +- ...ompute_DayAheadBenchmarking.datamodel.yaml | 2 +- .../compute_SHOPBasedDayAhead.datamodel.yaml | 24 +- ...compute_TotalBidCalculation.datamodel.yaml | 22 +- ...ute_WaterValueBasedDayAhead.datamodel.yaml | 4 - ...onfig_DayAheadConfiguration.datamodel.yaml | 16 +- .../BidConfiguration.container.yaml | 8 + .../containers/BidMethod.container.yaml | 28 + .../containers/Case.container.yaml | 68 +++ .../containers/CommandsConfig.container.yaml | 13 + .../containers/FunctionData.container.yaml | 6 + .../MarketConfiguration.container.yaml | 8 + .../containers/ModelTemplate.container.yaml | 27 +- .../MultiScenarioMatrix.container.yaml | 26 - .../containers/PriceScenario.container.yaml | 28 - .../containers/SHOPResult.container.yaml | 108 ++++ .../containers/SHOPTimeSeries.container.yaml | 51 ++ .../containers/Scenario.container.yaml | 100 +--- .../data_models/frontend_Asset.datamodel.yaml | 4 - .../frontend_DayAheadBid.datamodel.yaml | 18 +- .../power-ops-types.powerops_nodes.yaml | 44 +- .../product_CogShop.datamodel.yaml | 30 + .../views/MarketConfiguration.view.yaml | 8 + .../data_models/views/PriceScenario.view.yaml | 41 
-- .../data_models/views/SHOPResult.view.yaml | 88 --- .../BidConfigurationSHOP.view.yaml | 8 + .../bid_matrix/MultiScenarioMatrix.view.yaml | 12 +- .../1.interface.BidMethodDayAhead.view.yaml | 15 +- .../BidMethodSHOPMultiScenario.view.yaml | 34 +- .../data_models/views/cogshop/Case.view.yaml | 93 +++ .../views/cogshop/Commands.view.yaml | 33 ++ .../views/{ => cogshop}/Mapping.view.yaml | 0 .../{ => cogshop}/ModelTemplate.view.yaml | 24 +- .../views/cogshop/Scenario.view.yaml | 73 +++ .../PreprocessorInput.view.yaml | 25 +- .../ShopPartialBidCalculationInput.view.yaml | 31 +- .../PreprocessorOutput.view.yaml | 8 +- .../1-interface.ScenarioBase.view.yaml | 141 ----- .../views/scenario/Scenario.view.yaml | 29 - .../views/scenario/ScenarioRaw.view.yaml | 29 - .../1-interface.SHOPResult.view.yaml | 93 +++ .../views/shop_result/PriceProdCase.view.yaml | 46 ++ .../shop_result/SHOPResultPriceProd.view.yaml | 55 ++ .../shop_result/SHOPTimeSeries.view.yaml | 57 ++ .../ShopObjectiveValue.view.yaml | 0 pyproject.toml | 2 +- scripts/write_read_instances.py | 7 - 115 files changed, 8996 insertions(+), 2023 deletions(-) create mode 100644 cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario_scenarios.py create mode 100644 cognite/powerops/client/_generated/v1/_api/case.py create mode 100644 cognite/powerops/client/_generated/v1/_api/case_query.py create mode 100644 cognite/powerops/client/_generated/v1/_api/commands.py create mode 100644 cognite/powerops/client/_generated/v1/_api/commands_query.py create mode 100644 cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix_scenario_results.py create mode 100644 cognite/powerops/client/_generated/v1/_api/price_prod_case.py create mode 100644 cognite/powerops/client/_generated/v1/_api/price_prod_case_price.py create mode 100644 cognite/powerops/client/_generated/v1/_api/price_prod_case_production.py create mode 100644 cognite/powerops/client/_generated/v1/_api/price_prod_case_query.py create mode 
100644 cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input_shop_result_price_prod.py create mode 100644 cognite/powerops/client/_generated/v1/_api/shop_result_output_timeseries.py create mode 100644 cognite/powerops/client/_generated/v1/_api/shop_result_price_prod.py create mode 100644 cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_alerts.py create mode 100644 cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_output_timeseries.py create mode 100644 cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_production_timeseries.py create mode 100644 cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_query.py create mode 100644 cognite/powerops/client/_generated/v1/_api/shop_time_series.py create mode 100644 cognite/powerops/client/_generated/v1/_api/shop_time_series_query.py create mode 100644 cognite/powerops/client/_generated/v1/_api/shop_time_series_timeseries.py create mode 100644 cognite/powerops/client/_generated/v1/data_classes/_case.py create mode 100644 cognite/powerops/client/_generated/v1/data_classes/_commands.py create mode 100644 cognite/powerops/client/_generated/v1/data_classes/_price_prod_case.py create mode 100644 cognite/powerops/client/_generated/v1/data_classes/_shop_result_price_prod.py create mode 100644 cognite/powerops/client/_generated/v1/data_classes/_shop_time_series.py create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/containers/Case.container.yaml create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/containers/CommandsConfig.container.yaml delete mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/containers/MultiScenarioMatrix.container.yaml delete mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/containers/PriceScenario.container.yaml create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/containers/SHOPResult.container.yaml create mode 
100644 cognite/powerops/custom_modules/power_model_v1/data_models/containers/SHOPTimeSeries.container.yaml create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/product_CogShop.datamodel.yaml delete mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/PriceScenario.view.yaml delete mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/SHOPResult.view.yaml create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Case.view.yaml create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Commands.view.yaml rename cognite/powerops/custom_modules/power_model_v1/data_models/views/{ => cogshop}/Mapping.view.yaml (100%) rename cognite/powerops/custom_modules/power_model_v1/data_models/views/{ => cogshop}/ModelTemplate.view.yaml (78%) create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Scenario.view.yaml delete mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/1-interface.ScenarioBase.view.yaml delete mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/Scenario.view.yaml delete mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/ScenarioRaw.view.yaml create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/1-interface.SHOPResult.view.yaml create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/PriceProdCase.view.yaml create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/SHOPResultPriceProd.view.yaml create mode 100644 cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/SHOPTimeSeries.view.yaml rename cognite/powerops/custom_modules/power_model_v1/data_models/views/{ => shop_result}/ShopObjectiveValue.view.yaml (100%) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index 2c1298da6..bf3be192b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,15 @@ Changes are grouped as follows - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. +## [0.90.0] - 2024-03-12 +### Changed +* `v1` data model with domain informed changes + - Added a separate cogshop data model + - Changes to `Scenario` type to accommodate for `Incremental mappings`. Preprocessor will now take in a `Scenario` and + output a `Case` + - Added wrapper for SHOP output timeseries - SHOPTimeSeries + - Added interface for a generic SHOP result - SHOPResult + ## [0.88.9] - 2024-03-07 ### Fixed * Transformation `AddWaterInTransit` apply returns input time series if discharge is empty diff --git a/cognite/powerops/_version.py b/cognite/powerops/_version.py index 55b6baac9..5167a0157 100644 --- a/cognite/powerops/_version.py +++ b/cognite/powerops/_version.py @@ -1 +1 @@ -__version__ = "0.88.9" +__version__ = "0.90.0" diff --git a/cognite/powerops/client/_generated/afrr_bid/_api_client.py b/cognite/powerops/client/_generated/afrr_bid/_api_client.py index bd1ea1f47..4d2e4e044 100644 --- a/cognite/powerops/client/_generated/afrr_bid/_api_client.py +++ b/cognite/powerops/client/_generated/afrr_bid/_api_client.py @@ -24,8 +24,8 @@ class AFRRBidAPI: Generated with: pygen = 0.99.11 - cognite-sdk = 7.20.0 - pydantic = 2.6.1 + cognite-sdk = 7.26.0 + pydantic = 2.6.3 Data Model: space: power-ops-afrr-bid diff --git a/cognite/powerops/client/_generated/assets/_api_client.py b/cognite/powerops/client/_generated/assets/_api_client.py index 1d59d7895..0a4581f82 100644 --- a/cognite/powerops/client/_generated/assets/_api_client.py +++ b/cognite/powerops/client/_generated/assets/_api_client.py @@ -27,8 +27,8 @@ class PowerAssetAPI: Generated with: pygen = 0.99.11 - cognite-sdk = 7.20.0 - pydantic = 2.6.1 + cognite-sdk = 7.26.0 + pydantic = 2.6.3 Data Model: space: power-ops-assets diff --git 
a/cognite/powerops/client/_generated/day_ahead_bid/_api_client.py b/cognite/powerops/client/_generated/day_ahead_bid/_api_client.py index d3c09333f..924c01d26 100644 --- a/cognite/powerops/client/_generated/day_ahead_bid/_api_client.py +++ b/cognite/powerops/client/_generated/day_ahead_bid/_api_client.py @@ -30,8 +30,8 @@ class DayAheadBidAPI: Generated with: pygen = 0.99.11 - cognite-sdk = 7.20.0 - pydantic = 2.6.1 + cognite-sdk = 7.26.0 + pydantic = 2.6.3 Data Model: space: power-ops-day-ahead-bid diff --git a/cognite/powerops/client/_generated/v1/_api/bid_configuration_shop.py b/cognite/powerops/client/_generated/v1/_api/bid_configuration_shop.py index 37a77bb18..987350ca1 100644 --- a/cognite/powerops/client/_generated/v1/_api/bid_configuration_shop.py +++ b/cognite/powerops/client/_generated/v1/_api/bid_configuration_shop.py @@ -6,6 +6,7 @@ from cognite.client import CogniteClient from cognite.client import data_modeling as dm +from cognite.client.data_classes.data_modeling.instances import InstanceAggregationResultList from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE from cognite.powerops.client._generated.v1.data_classes import ( @@ -14,10 +15,13 @@ ResourcesWriteResult, BidConfigurationShop, BidConfigurationShopWrite, + BidConfigurationShopFields, BidConfigurationShopList, BidConfigurationShopWriteList, + BidConfigurationShopTextFields, ) from cognite.powerops.client._generated.v1.data_classes._bid_configuration_shop import ( + _BIDCONFIGURATIONSHOP_PROPERTIES_BY_FIELD, _create_bid_configuration_shop_filter, ) from ._core import ( @@ -52,6 +56,8 @@ def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainMo def __call__( self, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, method: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, price_area: str | 
tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, @@ -63,6 +69,8 @@ def __call__( Args: market_configuration: The market configuration to filter on. + name: The name to filter on. + name_prefix: The prefix of the name to filter on. method: The method to filter on. price_area: The price area to filter on. external_id_prefix: The prefix of the external ID to filter on. @@ -78,6 +86,8 @@ def __call__( filter_ = _create_bid_configuration_shop_filter( self._view_id, market_configuration, + name, + name_prefix, method, price_area, external_id_prefix, @@ -213,9 +223,248 @@ def retrieve( ], ) + def search( + self, + query: str, + properties: BidConfigurationShopTextFields | Sequence[BidConfigurationShopTextFields] | None = None, + market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, + method: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_area: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> BidConfigurationShopList: + """Search bid configuration shops + + Args: + query: The search query, + properties: The property to search, if nothing is passed all text fields will be searched. + market_configuration: The market configuration to filter on. + name: The name to filter on. + name_prefix: The prefix of the name to filter on. + method: The method to filter on. + price_area: The price area to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of bid configuration shops to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
+ filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Search results bid configuration shops matching the query. + + Examples: + + Search for 'my_bid_configuration_shop' in all text properties: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> bid_configuration_shops = client.bid_configuration_shop.search('my_bid_configuration_shop') + + """ + filter_ = _create_bid_configuration_shop_filter( + self._view_id, + market_configuration, + name, + name_prefix, + method, + price_area, + external_id_prefix, + space, + filter, + ) + return self._search(self._view_id, query, _BIDCONFIGURATIONSHOP_PROPERTIES_BY_FIELD, properties, filter_, limit) + + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: BidConfigurationShopFields | Sequence[BidConfigurationShopFields] | None = None, + group_by: None = None, + query: str | None = None, + search_properties: BidConfigurationShopTextFields | Sequence[BidConfigurationShopTextFields] | None = None, + market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, + method: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_area: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue]: ... 
+ + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: BidConfigurationShopFields | Sequence[BidConfigurationShopFields] | None = None, + group_by: BidConfigurationShopFields | Sequence[BidConfigurationShopFields] = None, + query: str | None = None, + search_properties: BidConfigurationShopTextFields | Sequence[BidConfigurationShopTextFields] | None = None, + market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, + method: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_area: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> InstanceAggregationResultList: ... 
+ + def aggregate( + self, + aggregate: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: BidConfigurationShopFields | Sequence[BidConfigurationShopFields] | None = None, + group_by: BidConfigurationShopFields | Sequence[BidConfigurationShopFields] | None = None, + query: str | None = None, + search_property: BidConfigurationShopTextFields | Sequence[BidConfigurationShopTextFields] | None = None, + market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, + method: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_area: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue] | InstanceAggregationResultList: + """Aggregate data across bid configuration shops + + Args: + aggregate: The aggregation to perform. + property: The property to perform aggregation on. + group_by: The property to group by when doing the aggregation. + query: The query to search for in the text field. + search_property: The text field to search in. + market_configuration: The market configuration to filter on. + name: The name to filter on. + name_prefix: The prefix of the name to filter on. + method: The method to filter on. + price_area: The price area to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of bid configuration shops to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
+ filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Aggregation results. + + Examples: + + Count bid configuration shops in space `my_space`: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> result = client.bid_configuration_shop.aggregate("count", space="my_space") + + """ + + filter_ = _create_bid_configuration_shop_filter( + self._view_id, + market_configuration, + name, + name_prefix, + method, + price_area, + external_id_prefix, + space, + filter, + ) + return self._aggregate( + self._view_id, + aggregate, + _BIDCONFIGURATIONSHOP_PROPERTIES_BY_FIELD, + property, + group_by, + query, + search_property, + limit, + filter_, + ) + + def histogram( + self, + property: BidConfigurationShopFields, + interval: float, + query: str | None = None, + search_property: BidConfigurationShopTextFields | Sequence[BidConfigurationShopTextFields] | None = None, + market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, + method: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_area: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> dm.aggregations.HistogramValue: + """Produces histograms for bid configuration shops + + Args: + property: The property to use as the value in the histogram. + interval: The interval to use for the histogram bins. + query: The query to search for in the text field. + search_property: The text field to search in. + market_configuration: The market configuration to filter on. + name: The name to filter on. 
+ name_prefix: The prefix of the name to filter on. + method: The method to filter on. + price_area: The price area to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of bid configuration shops to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Bucketed histogram results. + + """ + filter_ = _create_bid_configuration_shop_filter( + self._view_id, + market_configuration, + name, + name_prefix, + method, + price_area, + external_id_prefix, + space, + filter, + ) + return self._histogram( + self._view_id, + property, + interval, + _BIDCONFIGURATIONSHOP_PROPERTIES_BY_FIELD, + query, + search_property, + limit, + filter_, + ) + def list( self, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, method: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, price_area: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, @@ -228,6 +477,8 @@ def list( Args: market_configuration: The market configuration to filter on. + name: The name to filter on. + name_prefix: The prefix of the name to filter on. method: The method to filter on. price_area: The price area to filter on. external_id_prefix: The prefix of the external ID to filter on. 
@@ -251,6 +502,8 @@ def list( filter_ = _create_bid_configuration_shop_filter( self._view_id, market_configuration, + name, + name_prefix, method, price_area, external_id_prefix, diff --git a/cognite/powerops/client/_generated/v1/_api/bid_method_custom.py b/cognite/powerops/client/_generated/v1/_api/bid_method_custom.py index 98422bfcd..be3bfe7ee 100644 --- a/cognite/powerops/client/_generated/v1/_api/bid_method_custom.py +++ b/cognite/powerops/client/_generated/v1/_api/bid_method_custom.py @@ -53,7 +53,6 @@ def __call__( self, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, @@ -64,7 +63,6 @@ def __call__( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method customs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -79,7 +77,6 @@ def __call__( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, (filter and dm.filters.And(filter, has_data)) or has_data, @@ -195,7 +192,6 @@ def search( properties: BidMethodCustomTextFields | Sequence[BidMethodCustomTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -208,7 +204,6 @@ def search( properties: The property to search, if nothing is passed all text fields will be searched. name: The name to filter on. name_prefix: The prefix of the name to filter on. 
- main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method customs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -230,7 +225,6 @@ def search( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, @@ -252,7 +246,6 @@ def aggregate( search_properties: BidMethodCustomTextFields | Sequence[BidMethodCustomTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -274,7 +267,6 @@ def aggregate( search_properties: BidMethodCustomTextFields | Sequence[BidMethodCustomTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -295,7 +287,6 @@ def aggregate( search_property: BidMethodCustomTextFields | Sequence[BidMethodCustomTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -311,7 +302,6 @@ def aggregate( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method customs to return. Defaults to 25. 
Set to -1, float("inf") or None to return all items. @@ -334,7 +324,6 @@ def aggregate( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, @@ -359,7 +348,6 @@ def histogram( search_property: BidMethodCustomTextFields | Sequence[BidMethodCustomTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -374,7 +362,6 @@ def histogram( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method customs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -388,7 +375,6 @@ def histogram( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, @@ -408,7 +394,6 @@ def list( self, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -419,7 +404,6 @@ def list( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method customs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -441,7 +425,6 @@ def list( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, diff --git a/cognite/powerops/client/_generated/v1/_api/bid_method_custom_query.py b/cognite/powerops/client/_generated/v1/_api/bid_method_custom_query.py index 7e6849fae..7e72de760 100644 --- a/cognite/powerops/client/_generated/v1/_api/bid_method_custom_query.py +++ b/cognite/powerops/client/_generated/v1/_api/bid_method_custom_query.py @@ -8,7 +8,6 @@ from cognite.powerops.client._generated.v1.data_classes import ( DomainModelCore, BidMethodCustom, - Mapping, ) from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter @@ -39,35 +38,11 @@ def __init__( def query( self, - retrieve_main_scenario: bool = False, ) -> T_DomainModelList: """Execute query and return the result. - Args: - retrieve_main_scenario: Whether to retrieve the main scenario for each bid method custom or not. - Returns: The list of the source nodes of the query. 
""" - from_ = self._builder[-1].name - if retrieve_main_scenario: - self._query_append_main_scenario(from_) return self._query() - - def _query_append_main_scenario(self, from_: str) -> None: - view_id = self._view_by_read_class[Mapping] - self._builder.append( - QueryStep( - name=self._builder.next_name("main_scenario"), - expression=dm.query.NodeResultSetExpression( - filter=dm.filters.HasData(views=[view_id]), - from_=from_, - through=self._view_by_read_class[BidMethodCustom].as_property_ref("mainScenario"), - direction="outwards", - ), - select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), - max_retrieve_limit=-1, - result_cls=Mapping, - ), - ) diff --git a/cognite/powerops/client/_generated/v1/_api/bid_method_day_ahead.py b/cognite/powerops/client/_generated/v1/_api/bid_method_day_ahead.py index 3519873f2..af1bd21a3 100644 --- a/cognite/powerops/client/_generated/v1/_api/bid_method_day_ahead.py +++ b/cognite/powerops/client/_generated/v1/_api/bid_method_day_ahead.py @@ -53,7 +53,6 @@ def __call__( self, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, @@ -64,7 +63,6 @@ def __call__( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method day aheads to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -79,7 +77,6 @@ def __call__( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, (filter and dm.filters.And(filter, has_data)) or has_data, @@ -195,7 +192,6 @@ def search( properties: BidMethodDayAheadTextFields | Sequence[BidMethodDayAheadTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -208,7 +204,6 @@ def search( properties: The property to search, if nothing is passed all text fields will be searched. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method day aheads to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -230,7 +225,6 @@ def search( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, @@ -252,7 +246,6 @@ def aggregate( search_properties: BidMethodDayAheadTextFields | Sequence[BidMethodDayAheadTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -274,7 +267,6 @@ def aggregate( search_properties: BidMethodDayAheadTextFields | Sequence[BidMethodDayAheadTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -295,7 +287,6 @@ def aggregate( search_property: BidMethodDayAheadTextFields | Sequence[BidMethodDayAheadTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -311,7 +302,6 @@ def aggregate( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method day aheads to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -334,7 +324,6 @@ def aggregate( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, @@ -359,7 +348,6 @@ def histogram( search_property: BidMethodDayAheadTextFields | Sequence[BidMethodDayAheadTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -374,7 +362,6 @@ def histogram( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method day aheads to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -388,7 +375,6 @@ def histogram( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, @@ -408,7 +394,6 @@ def list( self, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -419,7 +404,6 @@ def list( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method day aheads to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -441,7 +425,6 @@ def list( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, diff --git a/cognite/powerops/client/_generated/v1/_api/bid_method_day_ahead_query.py b/cognite/powerops/client/_generated/v1/_api/bid_method_day_ahead_query.py index be60a8b52..2d7102ed8 100644 --- a/cognite/powerops/client/_generated/v1/_api/bid_method_day_ahead_query.py +++ b/cognite/powerops/client/_generated/v1/_api/bid_method_day_ahead_query.py @@ -8,7 +8,6 @@ from cognite.powerops.client._generated.v1.data_classes import ( DomainModelCore, BidMethodDayAhead, - Mapping, ) from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter @@ -39,35 +38,11 @@ def __init__( def query( self, - retrieve_main_scenario: bool = False, ) -> T_DomainModelList: """Execute query and return the result. - Args: - retrieve_main_scenario: Whether to retrieve the main scenario for each bid method day ahead or not. - Returns: The list of the source nodes of the query. 
""" - from_ = self._builder[-1].name - if retrieve_main_scenario: - self._query_append_main_scenario(from_) return self._query() - - def _query_append_main_scenario(self, from_: str) -> None: - view_id = self._view_by_read_class[Mapping] - self._builder.append( - QueryStep( - name=self._builder.next_name("main_scenario"), - expression=dm.query.NodeResultSetExpression( - filter=dm.filters.HasData(views=[view_id]), - from_=from_, - through=self._view_by_read_class[BidMethodDayAhead].as_property_ref("mainScenario"), - direction="outwards", - ), - select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), - max_retrieve_limit=-1, - result_cls=Mapping, - ), - ) diff --git a/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario.py b/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario.py index 9c7d7a88d..6e4e0a42d 100644 --- a/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario.py +++ b/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario.py @@ -33,7 +33,7 @@ QueryStep, QueryBuilder, ) -from .bid_method_shop_multi_scenario_price_scenarios import BidMethodSHOPMultiScenarioPriceScenariosAPI +from .bid_method_shop_multi_scenario_scenarios import BidMethodSHOPMultiScenarioScenariosAPI from .bid_method_shop_multi_scenario_query import BidMethodSHOPMultiScenarioQueryAPI @@ -51,13 +51,18 @@ def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainMo view_by_read_class=view_by_read_class, ) self._view_id = view_id - self.price_scenarios_edge = BidMethodSHOPMultiScenarioPriceScenariosAPI(client) + self.scenarios_edge = BidMethodSHOPMultiScenarioScenariosAPI(client) def __call__( self, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + shop_start_specification: str | list[str] | None = None, + shop_start_specification_prefix: str | None = None, + 
shop_end_specification: str | list[str] | None = None, + shop_end_specification_prefix: str | None = None, + shop_bid_date_specification: str | list[str] | None = None, + shop_bid_date_specification_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, @@ -68,7 +73,12 @@ def __call__( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. + shop_start_specification: The shop start specification to filter on. + shop_start_specification_prefix: The prefix of the shop start specification to filter on. + shop_end_specification: The shop end specification to filter on. + shop_end_specification_prefix: The prefix of the shop end specification to filter on. + shop_bid_date_specification: The shop bid date specification to filter on. + shop_bid_date_specification_prefix: The prefix of the shop bid date specification to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method shop multi scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -83,7 +93,12 @@ def __call__( self._view_id, name, name_prefix, - main_scenario, + shop_start_specification, + shop_start_specification_prefix, + shop_end_specification, + shop_end_specification_prefix, + shop_bid_date_specification, + shop_bid_date_specification_prefix, external_id_prefix, space, (filter and dm.filters.And(filter, has_data)) or has_data, @@ -100,7 +115,7 @@ def apply( """Add or update (upsert) bid method shop multi scenarios. Note: This method iterates through all nodes and timeseries linked to bid_method_shop_multi_scenario and creates them including the edges - between the nodes. For example, if any of `price_scenarios` are set, then these + between the nodes. 
For example, if any of `scenarios` are set, then these nodes as well as any nodes linked to them, and all the edges linking these nodes will be created. Args: @@ -201,11 +216,11 @@ def retrieve( retrieve_edges=True, edge_api_name_type_direction_view_id_penta=[ ( - self.price_scenarios_edge, - "price_scenarios", - dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.priceScenarios"), + self.scenarios_edge, + "scenarios", + dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.scenarios"), "outwards", - dm.ViewId("sp_powerops_models", "Mapping", "1"), + dm.ViewId("sp_powerops_models", "Scenario", "1"), ), ], ) @@ -216,7 +231,12 @@ def search( properties: BidMethodSHOPMultiScenarioTextFields | Sequence[BidMethodSHOPMultiScenarioTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + shop_start_specification: str | list[str] | None = None, + shop_start_specification_prefix: str | None = None, + shop_end_specification: str | list[str] | None = None, + shop_end_specification_prefix: str | None = None, + shop_bid_date_specification: str | list[str] | None = None, + shop_bid_date_specification_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -229,7 +249,12 @@ def search( properties: The property to search, if nothing is passed all text fields will be searched. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. + shop_start_specification: The shop start specification to filter on. + shop_start_specification_prefix: The prefix of the shop start specification to filter on. + shop_end_specification: The shop end specification to filter on. + shop_end_specification_prefix: The prefix of the shop end specification to filter on. 
+ shop_bid_date_specification: The shop bid date specification to filter on. + shop_bid_date_specification_prefix: The prefix of the shop bid date specification to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method shop multi scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -251,7 +276,12 @@ def search( self._view_id, name, name_prefix, - main_scenario, + shop_start_specification, + shop_start_specification_prefix, + shop_end_specification, + shop_end_specification_prefix, + shop_bid_date_specification, + shop_bid_date_specification_prefix, external_id_prefix, space, filter, @@ -277,7 +307,12 @@ def aggregate( ) = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + shop_start_specification: str | list[str] | None = None, + shop_start_specification_prefix: str | None = None, + shop_end_specification: str | list[str] | None = None, + shop_end_specification_prefix: str | None = None, + shop_bid_date_specification: str | list[str] | None = None, + shop_bid_date_specification_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -301,7 +336,12 @@ def aggregate( ) = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + shop_start_specification: str | list[str] | None = None, + shop_start_specification_prefix: str | None = None, + shop_end_specification: str | list[str] | None = None, + shop_end_specification_prefix: str | None = None, + shop_bid_date_specification: str | list[str] | None = None, + shop_bid_date_specification_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = 
None, limit: int | None = DEFAULT_LIMIT_READ, @@ -324,7 +364,12 @@ def aggregate( ) = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + shop_start_specification: str | list[str] | None = None, + shop_start_specification_prefix: str | None = None, + shop_end_specification: str | list[str] | None = None, + shop_end_specification_prefix: str | None = None, + shop_bid_date_specification: str | list[str] | None = None, + shop_bid_date_specification_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -340,7 +385,12 @@ def aggregate( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. + shop_start_specification: The shop start specification to filter on. + shop_start_specification_prefix: The prefix of the shop start specification to filter on. + shop_end_specification: The shop end specification to filter on. + shop_end_specification_prefix: The prefix of the shop end specification to filter on. + shop_bid_date_specification: The shop bid date specification to filter on. + shop_bid_date_specification_prefix: The prefix of the shop bid date specification to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method shop multi scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -363,7 +413,12 @@ def aggregate( self._view_id, name, name_prefix, - main_scenario, + shop_start_specification, + shop_start_specification_prefix, + shop_end_specification, + shop_end_specification_prefix, + shop_bid_date_specification, + shop_bid_date_specification_prefix, external_id_prefix, space, filter, @@ -390,7 +445,12 @@ def histogram( ) = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + shop_start_specification: str | list[str] | None = None, + shop_start_specification_prefix: str | None = None, + shop_end_specification: str | list[str] | None = None, + shop_end_specification_prefix: str | None = None, + shop_bid_date_specification: str | list[str] | None = None, + shop_bid_date_specification_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -405,7 +465,12 @@ def histogram( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. + shop_start_specification: The shop start specification to filter on. + shop_start_specification_prefix: The prefix of the shop start specification to filter on. + shop_end_specification: The shop end specification to filter on. + shop_end_specification_prefix: The prefix of the shop end specification to filter on. + shop_bid_date_specification: The shop bid date specification to filter on. + shop_bid_date_specification_prefix: The prefix of the shop bid date specification to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method shop multi scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -419,7 +484,12 @@ def histogram( self._view_id, name, name_prefix, - main_scenario, + shop_start_specification, + shop_start_specification_prefix, + shop_end_specification, + shop_end_specification_prefix, + shop_bid_date_specification, + shop_bid_date_specification_prefix, external_id_prefix, space, filter, @@ -439,7 +509,12 @@ def list( self, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + shop_start_specification: str | list[str] | None = None, + shop_start_specification_prefix: str | None = None, + shop_end_specification: str | list[str] | None = None, + shop_end_specification_prefix: str | None = None, + shop_bid_date_specification: str | list[str] | None = None, + shop_bid_date_specification_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -451,12 +526,17 @@ def list( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. + shop_start_specification: The shop start specification to filter on. + shop_start_specification_prefix: The prefix of the shop start specification to filter on. + shop_end_specification: The shop end specification to filter on. + shop_end_specification_prefix: The prefix of the shop end specification to filter on. + shop_bid_date_specification: The shop bid date specification to filter on. + shop_bid_date_specification_prefix: The prefix of the shop bid date specification to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method shop multi scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. - retrieve_edges: Whether to retrieve `price_scenarios` external ids for the bid method shop multi scenarios. Defaults to True. + retrieve_edges: Whether to retrieve `scenarios` external ids for the bid method shop multi scenarios. Defaults to True. Returns: List of requested bid method shop multi scenarios @@ -474,7 +554,12 @@ def list( self._view_id, name, name_prefix, - main_scenario, + shop_start_specification, + shop_start_specification_prefix, + shop_end_specification, + shop_end_specification_prefix, + shop_bid_date_specification, + shop_bid_date_specification_prefix, external_id_prefix, space, filter, @@ -486,11 +571,11 @@ def list( retrieve_edges=retrieve_edges, edge_api_name_type_direction_view_id_penta=[ ( - self.price_scenarios_edge, - "price_scenarios", - dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.priceScenarios"), + self.scenarios_edge, + "scenarios", + dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.scenarios"), "outwards", - dm.ViewId("sp_powerops_models", "Mapping", "1"), + dm.ViewId("sp_powerops_models", "Scenario", "1"), ), ], ) diff --git a/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario_query.py b/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario_query.py index 342d234ed..433588b85 100644 --- a/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario_query.py +++ b/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario_query.py @@ -8,12 +8,11 @@ from cognite.powerops.client._generated.v1.data_classes import ( DomainModelCore, BidMethodSHOPMultiScenario, - Mapping, ) from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter if TYPE_CHECKING: - from .mapping_query import MappingQueryAPI + from .scenario_query 
import ScenarioQueryAPI class BidMethodSHOPMultiScenarioQueryAPI(QueryAPI[T_DomainModelList]): @@ -42,37 +41,35 @@ def __init__( ) ) - def price_scenarios( + def scenarios( self, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, - retrieve_main_scenario: bool = False, - ) -> MappingQueryAPI[T_DomainModelList]: - """Query along the price scenario edges of the bid method shop multi scenario. + ) -> ScenarioQueryAPI[T_DomainModelList]: + """Query along the scenario edges of the bid method shop multi scenario. Args: external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. - limit: Maximum number of price scenario edges to return. Defaults to 25. Set to -1, float("inf") or None + limit: Maximum number of scenario edges to return. Defaults to 25. Set to -1, float("inf") or None to return all items. - retrieve_main_scenario: Whether to retrieve the main scenario for each bid method shop multi scenario or not. Returns: - MappingQueryAPI: The query API for the mapping. + ScenarioQueryAPI: The query API for the scenario. 
""" - from .mapping_query import MappingQueryAPI + from .scenario_query import ScenarioQueryAPI from_ = self._builder[-1].name edge_filter = _create_edge_filter( - dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.priceScenarios"), + dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.scenarios"), external_id_prefix=external_id_prefix, space=space, ) self._builder.append( QueryStep( - name=self._builder.next_name("price_scenarios"), + name=self._builder.next_name("scenarios"), expression=dm.query.EdgeResultSetExpression( filter=edge_filter, from_=from_, @@ -82,41 +79,15 @@ def price_scenarios( max_retrieve_limit=limit, ) ) - if retrieve_main_scenario: - self._query_append_main_scenario(from_) - return MappingQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) + return ScenarioQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) def query( self, - retrieve_main_scenario: bool = False, ) -> T_DomainModelList: """Execute query and return the result. - Args: - retrieve_main_scenario: Whether to retrieve the main scenario for each bid method shop multi scenario or not. - Returns: The list of the source nodes of the query. 
""" - from_ = self._builder[-1].name - if retrieve_main_scenario: - self._query_append_main_scenario(from_) return self._query() - - def _query_append_main_scenario(self, from_: str) -> None: - view_id = self._view_by_read_class[Mapping] - self._builder.append( - QueryStep( - name=self._builder.next_name("main_scenario"), - expression=dm.query.NodeResultSetExpression( - filter=dm.filters.HasData(views=[view_id]), - from_=from_, - through=self._view_by_read_class[BidMethodSHOPMultiScenario].as_property_ref("mainScenario"), - direction="outwards", - ), - select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), - max_retrieve_limit=-1, - result_cls=Mapping, - ), - ) diff --git a/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario_scenarios.py b/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario_scenarios.py new file mode 100644 index 000000000..57d71de37 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/bid_method_shop_multi_scenario_scenarios.py @@ -0,0 +1,54 @@ +from __future__ import annotations + + +from cognite.client import data_modeling as dm + +from ._core import DEFAULT_LIMIT_READ, EdgeAPI, _create_edge_filter +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE + + +class BidMethodSHOPMultiScenarioScenariosAPI(EdgeAPI): + def list( + self, + from_bid_method_shop_multi_scenario: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + from_bid_method_shop_multi_scenario_space: str = DEFAULT_INSTANCE_SPACE, + to_scenario: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + to_scenario_space: str = DEFAULT_INSTANCE_SPACE, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit=DEFAULT_LIMIT_READ, + ) -> dm.EdgeList: + """List scenario edges of a bid method shop multi scenario. + + Args: + from_bid_method_shop_multi_scenario: ID of the source bid method shop multi scenario. 
+ from_bid_method_shop_multi_scenario_space: Location of the bid method shop multi scenarios. + to_scenario: ID of the target scenario. + to_scenario_space: Location of the scenarios. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of scenario edges to return. Defaults to 25. Set to -1, float("inf") or None + to return all items. + + Returns: + The requested scenario edges. + + Examples: + + List 5 scenario edges connected to "my_bid_method_shop_multi_scenario": + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> bid_method_shop_multi_scenario = client.bid_method_shop_multi_scenario.scenarios_edge.list("my_bid_method_shop_multi_scenario", limit=5) + + """ + filter_ = _create_edge_filter( + dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.scenarios"), + from_bid_method_shop_multi_scenario, + from_bid_method_shop_multi_scenario_space, + to_scenario, + to_scenario_space, + external_id_prefix, + space, + ) + return self._list(filter_=filter_, limit=limit) diff --git a/cognite/powerops/client/_generated/v1/_api/bid_method_water_value.py b/cognite/powerops/client/_generated/v1/_api/bid_method_water_value.py index d28ddd4cc..840fdd9cf 100644 --- a/cognite/powerops/client/_generated/v1/_api/bid_method_water_value.py +++ b/cognite/powerops/client/_generated/v1/_api/bid_method_water_value.py @@ -53,7 +53,6 @@ def __call__( self, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, @@ -64,7 +63,6 @@ def __call__( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. 
external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method water values to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -79,7 +77,6 @@ def __call__( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, (filter and dm.filters.And(filter, has_data)) or has_data, @@ -195,7 +192,6 @@ def search( properties: BidMethodWaterValueTextFields | Sequence[BidMethodWaterValueTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -208,7 +204,6 @@ def search( properties: The property to search, if nothing is passed all text fields will be searched. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method water values to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -230,7 +225,6 @@ def search( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, @@ -252,7 +246,6 @@ def aggregate( search_properties: BidMethodWaterValueTextFields | Sequence[BidMethodWaterValueTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -274,7 +267,6 @@ def aggregate( search_properties: BidMethodWaterValueTextFields | Sequence[BidMethodWaterValueTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -295,7 +287,6 @@ def aggregate( search_property: BidMethodWaterValueTextFields | Sequence[BidMethodWaterValueTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -311,7 +302,6 @@ def aggregate( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method water values to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -334,7 +324,6 @@ def aggregate( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, @@ -359,7 +348,6 @@ def histogram( search_property: BidMethodWaterValueTextFields | Sequence[BidMethodWaterValueTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -374,7 +362,6 @@ def histogram( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method water values to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -388,7 +375,6 @@ def histogram( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, @@ -408,7 +394,6 @@ def list( self, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -419,7 +404,6 @@ def list( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - main_scenario: The main scenario to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of bid method water values to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -441,7 +425,6 @@ def list( self._view_id, name, name_prefix, - main_scenario, external_id_prefix, space, filter, diff --git a/cognite/powerops/client/_generated/v1/_api/bid_method_water_value_query.py b/cognite/powerops/client/_generated/v1/_api/bid_method_water_value_query.py index e32c515b5..f9714355f 100644 --- a/cognite/powerops/client/_generated/v1/_api/bid_method_water_value_query.py +++ b/cognite/powerops/client/_generated/v1/_api/bid_method_water_value_query.py @@ -8,7 +8,6 @@ from cognite.powerops.client._generated.v1.data_classes import ( DomainModelCore, BidMethodWaterValue, - Mapping, ) from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter @@ -39,35 +38,11 @@ def __init__( def query( self, - retrieve_main_scenario: bool = False, ) -> T_DomainModelList: """Execute query and return the result. - Args: - retrieve_main_scenario: Whether to retrieve the main scenario for each bid method water value or not. - Returns: The list of the source nodes of the query. 
""" - from_ = self._builder[-1].name - if retrieve_main_scenario: - self._query_append_main_scenario(from_) return self._query() - - def _query_append_main_scenario(self, from_: str) -> None: - view_id = self._view_by_read_class[Mapping] - self._builder.append( - QueryStep( - name=self._builder.next_name("main_scenario"), - expression=dm.query.NodeResultSetExpression( - filter=dm.filters.HasData(views=[view_id]), - from_=from_, - through=self._view_by_read_class[BidMethodWaterValue].as_property_ref("mainScenario"), - direction="outwards", - ), - select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), - max_retrieve_limit=-1, - result_cls=Mapping, - ), - ) diff --git a/cognite/powerops/client/_generated/v1/_api/case.py b/cognite/powerops/client/_generated/v1/_api/case.py new file mode 100644 index 000000000..6bd0cabb1 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/case.py @@ -0,0 +1,415 @@ +from __future__ import annotations + +import datetime +from collections.abc import Sequence +from typing import overload +import warnings + +from cognite.client import CogniteClient +from cognite.client import data_modeling as dm +from cognite.client.data_classes.data_modeling.instances import InstanceAggregationResultList + +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + DomainModelWrite, + ResourcesWriteResult, + Case, + CaseWrite, + CaseFields, + CaseList, + CaseWriteList, +) +from cognite.powerops.client._generated.v1.data_classes._case import ( + _CASE_PROPERTIES_BY_FIELD, + _create_case_filter, +) +from ._core import ( + DEFAULT_LIMIT_READ, + DEFAULT_QUERY_LIMIT, + Aggregations, + NodeAPI, + SequenceNotStr, + QueryStep, + QueryBuilder, +) +from .case_query import CaseQueryAPI + + +class CaseAPI(NodeAPI[Case, CaseWrite, CaseList]): + def __init__(self, client: CogniteClient, view_by_read_class: 
dict[type[DomainModelCore], dm.ViewId]): + view_id = view_by_read_class[Case] + super().__init__( + client=client, + sources=view_id, + class_type=Case, + class_list=CaseList, + class_write_list=CaseWriteList, + view_by_read_class=view_by_read_class, + ) + self._view_id = view_id + + def __call__( + self, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_start_time: datetime.date | None = None, + max_start_time: datetime.date | None = None, + min_end_time: datetime.date | None = None, + max_end_time: datetime.date | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_QUERY_LIMIT, + filter: dm.Filter | None = None, + ) -> CaseQueryAPI[CaseList]: + """Query starting at cases. + + Args: + scenario: The scenario to filter on. + min_start_time: The minimum value of the start time to filter on. + max_start_time: The maximum value of the start time to filter on. + min_end_time: The minimum value of the end time to filter on. + max_end_time: The maximum value of the end time to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query API for cases. 
+ + """ + has_data = dm.filters.HasData(views=[self._view_id]) + filter_ = _create_case_filter( + self._view_id, + scenario, + min_start_time, + max_start_time, + min_end_time, + max_end_time, + external_id_prefix, + space, + (filter and dm.filters.And(filter, has_data)) or has_data, + ) + builder = QueryBuilder(CaseList) + return CaseQueryAPI(self._client, builder, self._view_by_read_class, filter_, limit) + + def apply( + self, + case: CaseWrite | Sequence[CaseWrite], + replace: bool = False, + write_none: bool = False, + ) -> ResourcesWriteResult: + """Add or update (upsert) cases. + + Args: + case: Case or sequence of cases to upsert. + replace (bool): How do we behave when a property value exists? Do we replace all matching and existing values with the supplied values (true)? + Or should we merge in new values for properties together with the existing values (false)? Note: This setting applies for all nodes or edges specified in the ingestion call. + write_none (bool): This method, will by default, skip properties that are set to None. However, if you want to set properties to None, + you can set this parameter to True. Note this only applies to properties that are nullable. + Returns: + Created instance(s), i.e., nodes, edges, and time series. + + Examples: + + Create a new case: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> from cognite.powerops.client._generated.v1.data_classes import CaseWrite + >>> client = PowerOpsModelsV1Client() + >>> case = CaseWrite(external_id="my_case", ...) + >>> result = client.case.apply(case) + + """ + warnings.warn( + "The .apply method is deprecated and will be removed in v1.0. " + "Please use the .upsert method on the client instead. This means instead of " + "`my_client.case.apply(my_items)` please use `my_client.upsert(my_items)`." 
+ "The motivation is that all apply methods are the same, and having one apply method per API " + " class encourages users to create items in small batches, which is inefficient." + "In addition, .upsert method is more descriptive of what the method does.", + UserWarning, + stacklevel=2, + ) + return self._apply(case, replace, write_none) + + def delete( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> dm.InstancesDeleteResult: + """Delete one or more case. + + Args: + external_id: External id of the case to delete. + space: The space where all the case are located. + + Returns: + The instance(s), i.e., nodes and edges which has been deleted. Empty list if nothing was deleted. + + Examples: + + Delete case by id: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> client.case.delete("my_case") + """ + warnings.warn( + "The .delete method is deprecated and will be removed in v1.0. " + "Please use the .delete method on the client instead. This means instead of " + "`my_client.case.delete(my_ids)` please use `my_client.delete(my_ids)`." + "The motivation is that all delete methods are the same, and having one delete method per API " + " class encourages users to delete items in small batches, which is inefficient.", + UserWarning, + stacklevel=2, + ) + return self._delete(external_id, space) + + @overload + def retrieve(self, external_id: str, space: str = DEFAULT_INSTANCE_SPACE) -> Case | None: ... + + @overload + def retrieve(self, external_id: SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE) -> CaseList: ... + + def retrieve( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> Case | CaseList | None: + """Retrieve one or more cases by id(s). + + Args: + external_id: External id or list of external ids of the cases. + space: The space where all the cases are located. + + Returns: + The requested cases. 
+ + Examples: + + Retrieve case by id: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> case = client.case.retrieve("my_case") + + """ + return self._retrieve(external_id, space) + + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: CaseFields | Sequence[CaseFields] | None = None, + group_by: None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_start_time: datetime.date | None = None, + max_start_time: datetime.date | None = None, + min_end_time: datetime.date | None = None, + max_end_time: datetime.date | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue]: ... + + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: CaseFields | Sequence[CaseFields] | None = None, + group_by: CaseFields | Sequence[CaseFields] = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_start_time: datetime.date | None = None, + max_start_time: datetime.date | None = None, + min_end_time: datetime.date | None = None, + max_end_time: datetime.date | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> InstanceAggregationResultList: ... 
+ + def aggregate( + self, + aggregate: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: CaseFields | Sequence[CaseFields] | None = None, + group_by: CaseFields | Sequence[CaseFields] | None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_start_time: datetime.date | None = None, + max_start_time: datetime.date | None = None, + min_end_time: datetime.date | None = None, + max_end_time: datetime.date | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue] | InstanceAggregationResultList: + """Aggregate data across cases + + Args: + aggregate: The aggregation to perform. + property: The property to perform aggregation on. + group_by: The property to group by when doing the aggregation. + scenario: The scenario to filter on. + min_start_time: The minimum value of the start time to filter on. + max_start_time: The maximum value of the start time to filter on. + min_end_time: The minimum value of the end time to filter on. + max_end_time: The maximum value of the end time to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Aggregation results. 
+ + Examples: + + Count cases in space `my_space`: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> result = client.case.aggregate("count", space="my_space") + + """ + + filter_ = _create_case_filter( + self._view_id, + scenario, + min_start_time, + max_start_time, + min_end_time, + max_end_time, + external_id_prefix, + space, + filter, + ) + return self._aggregate( + self._view_id, + aggregate, + _CASE_PROPERTIES_BY_FIELD, + property, + group_by, + None, + None, + limit, + filter_, + ) + + def histogram( + self, + property: CaseFields, + interval: float, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_start_time: datetime.date | None = None, + max_start_time: datetime.date | None = None, + min_end_time: datetime.date | None = None, + max_end_time: datetime.date | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> dm.aggregations.HistogramValue: + """Produces histograms for cases + + Args: + property: The property to use as the value in the histogram. + interval: The interval to use for the histogram bins. + scenario: The scenario to filter on. + min_start_time: The minimum value of the start time to filter on. + max_start_time: The maximum value of the start time to filter on. + min_end_time: The minimum value of the end time to filter on. + max_end_time: The maximum value of the end time to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Bucketed histogram results. 
+ + """ + filter_ = _create_case_filter( + self._view_id, + scenario, + min_start_time, + max_start_time, + min_end_time, + max_end_time, + external_id_prefix, + space, + filter, + ) + return self._histogram( + self._view_id, + property, + interval, + _CASE_PROPERTIES_BY_FIELD, + None, + None, + limit, + filter_, + ) + + def list( + self, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_start_time: datetime.date | None = None, + max_start_time: datetime.date | None = None, + min_end_time: datetime.date | None = None, + max_end_time: datetime.date | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> CaseList: + """List/filter cases + + Args: + scenario: The scenario to filter on. + min_start_time: The minimum value of the start time to filter on. + max_start_time: The maximum value of the start time to filter on. + min_end_time: The minimum value of the end time to filter on. + max_end_time: The maximum value of the end time to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. 
+ + Returns: + List of requested cases + + Examples: + + List cases and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> cases = client.case.list(limit=5) + + """ + filter_ = _create_case_filter( + self._view_id, + scenario, + min_start_time, + max_start_time, + min_end_time, + max_end_time, + external_id_prefix, + space, + filter, + ) + return self._list(limit=limit, filter=filter_) diff --git a/cognite/powerops/client/_generated/v1/_api/case_query.py b/cognite/powerops/client/_generated/v1/_api/case_query.py new file mode 100644 index 000000000..866fff99f --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/case_query.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +import datetime +from typing import TYPE_CHECKING + +from cognite.client import data_modeling as dm, CogniteClient + +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + Case, + Scenario, +) +from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter + + +class CaseQueryAPI(QueryAPI[T_DomainModelList]): + def __init__( + self, + client: CogniteClient, + builder: QueryBuilder[T_DomainModelList], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId], + filter_: dm.filters.Filter | None = None, + limit: int = DEFAULT_QUERY_LIMIT, + ): + super().__init__(client, builder, view_by_read_class) + + self._builder.append( + QueryStep( + name=self._builder.next_name("case"), + expression=dm.query.NodeResultSetExpression( + from_=self._builder[-1].name if self._builder else None, + filter=filter_, + ), + select=dm.query.Select([dm.query.SourceSelector(self._view_by_read_class[Case], ["*"])]), + result_cls=Case, + max_retrieve_limit=limit, + ) + ) + + def query( + self, + retrieve_scenario: bool = False, + ) -> T_DomainModelList: + """Execute query and return the result. 
+ + Args: + retrieve_scenario: Whether to retrieve the scenario for each case or not. + + Returns: + The list of the source nodes of the query. + + """ + from_ = self._builder[-1].name + if retrieve_scenario: + self._query_append_scenario(from_) + return self._query() + + def _query_append_scenario(self, from_: str) -> None: + view_id = self._view_by_read_class[Scenario] + self._builder.append( + QueryStep( + name=self._builder.next_name("scenario"), + expression=dm.query.NodeResultSetExpression( + filter=dm.filters.HasData(views=[view_id]), + from_=from_, + through=self._view_by_read_class[Case].as_property_ref("scenario"), + direction="outwards", + ), + select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), + max_retrieve_limit=-1, + result_cls=Scenario, + ), + ) diff --git a/cognite/powerops/client/_generated/v1/_api/commands.py b/cognite/powerops/client/_generated/v1/_api/commands.py new file mode 100644 index 000000000..c41504579 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/commands.py @@ -0,0 +1,396 @@ +from __future__ import annotations + +from collections.abc import Sequence +from typing import overload +import warnings + +from cognite.client import CogniteClient +from cognite.client import data_modeling as dm +from cognite.client.data_classes.data_modeling.instances import InstanceAggregationResultList + +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + DomainModelWrite, + ResourcesWriteResult, + Commands, + CommandsWrite, + CommandsFields, + CommandsList, + CommandsWriteList, + CommandsTextFields, +) +from cognite.powerops.client._generated.v1.data_classes._commands import ( + _COMMANDS_PROPERTIES_BY_FIELD, + _create_command_filter, +) +from ._core import ( + DEFAULT_LIMIT_READ, + DEFAULT_QUERY_LIMIT, + Aggregations, + NodeAPI, + SequenceNotStr, + QueryStep, + QueryBuilder, +) +from .commands_query 
import CommandsQueryAPI + + +class CommandsAPI(NodeAPI[Commands, CommandsWrite, CommandsList]): + def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainModelCore], dm.ViewId]): + view_id = view_by_read_class[Commands] + super().__init__( + client=client, + sources=view_id, + class_type=Commands, + class_list=CommandsList, + class_write_list=CommandsWriteList, + view_by_read_class=view_by_read_class, + ) + self._view_id = view_id + + def __call__( + self, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_QUERY_LIMIT, + filter: dm.Filter | None = None, + ) -> CommandsQueryAPI[CommandsList]: + """Query starting at commands. + + Args: + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of commands to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query API for commands. + + """ + has_data = dm.filters.HasData(views=[self._view_id]) + filter_ = _create_command_filter( + self._view_id, + external_id_prefix, + space, + (filter and dm.filters.And(filter, has_data)) or has_data, + ) + builder = QueryBuilder(CommandsList) + return CommandsQueryAPI(self._client, builder, self._view_by_read_class, filter_, limit) + + def apply( + self, + command: CommandsWrite | Sequence[CommandsWrite], + replace: bool = False, + write_none: bool = False, + ) -> ResourcesWriteResult: + """Add or update (upsert) commands. + + Args: + command: Command or sequence of commands to upsert. + replace (bool): How do we behave when a property value exists? Do we replace all matching and existing values with the supplied values (true)? + Or should we merge in new values for properties together with the existing values (false)? 
Note: This setting applies for all nodes or edges specified in the ingestion call. + write_none (bool): This method, will by default, skip properties that are set to None. However, if you want to set properties to None, + you can set this parameter to True. Note this only applies to properties that are nullable. + Returns: + Created instance(s), i.e., nodes, edges, and time series. + + Examples: + + Create a new command: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> from cognite.powerops.client._generated.v1.data_classes import CommandsWrite + >>> client = PowerOpsModelsV1Client() + >>> command = CommandsWrite(external_id="my_command", ...) + >>> result = client.commands.apply(command) + + """ + warnings.warn( + "The .apply method is deprecated and will be removed in v1.0. " + "Please use the .upsert method on the client instead. This means instead of " + "`my_client.commands.apply(my_items)` please use `my_client.upsert(my_items)`." + "The motivation is that all apply methods are the same, and having one apply method per API " + " class encourages users to create items in small batches, which is inefficient." + "In addition, .upsert method is more descriptive of what the method does.", + UserWarning, + stacklevel=2, + ) + return self._apply(command, replace, write_none) + + def delete( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> dm.InstancesDeleteResult: + """Delete one or more command. + + Args: + external_id: External id of the command to delete. + space: The space where all the command are located. + + Returns: + The instance(s), i.e., nodes and edges which has been deleted. Empty list if nothing was deleted. 
+ + Examples: + + Delete command by id: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> client.commands.delete("my_command") + """ + warnings.warn( + "The .delete method is deprecated and will be removed in v1.0. " + "Please use the .delete method on the client instead. This means instead of " + "`my_client.commands.delete(my_ids)` please use `my_client.delete(my_ids)`." + "The motivation is that all delete methods are the same, and having one delete method per API " + " class encourages users to delete items in small batches, which is inefficient.", + UserWarning, + stacklevel=2, + ) + return self._delete(external_id, space) + + @overload + def retrieve(self, external_id: str, space: str = DEFAULT_INSTANCE_SPACE) -> Commands | None: ... + + @overload + def retrieve(self, external_id: SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE) -> CommandsList: ... + + def retrieve( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> Commands | CommandsList | None: + """Retrieve one or more commands by id(s). + + Args: + external_id: External id or list of external ids of the commands. + space: The space where all the commands are located. + + Returns: + The requested commands. 
+
+        Examples:
+
+            Retrieve command by id:
+
+                >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client
+                >>> client = PowerOpsModelsV1Client()
+                >>> command = client.commands.retrieve("my_command")
+
+        """
+        return self._retrieve(external_id, space)
+
+    def search(
+        self,
+        query: str,
+        properties: CommandsTextFields | Sequence[CommandsTextFields] | None = None,
+        external_id_prefix: str | None = None,
+        space: str | list[str] | None = None,
+        limit: int | None = DEFAULT_LIMIT_READ,
+        filter: dm.Filter | None = None,
+    ) -> CommandsList:
+        """Search commands
+
+        Args:
+            query: The search query.
+            properties: The property to search, if nothing is passed all text fields will be searched.
+            external_id_prefix: The prefix of the external ID to filter on.
+            space: The space to filter on.
+            limit: Maximum number of commands to return. Defaults to 25. Set to -1, float("inf") or None to return all items.
+            filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above.
+
+        Returns:
+            Search results commands matching the query.
+ + Examples: + + Search for 'my_command' in all text properties: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> commands = client.commands.search('my_command') + + """ + filter_ = _create_command_filter( + self._view_id, + external_id_prefix, + space, + filter, + ) + return self._search(self._view_id, query, _COMMANDS_PROPERTIES_BY_FIELD, properties, filter_, limit) + + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: CommandsFields | Sequence[CommandsFields] | None = None, + group_by: None = None, + query: str | None = None, + search_properties: CommandsTextFields | Sequence[CommandsTextFields] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue]: ... + + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: CommandsFields | Sequence[CommandsFields] | None = None, + group_by: CommandsFields | Sequence[CommandsFields] = None, + query: str | None = None, + search_properties: CommandsTextFields | Sequence[CommandsTextFields] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> InstanceAggregationResultList: ... 
+ + def aggregate( + self, + aggregate: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: CommandsFields | Sequence[CommandsFields] | None = None, + group_by: CommandsFields | Sequence[CommandsFields] | None = None, + query: str | None = None, + search_property: CommandsTextFields | Sequence[CommandsTextFields] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue] | InstanceAggregationResultList: + """Aggregate data across commands + + Args: + aggregate: The aggregation to perform. + property: The property to perform aggregation on. + group_by: The property to group by when doing the aggregation. + query: The query to search for in the text field. + search_property: The text field to search in. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of commands to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Aggregation results. 
+ + Examples: + + Count commands in space `my_space`: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> result = client.commands.aggregate("count", space="my_space") + + """ + + filter_ = _create_command_filter( + self._view_id, + external_id_prefix, + space, + filter, + ) + return self._aggregate( + self._view_id, + aggregate, + _COMMANDS_PROPERTIES_BY_FIELD, + property, + group_by, + query, + search_property, + limit, + filter_, + ) + + def histogram( + self, + property: CommandsFields, + interval: float, + query: str | None = None, + search_property: CommandsTextFields | Sequence[CommandsTextFields] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> dm.aggregations.HistogramValue: + """Produces histograms for commands + + Args: + property: The property to use as the value in the histogram. + interval: The interval to use for the histogram bins. + query: The query to search for in the text field. + search_property: The text field to search in. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of commands to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Bucketed histogram results. 
+ + """ + filter_ = _create_command_filter( + self._view_id, + external_id_prefix, + space, + filter, + ) + return self._histogram( + self._view_id, + property, + interval, + _COMMANDS_PROPERTIES_BY_FIELD, + query, + search_property, + limit, + filter_, + ) + + def list( + self, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> CommandsList: + """List/filter commands + + Args: + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of commands to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + List of requested commands + + Examples: + + List commands and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> commands = client.commands.list(limit=5) + + """ + filter_ = _create_command_filter( + self._view_id, + external_id_prefix, + space, + filter, + ) + return self._list(limit=limit, filter=filter_) diff --git a/cognite/powerops/client/_generated/v1/_api/commands_query.py b/cognite/powerops/client/_generated/v1/_api/commands_query.py new file mode 100644 index 000000000..a8234be32 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/commands_query.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +import datetime +from typing import TYPE_CHECKING + +from cognite.client import data_modeling as dm, CogniteClient + +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + Commands, +) +from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter + + +class CommandsQueryAPI(QueryAPI[T_DomainModelList]): + def 
__init__( + self, + client: CogniteClient, + builder: QueryBuilder[T_DomainModelList], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId], + filter_: dm.filters.Filter | None = None, + limit: int = DEFAULT_QUERY_LIMIT, + ): + super().__init__(client, builder, view_by_read_class) + + self._builder.append( + QueryStep( + name=self._builder.next_name("command"), + expression=dm.query.NodeResultSetExpression( + from_=self._builder[-1].name if self._builder else None, + filter=filter_, + ), + select=dm.query.Select([dm.query.SourceSelector(self._view_by_read_class[Commands], ["*"])]), + result_cls=Commands, + max_retrieve_limit=limit, + ) + ) + + def query( + self, + ) -> T_DomainModelList: + """Execute query and return the result. + + Returns: + The list of the source nodes of the query. + + """ + return self._query() diff --git a/cognite/powerops/client/_generated/v1/_api/market_configuration.py b/cognite/powerops/client/_generated/v1/_api/market_configuration.py index cbf5293c7..089242de4 100644 --- a/cognite/powerops/client/_generated/v1/_api/market_configuration.py +++ b/cognite/powerops/client/_generated/v1/_api/market_configuration.py @@ -51,6 +51,8 @@ def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainMo def __call__( self, + name: str | list[str] | None = None, + name_prefix: str | None = None, market_type: str | list[str] | None = None, market_type_prefix: str | None = None, min_max_price: float | None = None, @@ -77,6 +79,8 @@ def __call__( """Query starting at market configurations. Args: + name: The name to filter on. + name_prefix: The prefix of the name to filter on. market_type: The market type to filter on. market_type_prefix: The prefix of the market type to filter on. min_max_price: The minimum value of the max price to filter on. 
@@ -107,6 +111,8 @@ def __call__( has_data = dm.filters.HasData(views=[self._view_id]) filter_ = _create_market_configuration_filter( self._view_id, + name, + name_prefix, market_type, market_type_prefix, min_max_price, @@ -238,6 +244,8 @@ def search( self, query: str, properties: MarketConfigurationTextFields | Sequence[MarketConfigurationTextFields] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, market_type: str | list[str] | None = None, market_type_prefix: str | None = None, min_max_price: float | None = None, @@ -266,6 +274,8 @@ def search( Args: query: The search query, properties: The property to search, if nothing is passed all text fields will be searched. + name: The name to filter on. + name_prefix: The prefix of the name to filter on. market_type: The market type to filter on. market_type_prefix: The prefix of the market type to filter on. min_max_price: The minimum value of the max price to filter on. @@ -303,6 +313,8 @@ def search( """ filter_ = _create_market_configuration_filter( self._view_id, + name, + name_prefix, market_type, market_type_prefix, min_max_price, @@ -340,6 +352,8 @@ def aggregate( group_by: None = None, query: str | None = None, search_properties: MarketConfigurationTextFields | Sequence[MarketConfigurationTextFields] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, market_type: str | list[str] | None = None, market_type_prefix: str | None = None, min_max_price: float | None = None, @@ -377,6 +391,8 @@ def aggregate( group_by: MarketConfigurationFields | Sequence[MarketConfigurationFields] = None, query: str | None = None, search_properties: MarketConfigurationTextFields | Sequence[MarketConfigurationTextFields] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, market_type: str | list[str] | None = None, market_type_prefix: str | None = None, min_max_price: float | None = None, @@ -413,6 +429,8 @@ def aggregate( 
group_by: MarketConfigurationFields | Sequence[MarketConfigurationFields] | None = None, query: str | None = None, search_property: MarketConfigurationTextFields | Sequence[MarketConfigurationTextFields] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, market_type: str | list[str] | None = None, market_type_prefix: str | None = None, min_max_price: float | None = None, @@ -444,6 +462,8 @@ def aggregate( group_by: The property to group by when doing the aggregation. query: The query to search for in the text field. search_property: The text field to search in. + name: The name to filter on. + name_prefix: The prefix of the name to filter on. market_type: The market type to filter on. market_type_prefix: The prefix of the market type to filter on. min_max_price: The minimum value of the max price to filter on. @@ -482,6 +502,8 @@ def aggregate( filter_ = _create_market_configuration_filter( self._view_id, + name, + name_prefix, market_type, market_type_prefix, min_max_price, @@ -522,6 +544,8 @@ def histogram( interval: float, query: str | None = None, search_property: MarketConfigurationTextFields | Sequence[MarketConfigurationTextFields] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, market_type: str | list[str] | None = None, market_type_prefix: str | None = None, min_max_price: float | None = None, @@ -552,6 +576,8 @@ def histogram( interval: The interval to use for the histogram bins. query: The query to search for in the text field. search_property: The text field to search in. + name: The name to filter on. + name_prefix: The prefix of the name to filter on. market_type: The market type to filter on. market_type_prefix: The prefix of the market type to filter on. min_max_price: The minimum value of the max price to filter on. 
@@ -581,6 +607,8 @@ def histogram( """ filter_ = _create_market_configuration_filter( self._view_id, + name, + name_prefix, market_type, market_type_prefix, min_max_price, @@ -616,6 +644,8 @@ def histogram( def list( self, + name: str | list[str] | None = None, + name_prefix: str | None = None, market_type: str | list[str] | None = None, market_type_prefix: str | None = None, min_max_price: float | None = None, @@ -642,6 +672,8 @@ def list( """List/filter market configurations Args: + name: The name to filter on. + name_prefix: The prefix of the name to filter on. market_type: The market type to filter on. market_type_prefix: The prefix of the market type to filter on. min_max_price: The minimum value of the max price to filter on. @@ -679,6 +711,8 @@ def list( """ filter_ = _create_market_configuration_filter( self._view_id, + name, + name_prefix, market_type, market_type_prefix, min_max_price, diff --git a/cognite/powerops/client/_generated/v1/_api/model_template.py b/cognite/powerops/client/_generated/v1/_api/model_template.py index bb6e79211..6a8959f26 100644 --- a/cognite/powerops/client/_generated/v1/_api/model_template.py +++ b/cognite/powerops/client/_generated/v1/_api/model_template.py @@ -53,13 +53,11 @@ def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainMo def __call__( self, - cog_shop_version: str | list[str] | None = None, - cog_shop_version_prefix: str | None = None, + version_: str | list[str] | None = None, + version_prefix: str | None = None, shop_version: str | list[str] | None = None, shop_version_prefix: str | None = None, watercourse: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - source: str | list[str] | None = None, - source_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, @@ -68,13 +66,11 @@ def __call__( """Query starting at model templates. 
Args: - cog_shop_version: The cog shop version to filter on. - cog_shop_version_prefix: The prefix of the cog shop version to filter on. + version_: The version to filter on. + version_prefix: The prefix of the version to filter on. shop_version: The shop version to filter on. shop_version_prefix: The prefix of the shop version to filter on. watercourse: The watercourse to filter on. - source: The source to filter on. - source_prefix: The prefix of the source to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of model templates to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -87,13 +83,11 @@ def __call__( has_data = dm.filters.HasData(views=[self._view_id]) filter_ = _create_model_template_filter( self._view_id, - cog_shop_version, - cog_shop_version_prefix, + version_, + version_prefix, shop_version, shop_version_prefix, watercourse, - source, - source_prefix, external_id_prefix, space, (filter and dm.filters.And(filter, has_data)) or has_data, @@ -222,13 +216,11 @@ def search( self, query: str, properties: ModelTemplateTextFields | Sequence[ModelTemplateTextFields] | None = None, - cog_shop_version: str | list[str] | None = None, - cog_shop_version_prefix: str | None = None, + version_: str | list[str] | None = None, + version_prefix: str | None = None, shop_version: str | list[str] | None = None, shop_version_prefix: str | None = None, watercourse: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - source: str | list[str] | None = None, - source_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -239,13 +231,11 @@ def search( Args: query: The search query, properties: The property to search, if nothing is passed all text fields will be searched. - cog_shop_version: The cog shop version to filter on. 
- cog_shop_version_prefix: The prefix of the cog shop version to filter on. + version_: The version to filter on. + version_prefix: The prefix of the version to filter on. shop_version: The shop version to filter on. shop_version_prefix: The prefix of the shop version to filter on. watercourse: The watercourse to filter on. - source: The source to filter on. - source_prefix: The prefix of the source to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of model templates to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -265,13 +255,11 @@ def search( """ filter_ = _create_model_template_filter( self._view_id, - cog_shop_version, - cog_shop_version_prefix, + version_, + version_prefix, shop_version, shop_version_prefix, watercourse, - source, - source_prefix, external_id_prefix, space, filter, @@ -291,13 +279,11 @@ def aggregate( group_by: None = None, query: str | None = None, search_properties: ModelTemplateTextFields | Sequence[ModelTemplateTextFields] | None = None, - cog_shop_version: str | list[str] | None = None, - cog_shop_version_prefix: str | None = None, + version_: str | list[str] | None = None, + version_prefix: str | None = None, shop_version: str | list[str] | None = None, shop_version_prefix: str | None = None, watercourse: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - source: str | list[str] | None = None, - source_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -317,13 +303,11 @@ def aggregate( group_by: ModelTemplateFields | Sequence[ModelTemplateFields] = None, query: str | None = None, search_properties: ModelTemplateTextFields | Sequence[ModelTemplateTextFields] | None = None, - cog_shop_version: str | list[str] | None = None, - cog_shop_version_prefix: str | None = None, + version_: str | list[str] | None = 
None, + version_prefix: str | None = None, shop_version: str | list[str] | None = None, shop_version_prefix: str | None = None, watercourse: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - source: str | list[str] | None = None, - source_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -342,13 +326,11 @@ def aggregate( group_by: ModelTemplateFields | Sequence[ModelTemplateFields] | None = None, query: str | None = None, search_property: ModelTemplateTextFields | Sequence[ModelTemplateTextFields] | None = None, - cog_shop_version: str | list[str] | None = None, - cog_shop_version_prefix: str | None = None, + version_: str | list[str] | None = None, + version_prefix: str | None = None, shop_version: str | list[str] | None = None, shop_version_prefix: str | None = None, watercourse: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - source: str | list[str] | None = None, - source_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -362,13 +344,11 @@ def aggregate( group_by: The property to group by when doing the aggregation. query: The query to search for in the text field. search_property: The text field to search in. - cog_shop_version: The cog shop version to filter on. - cog_shop_version_prefix: The prefix of the cog shop version to filter on. + version_: The version to filter on. + version_prefix: The prefix of the version to filter on. shop_version: The shop version to filter on. shop_version_prefix: The prefix of the shop version to filter on. watercourse: The watercourse to filter on. - source: The source to filter on. - source_prefix: The prefix of the source to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of model templates to return. 
Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -389,13 +369,11 @@ def aggregate( filter_ = _create_model_template_filter( self._view_id, - cog_shop_version, - cog_shop_version_prefix, + version_, + version_prefix, shop_version, shop_version_prefix, watercourse, - source, - source_prefix, external_id_prefix, space, filter, @@ -418,13 +396,11 @@ def histogram( interval: float, query: str | None = None, search_property: ModelTemplateTextFields | Sequence[ModelTemplateTextFields] | None = None, - cog_shop_version: str | list[str] | None = None, - cog_shop_version_prefix: str | None = None, + version_: str | list[str] | None = None, + version_prefix: str | None = None, shop_version: str | list[str] | None = None, shop_version_prefix: str | None = None, watercourse: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - source: str | list[str] | None = None, - source_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -437,13 +413,11 @@ def histogram( interval: The interval to use for the histogram bins. query: The query to search for in the text field. search_property: The text field to search in. - cog_shop_version: The cog shop version to filter on. - cog_shop_version_prefix: The prefix of the cog shop version to filter on. + version_: The version to filter on. + version_prefix: The prefix of the version to filter on. shop_version: The shop version to filter on. shop_version_prefix: The prefix of the shop version to filter on. watercourse: The watercourse to filter on. - source: The source to filter on. - source_prefix: The prefix of the source to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of model templates to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -455,13 +429,11 @@ def histogram( """ filter_ = _create_model_template_filter( self._view_id, - cog_shop_version, - cog_shop_version_prefix, + version_, + version_prefix, shop_version, shop_version_prefix, watercourse, - source, - source_prefix, external_id_prefix, space, filter, @@ -479,13 +451,11 @@ def histogram( def list( self, - cog_shop_version: str | list[str] | None = None, - cog_shop_version_prefix: str | None = None, + version_: str | list[str] | None = None, + version_prefix: str | None = None, shop_version: str | list[str] | None = None, shop_version_prefix: str | None = None, watercourse: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - source: str | list[str] | None = None, - source_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -495,13 +465,11 @@ def list( """List/filter model templates Args: - cog_shop_version: The cog shop version to filter on. - cog_shop_version_prefix: The prefix of the cog shop version to filter on. + version_: The version to filter on. + version_prefix: The prefix of the version to filter on. shop_version: The shop version to filter on. shop_version_prefix: The prefix of the shop version to filter on. watercourse: The watercourse to filter on. - source: The source to filter on. - source_prefix: The prefix of the source to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of model templates to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -522,13 +490,11 @@ def list( """ filter_ = _create_model_template_filter( self._view_id, - cog_shop_version, - cog_shop_version_prefix, + version_, + version_prefix, shop_version, shop_version_prefix, watercourse, - source, - source_prefix, external_id_prefix, space, filter, diff --git a/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix.py b/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix.py index a0fbabebf..2890b45df 100644 --- a/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix.py +++ b/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix.py @@ -34,7 +34,7 @@ QueryBuilder, ) from .multi_scenario_matrix_alerts import MultiScenarioMatrixAlertsAPI -from .multi_scenario_matrix_shop_results import MultiScenarioMatrixShopResultsAPI +from .multi_scenario_matrix_scenario_results import MultiScenarioMatrixScenarioResultsAPI from .multi_scenario_matrix_query import MultiScenarioMatrixQueryAPI @@ -51,7 +51,7 @@ def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainMo ) self._view_id = view_id self.alerts_edge = MultiScenarioMatrixAlertsAPI(client) - self.shop_results_edge = MultiScenarioMatrixShopResultsAPI(client) + self.scenario_results_edge = MultiScenarioMatrixScenarioResultsAPI(client) def __call__( self, @@ -115,7 +115,7 @@ def apply( """Add or update (upsert) multi scenario matrixes. Note: This method iterates through all nodes and timeseries linked to multi_scenario_matrix and creates them including the edges - between the nodes. For example, if any of `alerts` or `shop_results` are set, then these + between the nodes. For example, if any of `alerts` or `scenario_results` are set, then these nodes as well as any nodes linked to them, and all the edges linking these nodes will be created. 
Args: @@ -223,11 +223,11 @@ def retrieve( dm.ViewId("sp_powerops_models", "Alert", "1"), ), ( - self.shop_results_edge, - "shop_results", - dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.shopResults"), + self.scenario_results_edge, + "scenario_results", + dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.scenarioResults"), "outwards", - dm.ViewId("sp_powerops_models", "SHOPResult", "1"), + dm.ViewId("sp_powerops_models", "PriceProdCase", "1"), ), ], ) @@ -533,7 +533,7 @@ def list( space: The space to filter on. limit: Maximum number of multi scenario matrixes to return. Defaults to 25. Set to -1, float("inf") or None to return all items. filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. - retrieve_edges: Whether to retrieve `alerts` or `shop_results` external ids for the multi scenario matrixes. Defaults to True. + retrieve_edges: Whether to retrieve `alerts` or `scenario_results` external ids for the multi scenario matrixes. Defaults to True. 
Returns: List of requested multi scenario matrixes @@ -575,11 +575,11 @@ def list( dm.ViewId("sp_powerops_models", "Alert", "1"), ), ( - self.shop_results_edge, - "shop_results", - dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.shopResults"), + self.scenario_results_edge, + "scenario_results", + dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.scenarioResults"), "outwards", - dm.ViewId("sp_powerops_models", "SHOPResult", "1"), + dm.ViewId("sp_powerops_models", "PriceProdCase", "1"), ), ], ) diff --git a/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix_query.py b/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix_query.py index ae583464a..ef5b3a351 100644 --- a/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix_query.py +++ b/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix_query.py @@ -14,7 +14,7 @@ if TYPE_CHECKING: from .alert_query import AlertQueryAPI - from .shop_result_query import SHOPResultQueryAPI + from .price_prod_case_query import PriceProdCaseQueryAPI class MultiScenarioMatrixQueryAPI(QueryAPI[T_DomainModelList]): @@ -85,37 +85,37 @@ def alerts( self._query_append_method(from_) return AlertQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) - def shop_results( + def scenario_results( self, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, retrieve_method: bool = False, - ) -> SHOPResultQueryAPI[T_DomainModelList]: - """Query along the shop result edges of the multi scenario matrix. + ) -> PriceProdCaseQueryAPI[T_DomainModelList]: + """Query along the scenario result edges of the multi scenario matrix. Args: external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. - limit: Maximum number of shop result edges to return. Defaults to 25. Set to -1, float("inf") or None + limit: Maximum number of scenario result edges to return. 
Defaults to 25. Set to -1, float("inf") or None to return all items. retrieve_method: Whether to retrieve the method for each multi scenario matrix or not. Returns: - SHOPResultQueryAPI: The query API for the shop result. + PriceProdCaseQueryAPI: The query API for the price prod case. """ - from .shop_result_query import SHOPResultQueryAPI + from .price_prod_case_query import PriceProdCaseQueryAPI from_ = self._builder[-1].name edge_filter = _create_edge_filter( - dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.shopResults"), + dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.scenarioResults"), external_id_prefix=external_id_prefix, space=space, ) self._builder.append( QueryStep( - name=self._builder.next_name("shop_results"), + name=self._builder.next_name("scenario_results"), expression=dm.query.EdgeResultSetExpression( filter=edge_filter, from_=from_, @@ -127,7 +127,7 @@ def shop_results( ) if retrieve_method: self._query_append_method(from_) - return SHOPResultQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) + return PriceProdCaseQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) def query( self, diff --git a/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix_scenario_results.py b/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix_scenario_results.py new file mode 100644 index 000000000..dfd2f81d9 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/multi_scenario_matrix_scenario_results.py @@ -0,0 +1,54 @@ +from __future__ import annotations + + +from cognite.client import data_modeling as dm + +from ._core import DEFAULT_LIMIT_READ, EdgeAPI, _create_edge_filter +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE + + +class MultiScenarioMatrixScenarioResultsAPI(EdgeAPI): + def list( + self, + from_multi_scenario_matrix: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + 
from_multi_scenario_matrix_space: str = DEFAULT_INSTANCE_SPACE, + to_price_prod_case: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + to_price_prod_case_space: str = DEFAULT_INSTANCE_SPACE, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit=DEFAULT_LIMIT_READ, + ) -> dm.EdgeList: + """List scenario result edges of a multi scenario matrix. + + Args: + from_multi_scenario_matrix: ID of the source multi scenario matrix. + from_multi_scenario_matrix_space: Location of the multi scenario matrixes. + to_price_prod_case: ID of the target price prod case. + to_price_prod_case_space: Location of the price prod cases. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of scenario result edges to return. Defaults to 25. Set to -1, float("inf") or None + to return all items. + + Returns: + The requested scenario result edges. + + Examples: + + List 5 scenario result edges connected to "my_multi_scenario_matrix": + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> multi_scenario_matrix = client.multi_scenario_matrix.scenario_results_edge.list("my_multi_scenario_matrix", limit=5) + + """ + filter_ = _create_edge_filter( + dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.scenarioResults"), + from_multi_scenario_matrix, + from_multi_scenario_matrix_space, + to_price_prod_case, + to_price_prod_case_space, + external_id_prefix, + space, + ) + return self._list(filter_=filter_, limit=limit) diff --git a/cognite/powerops/client/_generated/v1/_api/preprocessor_input.py b/cognite/powerops/client/_generated/v1/_api/preprocessor_input.py index 02113e869..e36ce4b4f 100644 --- a/cognite/powerops/client/_generated/v1/_api/preprocessor_input.py +++ b/cognite/powerops/client/_generated/v1/_api/preprocessor_input.py @@ -1,5 +1,6 @@ from __future__ import annotations +import 
datetime from collections.abc import Sequence from typing import overload import warnings @@ -59,7 +60,11 @@ def __call__( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario_raw: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_shop_start: datetime.date | None = None, + max_shop_start: datetime.date | None = None, + min_shop_end: datetime.date | None = None, + max_shop_end: datetime.date | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, @@ -76,7 +81,11 @@ def __call__( function_name_prefix: The prefix of the function name to filter on. function_call_id: The function call id to filter on. function_call_id_prefix: The prefix of the function call id to filter on. - scenario_raw: The scenario raw to filter on. + scenario: The scenario to filter on. + min_shop_start: The minimum value of the shop start to filter on. + max_shop_start: The maximum value of the shop start to filter on. + min_shop_end: The minimum value of the shop end to filter on. + max_shop_end: The maximum value of the shop end to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of preprocessor inputs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -97,7 +106,11 @@ def __call__( function_name_prefix, function_call_id, function_call_id_prefix, - scenario_raw, + scenario, + min_shop_start, + max_shop_start, + min_shop_end, + max_shop_end, external_id_prefix, space, (filter and dm.filters.And(filter, has_data)) or has_data, @@ -219,7 +232,11 @@ def search( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario_raw: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_shop_start: datetime.date | None = None, + max_shop_start: datetime.date | None = None, + min_shop_end: datetime.date | None = None, + max_shop_end: datetime.date | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -238,7 +255,11 @@ def search( function_name_prefix: The prefix of the function name to filter on. function_call_id: The function call id to filter on. function_call_id_prefix: The prefix of the function call id to filter on. - scenario_raw: The scenario raw to filter on. + scenario: The scenario to filter on. + min_shop_start: The minimum value of the shop start to filter on. + max_shop_start: The maximum value of the shop start to filter on. + min_shop_end: The minimum value of the shop end to filter on. + max_shop_end: The maximum value of the shop end to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of preprocessor inputs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -266,7 +287,11 @@ def search( function_name_prefix, function_call_id, function_call_id_prefix, - scenario_raw, + scenario, + min_shop_start, + max_shop_start, + min_shop_end, + max_shop_end, external_id_prefix, space, filter, @@ -294,7 +319,11 @@ def aggregate( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario_raw: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_shop_start: datetime.date | None = None, + max_shop_start: datetime.date | None = None, + min_shop_end: datetime.date | None = None, + max_shop_end: datetime.date | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -322,7 +351,11 @@ def aggregate( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario_raw: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_shop_start: datetime.date | None = None, + max_shop_start: datetime.date | None = None, + min_shop_end: datetime.date | None = None, + max_shop_end: datetime.date | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -349,7 +382,11 @@ def aggregate( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario_raw: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_shop_start: datetime.date | None = None, + max_shop_start: datetime.date | None = None, + min_shop_end: datetime.date | None = 
None, + max_shop_end: datetime.date | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -371,7 +408,11 @@ def aggregate( function_name_prefix: The prefix of the function name to filter on. function_call_id: The function call id to filter on. function_call_id_prefix: The prefix of the function call id to filter on. - scenario_raw: The scenario raw to filter on. + scenario: The scenario to filter on. + min_shop_start: The minimum value of the shop start to filter on. + max_shop_start: The maximum value of the shop start to filter on. + min_shop_end: The minimum value of the shop end to filter on. + max_shop_end: The maximum value of the shop end to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of preprocessor inputs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -400,7 +441,11 @@ def aggregate( function_name_prefix, function_call_id, function_call_id_prefix, - scenario_raw, + scenario, + min_shop_start, + max_shop_start, + min_shop_end, + max_shop_end, external_id_prefix, space, filter, @@ -431,7 +476,11 @@ def histogram( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario_raw: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_shop_start: datetime.date | None = None, + max_shop_start: datetime.date | None = None, + min_shop_end: datetime.date | None = None, + max_shop_end: datetime.date | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -452,7 +501,11 @@ def histogram( function_name_prefix: The prefix of the function name to filter on. 
function_call_id: The function call id to filter on. function_call_id_prefix: The prefix of the function call id to filter on. - scenario_raw: The scenario raw to filter on. + scenario: The scenario to filter on. + min_shop_start: The minimum value of the shop start to filter on. + max_shop_start: The maximum value of the shop start to filter on. + min_shop_end: The minimum value of the shop end to filter on. + max_shop_end: The maximum value of the shop end to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of preprocessor inputs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -472,7 +525,11 @@ def histogram( function_name_prefix, function_call_id, function_call_id_prefix, - scenario_raw, + scenario, + min_shop_start, + max_shop_start, + min_shop_end, + max_shop_end, external_id_prefix, space, filter, @@ -498,7 +555,11 @@ def list( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario_raw: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_shop_start: datetime.date | None = None, + max_shop_start: datetime.date | None = None, + min_shop_end: datetime.date | None = None, + max_shop_end: datetime.date | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -515,7 +576,11 @@ def list( function_name_prefix: The prefix of the function name to filter on. function_call_id: The function call id to filter on. function_call_id_prefix: The prefix of the function call id to filter on. - scenario_raw: The scenario raw to filter on. + scenario: The scenario to filter on. + min_shop_start: The minimum value of the shop start to filter on. 
+ max_shop_start: The maximum value of the shop start to filter on. + min_shop_end: The minimum value of the shop end to filter on. + max_shop_end: The maximum value of the shop end to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of preprocessor inputs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -543,7 +608,11 @@ def list( function_name_prefix, function_call_id, function_call_id_prefix, - scenario_raw, + scenario, + min_shop_start, + max_shop_start, + min_shop_end, + max_shop_end, external_id_prefix, space, filter, diff --git a/cognite/powerops/client/_generated/v1/_api/preprocessor_input_query.py b/cognite/powerops/client/_generated/v1/_api/preprocessor_input_query.py index 0fe6f2e0a..449715065 100644 --- a/cognite/powerops/client/_generated/v1/_api/preprocessor_input_query.py +++ b/cognite/powerops/client/_generated/v1/_api/preprocessor_input_query.py @@ -8,7 +8,7 @@ from cognite.powerops.client._generated.v1.data_classes import ( DomainModelCore, PreprocessorInput, - ScenarioRaw, + Scenario, ) from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter @@ -39,35 +39,35 @@ def __init__( def query( self, - retrieve_scenario_raw: bool = False, + retrieve_scenario: bool = False, ) -> T_DomainModelList: """Execute query and return the result. Args: - retrieve_scenario_raw: Whether to retrieve the scenario raw for each preprocessor input or not. + retrieve_scenario: Whether to retrieve the scenario for each preprocessor input or not. Returns: The list of the source nodes of the query. 
""" from_ = self._builder[-1].name - if retrieve_scenario_raw: - self._query_append_scenario_raw(from_) + if retrieve_scenario: + self._query_append_scenario(from_) return self._query() - def _query_append_scenario_raw(self, from_: str) -> None: - view_id = self._view_by_read_class[ScenarioRaw] + def _query_append_scenario(self, from_: str) -> None: + view_id = self._view_by_read_class[Scenario] self._builder.append( QueryStep( - name=self._builder.next_name("scenario_raw"), + name=self._builder.next_name("scenario"), expression=dm.query.NodeResultSetExpression( filter=dm.filters.HasData(views=[view_id]), from_=from_, - through=self._view_by_read_class[PreprocessorInput].as_property_ref("scenarioRaw"), + through=self._view_by_read_class[PreprocessorInput].as_property_ref("scenario"), direction="outwards", ), select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), max_retrieve_limit=-1, - result_cls=ScenarioRaw, + result_cls=Scenario, ), ) diff --git a/cognite/powerops/client/_generated/v1/_api/preprocessor_output.py b/cognite/powerops/client/_generated/v1/_api/preprocessor_output.py index fec06480c..dea618bbb 100644 --- a/cognite/powerops/client/_generated/v1/_api/preprocessor_output.py +++ b/cognite/powerops/client/_generated/v1/_api/preprocessor_output.py @@ -61,7 +61,7 @@ def __call__( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, input_: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, @@ -79,7 +79,7 @@ def __call__( function_name_prefix: The prefix of the function name to filter on. function_call_id: The function call id to filter on. 
function_call_id_prefix: The prefix of the function call id to filter on. - scenario: The scenario to filter on. + case: The case to filter on. input_: The input to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. @@ -101,7 +101,7 @@ def __call__( function_name_prefix, function_call_id, function_call_id_prefix, - scenario, + case, input_, external_id_prefix, space, @@ -241,7 +241,7 @@ def search( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, input_: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, @@ -261,7 +261,7 @@ def search( function_name_prefix: The prefix of the function name to filter on. function_call_id: The function call id to filter on. function_call_id_prefix: The prefix of the function call id to filter on. - scenario: The scenario to filter on. + case: The case to filter on. input_: The input to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. 
@@ -290,7 +290,7 @@ def search( function_name_prefix, function_call_id, function_call_id_prefix, - scenario, + case, input_, external_id_prefix, space, @@ -319,7 +319,7 @@ def aggregate( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, input_: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, @@ -348,7 +348,7 @@ def aggregate( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, input_: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, @@ -376,7 +376,7 @@ def aggregate( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, input_: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, @@ -399,7 +399,7 @@ def aggregate( function_name_prefix: The prefix of the function name to filter on. function_call_id: The function call id to filter on. function_call_id_prefix: The prefix of the function call id to filter on. - scenario: The scenario to filter on. + case: The case to filter on. input_: The input to filter on. 
external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. @@ -429,7 +429,7 @@ def aggregate( function_name_prefix, function_call_id, function_call_id_prefix, - scenario, + case, input_, external_id_prefix, space, @@ -461,7 +461,7 @@ def histogram( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, input_: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, @@ -483,7 +483,7 @@ def histogram( function_name_prefix: The prefix of the function name to filter on. function_call_id: The function call id to filter on. function_call_id_prefix: The prefix of the function call id to filter on. - scenario: The scenario to filter on. + case: The case to filter on. input_: The input to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. @@ -504,7 +504,7 @@ def histogram( function_name_prefix, function_call_id, function_call_id_prefix, - scenario, + case, input_, external_id_prefix, space, @@ -531,7 +531,7 @@ def list( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, input_: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, @@ -550,7 +550,7 @@ def list( function_name_prefix: The prefix of the function name to filter on. function_call_id: The function call id to filter on. 
function_call_id_prefix: The prefix of the function call id to filter on. - scenario: The scenario to filter on. + case: The case to filter on. input_: The input to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. @@ -580,7 +580,7 @@ def list( function_name_prefix, function_call_id, function_call_id_prefix, - scenario, + case, input_, external_id_prefix, space, diff --git a/cognite/powerops/client/_generated/v1/_api/preprocessor_output_query.py b/cognite/powerops/client/_generated/v1/_api/preprocessor_output_query.py index 91ddd932e..4735e0989 100644 --- a/cognite/powerops/client/_generated/v1/_api/preprocessor_output_query.py +++ b/cognite/powerops/client/_generated/v1/_api/preprocessor_output_query.py @@ -8,7 +8,7 @@ from cognite.powerops.client._generated.v1.data_classes import ( DomainModelCore, PreprocessorOutput, - Scenario, + Case, PreprocessorInput, ) from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter @@ -46,7 +46,7 @@ def alerts( external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, - retrieve_scenario: bool = False, + retrieve_case: bool = False, retrieve_input_: bool = False, ) -> AlertQueryAPI[T_DomainModelList]: """Query along the alert edges of the preprocessor output. @@ -56,7 +56,7 @@ def alerts( space: The space to filter on. limit: Maximum number of alert edges to return. Defaults to 25. Set to -1, float("inf") or None to return all items. - retrieve_scenario: Whether to retrieve the scenario for each preprocessor output or not. + retrieve_case: Whether to retrieve the case for each preprocessor output or not. retrieve_input_: Whether to retrieve the input for each preprocessor output or not. 
Returns: @@ -83,21 +83,21 @@ def alerts( max_retrieve_limit=limit, ) ) - if retrieve_scenario: - self._query_append_scenario(from_) + if retrieve_case: + self._query_append_case(from_) if retrieve_input_: self._query_append_input_(from_) return AlertQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) def query( self, - retrieve_scenario: bool = False, + retrieve_case: bool = False, retrieve_input_: bool = False, ) -> T_DomainModelList: """Execute query and return the result. Args: - retrieve_scenario: Whether to retrieve the scenario for each preprocessor output or not. + retrieve_case: Whether to retrieve the case for each preprocessor output or not. retrieve_input_: Whether to retrieve the input for each preprocessor output or not. Returns: @@ -105,26 +105,26 @@ def query( """ from_ = self._builder[-1].name - if retrieve_scenario: - self._query_append_scenario(from_) + if retrieve_case: + self._query_append_case(from_) if retrieve_input_: self._query_append_input_(from_) return self._query() - def _query_append_scenario(self, from_: str) -> None: - view_id = self._view_by_read_class[Scenario] + def _query_append_case(self, from_: str) -> None: + view_id = self._view_by_read_class[Case] self._builder.append( QueryStep( - name=self._builder.next_name("scenario"), + name=self._builder.next_name("case"), expression=dm.query.NodeResultSetExpression( filter=dm.filters.HasData(views=[view_id]), from_=from_, - through=self._view_by_read_class[PreprocessorOutput].as_property_ref("scenario"), + through=self._view_by_read_class[PreprocessorOutput].as_property_ref("case"), direction="outwards", ), select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), max_retrieve_limit=-1, - result_cls=Scenario, + result_cls=Case, ), ) diff --git a/cognite/powerops/client/_generated/v1/_api/price_prod_case.py b/cognite/powerops/client/_generated/v1/_api/price_prod_case.py new file mode 100644 index 000000000..a1cb244dd --- /dev/null +++ 
b/cognite/powerops/client/_generated/v1/_api/price_prod_case.py @@ -0,0 +1,362 @@ +from __future__ import annotations + +from collections.abc import Sequence +from typing import overload +import warnings + +from cognite.client import CogniteClient +from cognite.client import data_modeling as dm +from cognite.client.data_classes.data_modeling.instances import InstanceAggregationResultList + +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + DomainModelWrite, + ResourcesWriteResult, + PriceProdCase, + PriceProdCaseWrite, + PriceProdCaseFields, + PriceProdCaseList, + PriceProdCaseWriteList, +) +from cognite.powerops.client._generated.v1.data_classes._price_prod_case import ( + _PRICEPRODCASE_PROPERTIES_BY_FIELD, + _create_price_prod_case_filter, +) +from ._core import ( + DEFAULT_LIMIT_READ, + DEFAULT_QUERY_LIMIT, + Aggregations, + NodeAPI, + SequenceNotStr, + QueryStep, + QueryBuilder, +) +from .price_prod_case_price import PriceProdCasePriceAPI +from .price_prod_case_production import PriceProdCaseProductionAPI +from .price_prod_case_query import PriceProdCaseQueryAPI + + +class PriceProdCaseAPI(NodeAPI[PriceProdCase, PriceProdCaseWrite, PriceProdCaseList]): + def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainModelCore], dm.ViewId]): + view_id = view_by_read_class[PriceProdCase] + super().__init__( + client=client, + sources=view_id, + class_type=PriceProdCase, + class_list=PriceProdCaseList, + class_write_list=PriceProdCaseWriteList, + view_by_read_class=view_by_read_class, + ) + self._view_id = view_id + self.price = PriceProdCasePriceAPI(client, view_id) + self.production = PriceProdCaseProductionAPI(client, view_id) + + def __call__( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + 
limit: int | None = DEFAULT_QUERY_LIMIT, + filter: dm.Filter | None = None, + ) -> PriceProdCaseQueryAPI[PriceProdCaseList]: + """Query starting at price prod cases. + + Args: + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of price prod cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query API for price prod cases. + + """ + has_data = dm.filters.HasData(views=[self._view_id]) + filter_ = _create_price_prod_case_filter( + self._view_id, + case, + external_id_prefix, + space, + (filter and dm.filters.And(filter, has_data)) or has_data, + ) + builder = QueryBuilder(PriceProdCaseList) + return PriceProdCaseQueryAPI(self._client, builder, self._view_by_read_class, filter_, limit) + + def apply( + self, + price_prod_case: PriceProdCaseWrite | Sequence[PriceProdCaseWrite], + replace: bool = False, + write_none: bool = False, + ) -> ResourcesWriteResult: + """Add or update (upsert) price prod cases. + + Args: + price_prod_case: Price prod case or sequence of price prod cases to upsert. + replace (bool): How do we behave when a property value exists? Do we replace all matching and existing values with the supplied values (true)? + Or should we merge in new values for properties together with the existing values (false)? Note: This setting applies for all nodes or edges specified in the ingestion call. + write_none (bool): This method, will by default, skip properties that are set to None. However, if you want to set properties to None, + you can set this parameter to True. Note this only applies to properties that are nullable. + Returns: + Created instance(s), i.e., nodes, edges, and time series. 
+ + Examples: + + Create a new price_prod_case: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> from cognite.powerops.client._generated.v1.data_classes import PriceProdCaseWrite + >>> client = PowerOpsModelsV1Client() + >>> price_prod_case = PriceProdCaseWrite(external_id="my_price_prod_case", ...) + >>> result = client.price_prod_case.apply(price_prod_case) + + """ + warnings.warn( + "The .apply method is deprecated and will be removed in v1.0. " + "Please use the .upsert method on the client instead. This means instead of " + "`my_client.price_prod_case.apply(my_items)` please use `my_client.upsert(my_items)`." + "The motivation is that all apply methods are the same, and having one apply method per API " + " class encourages users to create items in small batches, which is inefficient." + "In addition, .upsert method is more descriptive of what the method does.", + UserWarning, + stacklevel=2, + ) + return self._apply(price_prod_case, replace, write_none) + + def delete( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> dm.InstancesDeleteResult: + """Delete one or more price prod case. + + Args: + external_id: External id of the price prod case to delete. + space: The space where all the price prod case are located. + + Returns: + The instance(s), i.e., nodes and edges which has been deleted. Empty list if nothing was deleted. + + Examples: + + Delete price_prod_case by id: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> client.price_prod_case.delete("my_price_prod_case") + """ + warnings.warn( + "The .delete method is deprecated and will be removed in v1.0. " + "Please use the .delete method on the client instead. This means instead of " + "`my_client.price_prod_case.delete(my_ids)` please use `my_client.delete(my_ids)`." 
+ "The motivation is that all delete methods are the same, and having one delete method per API " + " class encourages users to delete items in small batches, which is inefficient.", + UserWarning, + stacklevel=2, + ) + return self._delete(external_id, space) + + @overload + def retrieve(self, external_id: str, space: str = DEFAULT_INSTANCE_SPACE) -> PriceProdCase | None: ... + + @overload + def retrieve(self, external_id: SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE) -> PriceProdCaseList: ... + + def retrieve( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> PriceProdCase | PriceProdCaseList | None: + """Retrieve one or more price prod cases by id(s). + + Args: + external_id: External id or list of external ids of the price prod cases. + space: The space where all the price prod cases are located. + + Returns: + The requested price prod cases. + + Examples: + + Retrieve price_prod_case by id: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_case = client.price_prod_case.retrieve("my_price_prod_case") + + """ + return self._retrieve(external_id, space) + + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: PriceProdCaseFields | Sequence[PriceProdCaseFields] | None = None, + group_by: None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue]: ... 
+ + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: PriceProdCaseFields | Sequence[PriceProdCaseFields] | None = None, + group_by: PriceProdCaseFields | Sequence[PriceProdCaseFields] = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> InstanceAggregationResultList: ... + + def aggregate( + self, + aggregate: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: PriceProdCaseFields | Sequence[PriceProdCaseFields] | None = None, + group_by: PriceProdCaseFields | Sequence[PriceProdCaseFields] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue] | InstanceAggregationResultList: + """Aggregate data across price prod cases + + Args: + aggregate: The aggregation to perform. + property: The property to perform aggregation on. + group_by: The property to group by when doing the aggregation. + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of price prod cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Aggregation results. 
+ + Examples: + + Count price prod cases in space `my_space`: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> result = client.price_prod_case.aggregate("count", space="my_space") + + """ + + filter_ = _create_price_prod_case_filter( + self._view_id, + case, + external_id_prefix, + space, + filter, + ) + return self._aggregate( + self._view_id, + aggregate, + _PRICEPRODCASE_PROPERTIES_BY_FIELD, + property, + group_by, + None, + None, + limit, + filter_, + ) + + def histogram( + self, + property: PriceProdCaseFields, + interval: float, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> dm.aggregations.HistogramValue: + """Produces histograms for price prod cases + + Args: + property: The property to use as the value in the histogram. + interval: The interval to use for the histogram bins. + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of price prod cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Bucketed histogram results. 
+ + """ + filter_ = _create_price_prod_case_filter( + self._view_id, + case, + external_id_prefix, + space, + filter, + ) + return self._histogram( + self._view_id, + property, + interval, + _PRICEPRODCASE_PROPERTIES_BY_FIELD, + None, + None, + limit, + filter_, + ) + + def list( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> PriceProdCaseList: + """List/filter price prod cases + + Args: + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of price prod cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. 
+ + Returns: + List of requested price prod cases + + Examples: + + List price prod cases and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_cases = client.price_prod_case.list(limit=5) + + """ + filter_ = _create_price_prod_case_filter( + self._view_id, + case, + external_id_prefix, + space, + filter, + ) + return self._list(limit=limit, filter=filter_) diff --git a/cognite/powerops/client/_generated/v1/_api/price_prod_case_price.py b/cognite/powerops/client/_generated/v1/_api/price_prod_case_price.py new file mode 100644 index 000000000..e98183533 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/price_prod_case_price.py @@ -0,0 +1,501 @@ +from __future__ import annotations + +import datetime +from collections.abc import Sequence +from typing import Literal + +import pandas as pd +from cognite.client import CogniteClient +from cognite.client import data_modeling as dm +from cognite.client.data_classes import Datapoints, DatapointsArrayList, DatapointsList, TimeSeriesList +from cognite.client.data_classes.datapoints import Aggregate +from cognite.powerops.client._generated.v1.data_classes._price_prod_case import _create_price_prod_case_filter +from ._core import DEFAULT_LIMIT_READ, INSTANCE_QUERY_LIMIT + +ColumnNames = Literal["price", "production"] + + +class PriceProdCasePriceQuery: + def __init__( + self, + client: CogniteClient, + view_id: dm.ViewId, + timeseries_limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ): + self._client = client + self._view_id = view_id + self._timeseries_limit = timeseries_limit + self._filter = filter + + def retrieve( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = 
None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> DatapointsList: + """`Retrieve datapoints for the `price_prod_case.price` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsList`` with the requested datapoints. 
+ + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_price' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_case_datapoints = client.price_prod_case.price(external_id="my_price").retrieve(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsList([]) + + def retrieve_arrays( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> DatapointsArrayList: + """`Retrieve numpy arrays for the `price_prod_case.price` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. 
Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsArrayList`` with the requested datapoints. + + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_price' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_case_datapoints = client.price_prod_case.price(external_id="my_price").retrieve_array(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_arrays( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsArrayList([]) + + def retrieve_dataframe( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, 
+ include_outside_points: bool = False, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "price", + ) -> pd.DataFrame: + """`Retrieve DataFrames for the `price_prod_case.price` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. 
Default: False + uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False + include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True + include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False + column_names: Which property to use for column names. Defauts to price + + + Returns: + A ``DataFrame`` with the requested datapoints. + + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_price' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_case_datapoints = client.price_prod_case.price(external_id="my_price").retrieve_dataframe(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def retrieve_dataframe_in_tz( + self, + 
start: datetime.datetime, + end: datetime.datetime, + *, + aggregates: Aggregate | Sequence[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "price", + ) -> pd.DataFrame: + """Retrieve DataFrames for the `price_prod_case.price` timeseries in Timezone. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. + end: Exclusive end + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. 
Default: False
+            uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False
+            include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True
+            include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False
+            column_names: Which property to use for column names. Defaults to price
+
+
+        Returns:
+            A ``DataFrame`` with the requested datapoints.
+
+        Examples:
+
+            In this example,
+            get weekly aggregates for the 'my_price' for the first month of 2023 in Oslo time:
+
+                >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client
+                >>> from datetime import datetime, timezone
+                >>> from zoneinfo import ZoneInfo
+                >>> client = PowerOpsModelsV1Client()
+                >>> price_prod_case_datapoints = client.price_prod_case.price(
+                ...     external_id="my_price").retrieve_dataframe_in_tz(
+                ...         datetime(2023, 1, 1, tzinfo=ZoneInfo("Europe/Oslo")),
+                ...         datetime(2023, 1, 2, tzinfo=ZoneInfo("Europe/Oslo")),
+                ...         aggregates="average",
+                ...         granularity="1week",
+                ...
) + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe_in_tz( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def retrieve_latest( + self, + before: None | int | str | datetime.datetime = None, + ) -> Datapoints | DatapointsList | None: + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_latest( + external_id=list(external_ids), + before=before, + ) + else: + return None + + def _retrieve_timeseries_external_ids_with_extra( + self, extra_properties: ColumnNames | list[ColumnNames] = "price" + ) -> dict[str, list[str]]: + return _retrieve_timeseries_external_ids_with_extra_price( + self._client, + self._view_id, + self._filter, + self._timeseries_limit, + extra_properties, + ) + + @staticmethod + def _rename_columns( + external_ids: dict[str, list[str]], + df: pd.DataFrame, + column_names: ColumnNames | list[ColumnNames], + include_aggregate_name: bool, + include_granularity_name: bool, + ) -> pd.DataFrame: + if isinstance(column_names, str) and column_names == "price": + return df + splits = sum(included for included in [include_aggregate_name, include_granularity_name]) + if splits == 0: + df.columns = ["-".join(external_ids[external_id]) for external_id in df.columns] + else: + column_parts = (col.rsplit("|", maxsplit=splits) for col in df.columns) + df.columns = [ + 
"-".join(external_ids[external_id]) + "|" + "|".join(parts) for external_id, *parts in column_parts + ] + return df + + +class PriceProdCasePriceAPI: + def __init__(self, client: CogniteClient, view_id: dm.ViewId): + self._client = client + self._view_id = view_id + + def __call__( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> PriceProdCasePriceQuery: + """Query timeseries `price_prod_case.price` + + Args: + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of price prod cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query object that can be used to retrieve datapoins for the price_prod_case.price timeseries + selected in this method. 
+ + Examples: + + Retrieve all data for 5 price_prod_case.price timeseries: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_cases = client.price_prod_case.price(limit=5).retrieve() + + """ + filter_ = _create_price_prod_case_filter( + self._view_id, + case, + external_id_prefix, + space, + filter, + ) + + return PriceProdCasePriceQuery( + client=self._client, + view_id=self._view_id, + timeseries_limit=limit, + filter=filter_, + ) + + def list( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> TimeSeriesList: + """List timeseries `price_prod_case.price` + + Args: + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of price prod cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + List of Timeseries price_prod_case.price. 
+ + Examples: + + List price_prod_case.price and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_cases = client.price_prod_case.price.list(limit=5) + + """ + filter_ = _create_price_prod_case_filter( + self._view_id, + case, + external_id_prefix, + space, + filter, + ) + external_ids = _retrieve_timeseries_external_ids_with_extra_price(self._client, self._view_id, filter_, limit) + if external_ids: + return self._client.time_series.retrieve_multiple(external_ids=list(external_ids)) + else: + return TimeSeriesList([]) + + +def _retrieve_timeseries_external_ids_with_extra_price( + client: CogniteClient, + view_id: dm.ViewId, + filter_: dm.Filter | None, + limit: int, + extra_properties: ColumnNames | list[ColumnNames] = "price", +) -> dict[str, list[str]]: + limit = float("inf") if limit is None or limit == -1 else limit + properties = ["price"] + if extra_properties == "price": + ... + elif isinstance(extra_properties, str) and extra_properties != "price": + properties.append(extra_properties) + elif isinstance(extra_properties, list): + properties.extend([prop for prop in extra_properties if prop != "price"]) + else: + raise ValueError(f"Invalid value for extra_properties: {extra_properties}") + + if isinstance(extra_properties, str): + extra_list = [extra_properties] + else: + extra_list = extra_properties + has_data = dm.filters.HasData(views=[view_id]) + has_property = dm.filters.Exists(property=view_id.as_property_ref("price")) + filter_ = dm.filters.And(filter_, has_data, has_property) if filter_ else dm.filters.And(has_data, has_property) + + cursor = None + external_ids: dict[str, list[str]] = {} + total_retrieved = 0 + while True: + query_limit = max(min(INSTANCE_QUERY_LIMIT, limit - total_retrieved), 0) + selected_nodes = dm.query.NodeResultSetExpression(filter=filter_, limit=query_limit) + query = dm.query.Query( + with_={ + "nodes": selected_nodes, + }, 
+ select={ + "nodes": dm.query.Select( + [dm.query.SourceSelector(view_id, properties)], + ) + }, + cursors={"nodes": cursor}, + ) + result = client.data_modeling.instances.query(query) + batch_external_ids = { + node.properties[view_id]["price"]: [node.properties[view_id].get(prop, "") for prop in extra_list] + for node in result.data["nodes"].data + } + total_retrieved += len(batch_external_ids) + external_ids.update(batch_external_ids) + cursor = result.cursors["nodes"] + if total_retrieved >= limit or cursor is None: + break + return external_ids diff --git a/cognite/powerops/client/_generated/v1/_api/price_prod_case_production.py b/cognite/powerops/client/_generated/v1/_api/price_prod_case_production.py new file mode 100644 index 000000000..4f3395ce0 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/price_prod_case_production.py @@ -0,0 +1,503 @@ +from __future__ import annotations + +import datetime +from collections.abc import Sequence +from typing import Literal + +import pandas as pd +from cognite.client import CogniteClient +from cognite.client import data_modeling as dm +from cognite.client.data_classes import Datapoints, DatapointsArrayList, DatapointsList, TimeSeriesList +from cognite.client.data_classes.datapoints import Aggregate +from cognite.powerops.client._generated.v1.data_classes._price_prod_case import _create_price_prod_case_filter +from ._core import DEFAULT_LIMIT_READ, INSTANCE_QUERY_LIMIT + +ColumnNames = Literal["price", "production"] + + +class PriceProdCaseProductionQuery: + def __init__( + self, + client: CogniteClient, + view_id: dm.ViewId, + timeseries_limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ): + self._client = client + self._view_id = view_id + self._timeseries_limit = timeseries_limit + self._filter = filter + + def retrieve( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | 
None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> DatapointsList: + """`Retrieve datapoints for the `price_prod_case.production` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsList`` with the requested datapoints. 
+ + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_production' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_case_datapoints = client.price_prod_case.production(external_id="my_production").retrieve(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsList([]) + + def retrieve_arrays( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> DatapointsArrayList: + """`Retrieve numpy arrays for the `price_prod_case.production` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. 
+ end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsArrayList`` with the requested datapoints. + + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_production' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_case_datapoints = client.price_prod_case.production(external_id="my_production").retrieve_array(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_arrays( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsArrayList([]) + + def retrieve_dataframe( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | 
None = None, + limit: int | None = None, + include_outside_points: bool = False, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "production", + ) -> pd.DataFrame: + """`Retrieve DataFrames for the `price_prod_case.production` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. 
Default: False + uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False + include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True + include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False + column_names: Which property to use for column names. Defauts to production + + + Returns: + A ``DataFrame`` with the requested datapoints. + + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_production' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_case_datapoints = client.price_prod_case.production(external_id="my_production").retrieve_dataframe(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def 
retrieve_dataframe_in_tz( + self, + start: datetime.datetime, + end: datetime.datetime, + *, + aggregates: Aggregate | Sequence[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "production", + ) -> pd.DataFrame: + """Retrieve DataFrames for the `price_prod_case.production` timeseries in Timezone. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. + end: Exclusive end + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. 
Default: False + uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False + include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True + include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False + column_names: Which property to use for column names. Defauts to production + + + Returns: + A ``DataFrame`` with the requested datapoints. + + Examples: + + In this example, + get weekly aggregates for the 'my_production' for the first month of 2023 in Oslo time: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> from datetime import datetime, timezone + >>> client = PowerOpsModelsV1Client() + >>> price_prod_case_datapoints = client.price_prod_case.production( + ... external_id="my_production").retrieve_dataframe_in_timezone( + ... datetime(2023, 1, 1, tzinfo=ZoneInfo("Europe/Oslo")), + ... datetime(2023, 1, 2, tzinfo=ZoneInfo("Europe/Oslo")), + ... aggregates="average", + ... granularity="1week", + ... 
) + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe_in_tz( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def retrieve_latest( + self, + before: None | int | str | datetime.datetime = None, + ) -> Datapoints | DatapointsList | None: + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_latest( + external_id=list(external_ids), + before=before, + ) + else: + return None + + def _retrieve_timeseries_external_ids_with_extra( + self, extra_properties: ColumnNames | list[ColumnNames] = "production" + ) -> dict[str, list[str]]: + return _retrieve_timeseries_external_ids_with_extra_production( + self._client, + self._view_id, + self._filter, + self._timeseries_limit, + extra_properties, + ) + + @staticmethod + def _rename_columns( + external_ids: dict[str, list[str]], + df: pd.DataFrame, + column_names: ColumnNames | list[ColumnNames], + include_aggregate_name: bool, + include_granularity_name: bool, + ) -> pd.DataFrame: + if isinstance(column_names, str) and column_names == "production": + return df + splits = sum(included for included in [include_aggregate_name, include_granularity_name]) + if splits == 0: + df.columns = ["-".join(external_ids[external_id]) for external_id in df.columns] + else: + column_parts = (col.rsplit("|", maxsplit=splits) for col in df.columns) + df.columns = 
[ + "-".join(external_ids[external_id]) + "|" + "|".join(parts) for external_id, *parts in column_parts + ] + return df + + +class PriceProdCaseProductionAPI: + def __init__(self, client: CogniteClient, view_id: dm.ViewId): + self._client = client + self._view_id = view_id + + def __call__( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> PriceProdCaseProductionQuery: + """Query timeseries `price_prod_case.production` + + Args: + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of price prod cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query object that can be used to retrieve datapoins for the price_prod_case.production timeseries + selected in this method. 
+ + Examples: + + Retrieve all data for 5 price_prod_case.production timeseries: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_cases = client.price_prod_case.production(limit=5).retrieve() + + """ + filter_ = _create_price_prod_case_filter( + self._view_id, + case, + external_id_prefix, + space, + filter, + ) + + return PriceProdCaseProductionQuery( + client=self._client, + view_id=self._view_id, + timeseries_limit=limit, + filter=filter_, + ) + + def list( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> TimeSeriesList: + """List timeseries `price_prod_case.production` + + Args: + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of price prod cases to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + List of Timeseries price_prod_case.production. 
+ + Examples: + + List price_prod_case.production and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> price_prod_cases = client.price_prod_case.production.list(limit=5) + + """ + filter_ = _create_price_prod_case_filter( + self._view_id, + case, + external_id_prefix, + space, + filter, + ) + external_ids = _retrieve_timeseries_external_ids_with_extra_production( + self._client, self._view_id, filter_, limit + ) + if external_ids: + return self._client.time_series.retrieve_multiple(external_ids=list(external_ids)) + else: + return TimeSeriesList([]) + + +def _retrieve_timeseries_external_ids_with_extra_production( + client: CogniteClient, + view_id: dm.ViewId, + filter_: dm.Filter | None, + limit: int, + extra_properties: ColumnNames | list[ColumnNames] = "production", +) -> dict[str, list[str]]: + limit = float("inf") if limit is None or limit == -1 else limit + properties = ["production"] + if extra_properties == "production": + ... 
+ elif isinstance(extra_properties, str) and extra_properties != "production": + properties.append(extra_properties) + elif isinstance(extra_properties, list): + properties.extend([prop for prop in extra_properties if prop != "production"]) + else: + raise ValueError(f"Invalid value for extra_properties: {extra_properties}") + + if isinstance(extra_properties, str): + extra_list = [extra_properties] + else: + extra_list = extra_properties + has_data = dm.filters.HasData(views=[view_id]) + has_property = dm.filters.Exists(property=view_id.as_property_ref("production")) + filter_ = dm.filters.And(filter_, has_data, has_property) if filter_ else dm.filters.And(has_data, has_property) + + cursor = None + external_ids: dict[str, list[str]] = {} + total_retrieved = 0 + while True: + query_limit = max(min(INSTANCE_QUERY_LIMIT, limit - total_retrieved), 0) + selected_nodes = dm.query.NodeResultSetExpression(filter=filter_, limit=query_limit) + query = dm.query.Query( + with_={ + "nodes": selected_nodes, + }, + select={ + "nodes": dm.query.Select( + [dm.query.SourceSelector(view_id, properties)], + ) + }, + cursors={"nodes": cursor}, + ) + result = client.data_modeling.instances.query(query) + batch_external_ids = { + node.properties[view_id]["production"]: [node.properties[view_id].get(prop, "") for prop in extra_list] + for node in result.data["nodes"].data + } + total_retrieved += len(batch_external_ids) + external_ids.update(batch_external_ids) + cursor = result.cursors["nodes"] + if total_retrieved >= limit or cursor is None: + break + return external_ids diff --git a/cognite/powerops/client/_generated/v1/_api/price_prod_case_query.py b/cognite/powerops/client/_generated/v1/_api/price_prod_case_query.py new file mode 100644 index 000000000..7e8a6222c --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/price_prod_case_query.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +import datetime +from typing import TYPE_CHECKING + +from cognite.client 
import data_modeling as dm, CogniteClient + +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + PriceProdCase, + Case, +) +from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter + + +class PriceProdCaseQueryAPI(QueryAPI[T_DomainModelList]): + def __init__( + self, + client: CogniteClient, + builder: QueryBuilder[T_DomainModelList], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId], + filter_: dm.filters.Filter | None = None, + limit: int = DEFAULT_QUERY_LIMIT, + ): + super().__init__(client, builder, view_by_read_class) + + self._builder.append( + QueryStep( + name=self._builder.next_name("price_prod_case"), + expression=dm.query.NodeResultSetExpression( + from_=self._builder[-1].name if self._builder else None, + filter=filter_, + ), + select=dm.query.Select([dm.query.SourceSelector(self._view_by_read_class[PriceProdCase], ["*"])]), + result_cls=PriceProdCase, + max_retrieve_limit=limit, + ) + ) + + def query( + self, + retrieve_case: bool = False, + ) -> T_DomainModelList: + """Execute query and return the result. + + Args: + retrieve_case: Whether to retrieve the case for each price prod case or not. + + Returns: + The list of the source nodes of the query. 
+ + """ + from_ = self._builder[-1].name + if retrieve_case: + self._query_append_case(from_) + return self._query() + + def _query_append_case(self, from_: str) -> None: + view_id = self._view_by_read_class[Case] + self._builder.append( + QueryStep( + name=self._builder.next_name("case"), + expression=dm.query.NodeResultSetExpression( + filter=dm.filters.HasData(views=[view_id]), + from_=from_, + through=self._view_by_read_class[PriceProdCase].as_property_ref("case"), + direction="outwards", + ), + select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), + max_retrieve_limit=-1, + result_cls=Case, + ), + ) diff --git a/cognite/powerops/client/_generated/v1/_api/scenario.py b/cognite/powerops/client/_generated/v1/_api/scenario.py index fc7f2feec..aeb341110 100644 --- a/cognite/powerops/client/_generated/v1/_api/scenario.py +++ b/cognite/powerops/client/_generated/v1/_api/scenario.py @@ -1,6 +1,5 @@ from __future__ import annotations -import datetime from collections.abc import Sequence from typing import overload import warnings @@ -56,22 +55,10 @@ def __call__( self, name: str | list[str] | None = None, name_prefix: str | None = None, - shop_version: str | list[str] | None = None, - shop_version_prefix: str | None = None, model_template: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + commands: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, source: str | list[str] | None = None, source_prefix: str | None = None, - shop_start_specification: str | list[str] | None = None, - shop_start_specification_prefix: str | None = None, - shop_end_specification: str | list[str] | None = None, - shop_end_specification_prefix: str | None = None, - min_shop_start: datetime.datetime | None = None, - max_shop_start: datetime.datetime | None = None, - min_shop_end: datetime.datetime | None = None, - max_shop_end: datetime.datetime | None = None, - min_bid_date: datetime.date | None = None, - max_bid_date: datetime.date | 
None = None, - is_ready: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, @@ -82,22 +69,10 @@ def __call__( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - shop_version: The shop version to filter on. - shop_version_prefix: The prefix of the shop version to filter on. model_template: The model template to filter on. + commands: The command to filter on. source: The source to filter on. source_prefix: The prefix of the source to filter on. - shop_start_specification: The shop start specification to filter on. - shop_start_specification_prefix: The prefix of the shop start specification to filter on. - shop_end_specification: The shop end specification to filter on. - shop_end_specification_prefix: The prefix of the shop end specification to filter on. - min_shop_start: The minimum value of the shop start to filter on. - max_shop_start: The maximum value of the shop start to filter on. - min_shop_end: The minimum value of the shop end to filter on. - max_shop_end: The maximum value of the shop end to filter on. - min_bid_date: The minimum value of the bid date to filter on. - max_bid_date: The maximum value of the bid date to filter on. - is_ready: The is ready to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -112,22 +87,10 @@ def __call__( self._view_id, name, name_prefix, - shop_version, - shop_version_prefix, model_template, + commands, source, source_prefix, - shop_start_specification, - shop_start_specification_prefix, - shop_end_specification, - shop_end_specification_prefix, - min_shop_start, - max_shop_start, - min_shop_end, - max_shop_end, - min_bid_date, - max_bid_date, - is_ready, external_id_prefix, space, (filter and dm.filters.And(filter, has_data)) or has_data, @@ -258,22 +221,10 @@ def search( properties: ScenarioTextFields | Sequence[ScenarioTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - shop_version: str | list[str] | None = None, - shop_version_prefix: str | None = None, model_template: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + commands: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, source: str | list[str] | None = None, source_prefix: str | None = None, - shop_start_specification: str | list[str] | None = None, - shop_start_specification_prefix: str | None = None, - shop_end_specification: str | list[str] | None = None, - shop_end_specification_prefix: str | None = None, - min_shop_start: datetime.datetime | None = None, - max_shop_start: datetime.datetime | None = None, - min_shop_end: datetime.datetime | None = None, - max_shop_end: datetime.datetime | None = None, - min_bid_date: datetime.date | None = None, - max_bid_date: datetime.date | None = None, - is_ready: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -286,22 +237,10 @@ def search( properties: The property to search, if nothing is passed all text fields will be searched. name: The name to filter on. name_prefix: The prefix of the name to filter on. - shop_version: The shop version to filter on. - shop_version_prefix: The prefix of the shop version to filter on. 
model_template: The model template to filter on. + commands: The command to filter on. source: The source to filter on. source_prefix: The prefix of the source to filter on. - shop_start_specification: The shop start specification to filter on. - shop_start_specification_prefix: The prefix of the shop start specification to filter on. - shop_end_specification: The shop end specification to filter on. - shop_end_specification_prefix: The prefix of the shop end specification to filter on. - min_shop_start: The minimum value of the shop start to filter on. - max_shop_start: The maximum value of the shop start to filter on. - min_shop_end: The minimum value of the shop end to filter on. - max_shop_end: The maximum value of the shop end to filter on. - min_bid_date: The minimum value of the bid date to filter on. - max_bid_date: The maximum value of the bid date to filter on. - is_ready: The is ready to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -323,22 +262,10 @@ def search( self._view_id, name, name_prefix, - shop_version, - shop_version_prefix, model_template, + commands, source, source_prefix, - shop_start_specification, - shop_start_specification_prefix, - shop_end_specification, - shop_end_specification_prefix, - min_shop_start, - max_shop_start, - min_shop_end, - max_shop_end, - min_bid_date, - max_bid_date, - is_ready, external_id_prefix, space, filter, @@ -360,22 +287,10 @@ def aggregate( search_properties: ScenarioTextFields | Sequence[ScenarioTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - shop_version: str | list[str] | None = None, - shop_version_prefix: str | None = None, model_template: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + commands: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, source: str | list[str] | None = None, source_prefix: str | None = None, - shop_start_specification: str | list[str] | None = None, - shop_start_specification_prefix: str | None = None, - shop_end_specification: str | list[str] | None = None, - shop_end_specification_prefix: str | None = None, - min_shop_start: datetime.datetime | None = None, - max_shop_start: datetime.datetime | None = None, - min_shop_end: datetime.datetime | None = None, - max_shop_end: datetime.datetime | None = None, - min_bid_date: datetime.date | None = None, - max_bid_date: datetime.date | None = None, - is_ready: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -397,22 +312,10 @@ def aggregate( search_properties: ScenarioTextFields | Sequence[ScenarioTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - shop_version: str | list[str] | None = None, - shop_version_prefix: str | None = None, model_template: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + commands: str | 
tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, source: str | list[str] | None = None, source_prefix: str | None = None, - shop_start_specification: str | list[str] | None = None, - shop_start_specification_prefix: str | None = None, - shop_end_specification: str | list[str] | None = None, - shop_end_specification_prefix: str | None = None, - min_shop_start: datetime.datetime | None = None, - max_shop_start: datetime.datetime | None = None, - min_shop_end: datetime.datetime | None = None, - max_shop_end: datetime.datetime | None = None, - min_bid_date: datetime.date | None = None, - max_bid_date: datetime.date | None = None, - is_ready: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -433,22 +336,10 @@ def aggregate( search_property: ScenarioTextFields | Sequence[ScenarioTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - shop_version: str | list[str] | None = None, - shop_version_prefix: str | None = None, model_template: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + commands: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, source: str | list[str] | None = None, source_prefix: str | None = None, - shop_start_specification: str | list[str] | None = None, - shop_start_specification_prefix: str | None = None, - shop_end_specification: str | list[str] | None = None, - shop_end_specification_prefix: str | None = None, - min_shop_start: datetime.datetime | None = None, - max_shop_start: datetime.datetime | None = None, - min_shop_end: datetime.datetime | None = None, - max_shop_end: datetime.datetime | None = None, - min_bid_date: datetime.date | None = None, - max_bid_date: datetime.date | None = None, - is_ready: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -464,22 
+355,10 @@ def aggregate( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - shop_version: The shop version to filter on. - shop_version_prefix: The prefix of the shop version to filter on. model_template: The model template to filter on. + commands: The command to filter on. source: The source to filter on. source_prefix: The prefix of the source to filter on. - shop_start_specification: The shop start specification to filter on. - shop_start_specification_prefix: The prefix of the shop start specification to filter on. - shop_end_specification: The shop end specification to filter on. - shop_end_specification_prefix: The prefix of the shop end specification to filter on. - min_shop_start: The minimum value of the shop start to filter on. - max_shop_start: The maximum value of the shop start to filter on. - min_shop_end: The minimum value of the shop end to filter on. - max_shop_end: The maximum value of the shop end to filter on. - min_bid_date: The minimum value of the bid date to filter on. - max_bid_date: The maximum value of the bid date to filter on. - is_ready: The is ready to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -502,22 +381,10 @@ def aggregate( self._view_id, name, name_prefix, - shop_version, - shop_version_prefix, model_template, + commands, source, source_prefix, - shop_start_specification, - shop_start_specification_prefix, - shop_end_specification, - shop_end_specification_prefix, - min_shop_start, - max_shop_start, - min_shop_end, - max_shop_end, - min_bid_date, - max_bid_date, - is_ready, external_id_prefix, space, filter, @@ -542,22 +409,10 @@ def histogram( search_property: ScenarioTextFields | Sequence[ScenarioTextFields] | None = None, name: str | list[str] | None = None, name_prefix: str | None = None, - shop_version: str | list[str] | None = None, - shop_version_prefix: str | None = None, model_template: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + commands: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, source: str | list[str] | None = None, source_prefix: str | None = None, - shop_start_specification: str | list[str] | None = None, - shop_start_specification_prefix: str | None = None, - shop_end_specification: str | list[str] | None = None, - shop_end_specification_prefix: str | None = None, - min_shop_start: datetime.datetime | None = None, - max_shop_start: datetime.datetime | None = None, - min_shop_end: datetime.datetime | None = None, - max_shop_end: datetime.datetime | None = None, - min_bid_date: datetime.date | None = None, - max_bid_date: datetime.date | None = None, - is_ready: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -572,22 +427,10 @@ def histogram( search_property: The text field to search in. name: The name to filter on. name_prefix: The prefix of the name to filter on. - shop_version: The shop version to filter on. - shop_version_prefix: The prefix of the shop version to filter on. model_template: The model template to filter on. + commands: The command to filter on. 
source: The source to filter on. source_prefix: The prefix of the source to filter on. - shop_start_specification: The shop start specification to filter on. - shop_start_specification_prefix: The prefix of the shop start specification to filter on. - shop_end_specification: The shop end specification to filter on. - shop_end_specification_prefix: The prefix of the shop end specification to filter on. - min_shop_start: The minimum value of the shop start to filter on. - max_shop_start: The maximum value of the shop start to filter on. - min_shop_end: The minimum value of the shop end to filter on. - max_shop_end: The maximum value of the shop end to filter on. - min_bid_date: The minimum value of the bid date to filter on. - max_bid_date: The maximum value of the bid date to filter on. - is_ready: The is ready to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -601,22 +444,10 @@ def histogram( self._view_id, name, name_prefix, - shop_version, - shop_version_prefix, model_template, + commands, source, source_prefix, - shop_start_specification, - shop_start_specification_prefix, - shop_end_specification, - shop_end_specification_prefix, - min_shop_start, - max_shop_start, - min_shop_end, - max_shop_end, - min_bid_date, - max_bid_date, - is_ready, external_id_prefix, space, filter, @@ -636,22 +467,10 @@ def list( self, name: str | list[str] | None = None, name_prefix: str | None = None, - shop_version: str | list[str] | None = None, - shop_version_prefix: str | None = None, model_template: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + commands: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, source: str | list[str] | None = None, source_prefix: str | None = None, - shop_start_specification: str | list[str] | None = None, - shop_start_specification_prefix: str | None = None, - shop_end_specification: str | list[str] | None = None, - shop_end_specification_prefix: str | None = None, - min_shop_start: datetime.datetime | None = None, - max_shop_start: datetime.datetime | None = None, - min_shop_end: datetime.datetime | None = None, - max_shop_end: datetime.datetime | None = None, - min_bid_date: datetime.date | None = None, - max_bid_date: datetime.date | None = None, - is_ready: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -663,22 +482,10 @@ def list( Args: name: The name to filter on. name_prefix: The prefix of the name to filter on. - shop_version: The shop version to filter on. - shop_version_prefix: The prefix of the shop version to filter on. model_template: The model template to filter on. + commands: The command to filter on. source: The source to filter on. source_prefix: The prefix of the source to filter on. 
- shop_start_specification: The shop start specification to filter on. - shop_start_specification_prefix: The prefix of the shop start specification to filter on. - shop_end_specification: The shop end specification to filter on. - shop_end_specification_prefix: The prefix of the shop end specification to filter on. - min_shop_start: The minimum value of the shop start to filter on. - max_shop_start: The maximum value of the shop start to filter on. - min_shop_end: The minimum value of the shop end to filter on. - max_shop_end: The maximum value of the shop end to filter on. - min_bid_date: The minimum value of the bid date to filter on. - max_bid_date: The maximum value of the bid date to filter on. - is_ready: The is ready to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of scenarios to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -701,22 +508,10 @@ def list( self._view_id, name, name_prefix, - shop_version, - shop_version_prefix, model_template, + commands, source, source_prefix, - shop_start_specification, - shop_start_specification_prefix, - shop_end_specification, - shop_end_specification_prefix, - min_shop_start, - max_shop_start, - min_shop_end, - max_shop_end, - min_bid_date, - max_bid_date, - is_ready, external_id_prefix, space, filter, diff --git a/cognite/powerops/client/_generated/v1/_api/scenario_query.py b/cognite/powerops/client/_generated/v1/_api/scenario_query.py index 063d4cbcd..dd8a1214d 100644 --- a/cognite/powerops/client/_generated/v1/_api/scenario_query.py +++ b/cognite/powerops/client/_generated/v1/_api/scenario_query.py @@ -9,6 +9,7 @@ DomainModelCore, Scenario, ModelTemplate, + Commands, ) from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter @@ -46,6 +47,7 @@ def mappings_override( space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, 
retrieve_model_template: bool = False, + retrieve_commands: bool = False, ) -> MappingQueryAPI[T_DomainModelList]: """Query along the mappings override edges of the scenario. @@ -55,6 +57,7 @@ def mappings_override( limit: Maximum number of mappings override edges to return. Defaults to 25. Set to -1, float("inf") or None to return all items. retrieve_model_template: Whether to retrieve the model template for each scenario or not. + retrieve_commands: Whether to retrieve the command for each scenario or not. Returns: MappingQueryAPI: The query API for the mapping. @@ -82,16 +85,20 @@ def mappings_override( ) if retrieve_model_template: self._query_append_model_template(from_) + if retrieve_commands: + self._query_append_commands(from_) return MappingQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) def query( self, retrieve_model_template: bool = False, + retrieve_commands: bool = False, ) -> T_DomainModelList: """Execute query and return the result. Args: retrieve_model_template: Whether to retrieve the model template for each scenario or not. + retrieve_commands: Whether to retrieve the command for each scenario or not. Returns: The list of the source nodes of the query. 
@@ -100,6 +107,8 @@ def query( from_ = self._builder[-1].name if retrieve_model_template: self._query_append_model_template(from_) + if retrieve_commands: + self._query_append_commands(from_) return self._query() def _query_append_model_template(self, from_: str) -> None: @@ -118,3 +127,20 @@ def _query_append_model_template(self, from_: str) -> None: result_cls=ModelTemplate, ), ) + + def _query_append_commands(self, from_: str) -> None: + view_id = self._view_by_read_class[Commands] + self._builder.append( + QueryStep( + name=self._builder.next_name("commands"), + expression=dm.query.NodeResultSetExpression( + filter=dm.filters.HasData(views=[view_id]), + from_=from_, + through=self._view_by_read_class[Scenario].as_property_ref("commands"), + direction="outwards", + ), + select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), + max_retrieve_limit=-1, + result_cls=Commands, + ), + ) diff --git a/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input.py b/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input.py index 250476165..5440a609a 100644 --- a/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input.py +++ b/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input.py @@ -33,8 +33,9 @@ QueryStep, QueryBuilder, ) -from .shop_partial_bid_calculation_input_alerts import ShopPartialBidCalculationInputAlertsAPI -from .shop_partial_bid_calculation_input_shop_results import ShopPartialBidCalculationInputShopResultsAPI +from .shop_partial_bid_calculation_input_shop_result_price_prod import ( + ShopPartialBidCalculationInputShopResultPriceProdAPI, +) from .shop_partial_bid_calculation_input_query import ShopPartialBidCalculationInputQueryAPI @@ -52,8 +53,7 @@ def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainMo view_by_read_class=view_by_read_class, ) self._view_id = view_id - self.alerts_edge = ShopPartialBidCalculationInputAlertsAPI(client) 
- self.shop_results_edge = ShopPartialBidCalculationInputShopResultsAPI(client) + self.shop_result_price_prod_edge = ShopPartialBidCalculationInputShopResultPriceProdAPI(client) def __call__( self, @@ -67,6 +67,7 @@ def __call__( function_call_id_prefix: str | None = None, plant: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + step_enabled: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, @@ -85,6 +86,7 @@ def __call__( function_call_id_prefix: The prefix of the function call id to filter on. plant: The plant to filter on. market_configuration: The market configuration to filter on. + step_enabled: The step enabled to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of shop partial bid calculation inputs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -107,6 +109,7 @@ def __call__( function_call_id_prefix, plant, market_configuration, + step_enabled, external_id_prefix, space, (filter and dm.filters.And(filter, has_data)) or has_data, @@ -125,7 +128,7 @@ def apply( """Add or update (upsert) shop partial bid calculation inputs. Note: This method iterates through all nodes and timeseries linked to shop_partial_bid_calculation_input and creates them including the edges - between the nodes. For example, if any of `alerts` or `shop_results` are set, then these + between the nodes. For example, if any of `shop_result_price_prod` are set, then these nodes as well as any nodes linked to them, and all the edges linking these nodes will be created. 
Args: @@ -228,18 +231,11 @@ def retrieve( retrieve_edges=True, edge_api_name_type_direction_view_id_penta=[ ( - self.alerts_edge, - "alerts", - dm.DirectRelationReference("sp_powerops_types", "calculationIssue"), - "outwards", - dm.ViewId("sp_powerops_models", "Alert", "1"), - ), - ( - self.shop_results_edge, - "shop_results", - dm.DirectRelationReference("sp_powerops_types", "SHOPResult"), + self.shop_result_price_prod_edge, + "shop_result_price_prod", + dm.DirectRelationReference("sp_powerops_types", "SHOPResultPriceProd"), "outwards", - dm.ViewId("sp_powerops_models", "SHOPResult", "1"), + dm.ViewId("sp_powerops_models", "SHOPResultPriceProd", "1"), ), ], ) @@ -260,6 +256,7 @@ def search( function_call_id_prefix: str | None = None, plant: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + step_enabled: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -280,6 +277,7 @@ def search( function_call_id_prefix: The prefix of the function call id to filter on. plant: The plant to filter on. market_configuration: The market configuration to filter on. + step_enabled: The step enabled to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of shop partial bid calculation inputs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -309,6 +307,7 @@ def search( function_call_id_prefix, plant, market_configuration, + step_enabled, external_id_prefix, space, filter, @@ -342,6 +341,7 @@ def aggregate( function_call_id_prefix: str | None = None, plant: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + step_enabled: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -373,6 +373,7 @@ def aggregate( function_call_id_prefix: str | None = None, plant: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + step_enabled: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -403,6 +404,7 @@ def aggregate( function_call_id_prefix: str | None = None, plant: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + step_enabled: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -426,6 +428,7 @@ def aggregate( function_call_id_prefix: The prefix of the function call id to filter on. plant: The plant to filter on. market_configuration: The market configuration to filter on. + step_enabled: The step enabled to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of shop partial bid calculation inputs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -456,6 +459,7 @@ def aggregate( function_call_id_prefix, plant, market_configuration, + step_enabled, external_id_prefix, space, filter, @@ -490,6 +494,7 @@ def histogram( function_call_id_prefix: str | None = None, plant: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + step_enabled: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -512,6 +517,7 @@ def histogram( function_call_id_prefix: The prefix of the function call id to filter on. plant: The plant to filter on. market_configuration: The market configuration to filter on. + step_enabled: The step enabled to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of shop partial bid calculation inputs to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -533,6 +539,7 @@ def histogram( function_call_id_prefix, plant, market_configuration, + step_enabled, external_id_prefix, space, filter, @@ -560,6 +567,7 @@ def list( function_call_id_prefix: str | None = None, plant: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + step_enabled: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -579,11 +587,12 @@ def list( function_call_id_prefix: The prefix of the function call id to filter on. plant: The plant to filter on. market_configuration: The market configuration to filter on. + step_enabled: The step enabled to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of shop partial bid calculation inputs to return. 
Defaults to 25. Set to -1, float("inf") or None to return all items. filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. - retrieve_edges: Whether to retrieve `alerts` or `shop_results` external ids for the shop partial bid calculation inputs. Defaults to True. + retrieve_edges: Whether to retrieve `shop_result_price_prod` external ids for the shop partial bid calculation inputs. Defaults to True. Returns: List of requested shop partial bid calculation inputs @@ -609,6 +618,7 @@ def list( function_call_id_prefix, plant, market_configuration, + step_enabled, external_id_prefix, space, filter, @@ -620,18 +630,11 @@ def list( retrieve_edges=retrieve_edges, edge_api_name_type_direction_view_id_penta=[ ( - self.alerts_edge, - "alerts", - dm.DirectRelationReference("sp_powerops_types", "calculationIssue"), - "outwards", - dm.ViewId("sp_powerops_models", "Alert", "1"), - ), - ( - self.shop_results_edge, - "shop_results", - dm.DirectRelationReference("sp_powerops_types", "SHOPResult"), + self.shop_result_price_prod_edge, + "shop_result_price_prod", + dm.DirectRelationReference("sp_powerops_types", "SHOPResultPriceProd"), "outwards", - dm.ViewId("sp_powerops_models", "SHOPResult", "1"), + dm.ViewId("sp_powerops_models", "SHOPResultPriceProd", "1"), ), ], ) diff --git a/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input_query.py b/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input_query.py index 057c83a92..08e739a2e 100644 --- a/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input_query.py +++ b/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input_query.py @@ -14,8 +14,7 @@ from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter if TYPE_CHECKING: - from .alert_query import AlertQueryAPI - from .shop_result_query 
import SHOPResultQueryAPI + from .shop_result_price_prod_query import SHOPResultPriceProdQueryAPI class ShopPartialBidCalculationInputQueryAPI(QueryAPI[T_DomainModelList]): @@ -44,39 +43,39 @@ def __init__( ) ) - def alerts( + def shop_result_price_prod( self, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, retrieve_plant: bool = False, retrieve_market_configuration: bool = False, - ) -> AlertQueryAPI[T_DomainModelList]: - """Query along the alert edges of the shop partial bid calculation input. + ) -> SHOPResultPriceProdQueryAPI[T_DomainModelList]: + """Query along the shop result price prod edges of the shop partial bid calculation input. Args: external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. - limit: Maximum number of alert edges to return. Defaults to 25. Set to -1, float("inf") or None + limit: Maximum number of shop result price prod edges to return. Defaults to 25. Set to -1, float("inf") or None to return all items. retrieve_plant: Whether to retrieve the plant for each shop partial bid calculation input or not. retrieve_market_configuration: Whether to retrieve the market configuration for each shop partial bid calculation input or not. Returns: - AlertQueryAPI: The query API for the alert. + SHOPResultPriceProdQueryAPI: The query API for the shop result price prod. 
""" - from .alert_query import AlertQueryAPI + from .shop_result_price_prod_query import SHOPResultPriceProdQueryAPI from_ = self._builder[-1].name edge_filter = _create_edge_filter( - dm.DirectRelationReference("sp_powerops_types", "calculationIssue"), + dm.DirectRelationReference("sp_powerops_types", "SHOPResultPriceProd"), external_id_prefix=external_id_prefix, space=space, ) self._builder.append( QueryStep( - name=self._builder.next_name("alerts"), + name=self._builder.next_name("shop_result_price_prod"), expression=dm.query.EdgeResultSetExpression( filter=edge_filter, from_=from_, @@ -90,55 +89,7 @@ def alerts( self._query_append_plant(from_) if retrieve_market_configuration: self._query_append_market_configuration(from_) - return AlertQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) - - def shop_results( - self, - external_id_prefix: str | None = None, - space: str | list[str] | None = None, - limit: int | None = DEFAULT_QUERY_LIMIT, - retrieve_plant: bool = False, - retrieve_market_configuration: bool = False, - ) -> SHOPResultQueryAPI[T_DomainModelList]: - """Query along the shop result edges of the shop partial bid calculation input. - - Args: - external_id_prefix: The prefix of the external ID to filter on. - space: The space to filter on. - limit: Maximum number of shop result edges to return. Defaults to 25. Set to -1, float("inf") or None - to return all items. - retrieve_plant: Whether to retrieve the plant for each shop partial bid calculation input or not. - retrieve_market_configuration: Whether to retrieve the market configuration for each shop partial bid calculation input or not. - - Returns: - SHOPResultQueryAPI: The query API for the shop result. 
- """ - from .shop_result_query import SHOPResultQueryAPI - - from_ = self._builder[-1].name - - edge_filter = _create_edge_filter( - dm.DirectRelationReference("sp_powerops_types", "SHOPResult"), - external_id_prefix=external_id_prefix, - space=space, - ) - self._builder.append( - QueryStep( - name=self._builder.next_name("shop_results"), - expression=dm.query.EdgeResultSetExpression( - filter=edge_filter, - from_=from_, - direction="outwards", - ), - select=dm.query.Select(), - max_retrieve_limit=limit, - ) - ) - if retrieve_plant: - self._query_append_plant(from_) - if retrieve_market_configuration: - self._query_append_market_configuration(from_) - return SHOPResultQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) + return SHOPResultPriceProdQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) def query( self, diff --git a/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input_shop_result_price_prod.py b/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input_shop_result_price_prod.py new file mode 100644 index 000000000..35e22331f --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_partial_bid_calculation_input_shop_result_price_prod.py @@ -0,0 +1,54 @@ +from __future__ import annotations + + +from cognite.client import data_modeling as dm + +from ._core import DEFAULT_LIMIT_READ, EdgeAPI, _create_edge_filter +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE + + +class ShopPartialBidCalculationInputShopResultPriceProdAPI(EdgeAPI): + def list( + self, + from_shop_partial_bid_calculation_input: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + from_shop_partial_bid_calculation_input_space: str = DEFAULT_INSTANCE_SPACE, + to_shop_result_price_prod: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + to_shop_result_price_prod_space: str = DEFAULT_INSTANCE_SPACE, + external_id_prefix: str 
| None = None, + space: str | list[str] | None = None, + limit=DEFAULT_LIMIT_READ, + ) -> dm.EdgeList: + """List shop result price prod edges of a shop partial bid calculation input. + + Args: + from_shop_partial_bid_calculation_input: ID of the source shop partial bid calculation input. + from_shop_partial_bid_calculation_input_space: Location of the shop partial bid calculation inputs. + to_shop_result_price_prod: ID of the target shop result price prod. + to_shop_result_price_prod_space: Location of the shop result price prods. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop result price prod edges to return. Defaults to 25. Set to -1, float("inf") or None + to return all items. + + Returns: + The requested shop result price prod edges. + + Examples: + + List 5 shop result price prod edges connected to "my_shop_partial_bid_calculation_input": + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_partial_bid_calculation_input = client.shop_partial_bid_calculation_input.shop_result_price_prod_edge.list("my_shop_partial_bid_calculation_input", limit=5) + + """ + filter_ = _create_edge_filter( + dm.DirectRelationReference("sp_powerops_types", "SHOPResultPriceProd"), + from_shop_partial_bid_calculation_input, + from_shop_partial_bid_calculation_input_space, + to_shop_result_price_prod, + to_shop_result_price_prod_space, + external_id_prefix, + space, + ) + return self._list(filter_=filter_, limit=limit) diff --git a/cognite/powerops/client/_generated/v1/_api/shop_result.py b/cognite/powerops/client/_generated/v1/_api/shop_result.py index 5a0c1ae2b..98bc66da3 100644 --- a/cognite/powerops/client/_generated/v1/_api/shop_result.py +++ b/cognite/powerops/client/_generated/v1/_api/shop_result.py @@ -33,8 +33,7 @@ QueryBuilder, ) from .shop_result_alerts import SHOPResultAlertsAPI -from .shop_result_production 
import SHOPResultProductionAPI -from .shop_result_price import SHOPResultPriceAPI +from .shop_result_output_timeseries import SHOPResultOutputTimeseriesAPI from .shop_result_query import SHOPResultQueryAPI @@ -51,13 +50,11 @@ def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainMo ) self._view_id = view_id self.alerts_edge = SHOPResultAlertsAPI(client) - self.production = SHOPResultProductionAPI(client, view_id) - self.price = SHOPResultPriceAPI(client, view_id) + self.output_timeseries = SHOPResultOutputTimeseriesAPI(client, view_id) def __call__( self, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - price_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_QUERY_LIMIT, @@ -66,8 +63,7 @@ def __call__( """Query starting at shop results. Args: - scenario: The scenario to filter on. - price_scenario: The price scenario to filter on. + case: The case to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of shop results to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -80,8 +76,7 @@ def __call__( has_data = dm.filters.HasData(views=[self._view_id]) filter_ = _create_shop_result_filter( self._view_id, - scenario, - price_scenario, + case, external_id_prefix, space, (filter and dm.filters.And(filter, has_data)) or has_data, @@ -217,8 +212,7 @@ def aggregate( ), property: SHOPResultFields | Sequence[SHOPResultFields] | None = None, group_by: None = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - price_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -236,8 +230,7 @@ def aggregate( ), property: SHOPResultFields | Sequence[SHOPResultFields] | None = None, group_by: SHOPResultFields | Sequence[SHOPResultFields] = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - price_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -254,8 +247,7 @@ def aggregate( ), property: SHOPResultFields | Sequence[SHOPResultFields] | None = None, group_by: SHOPResultFields | Sequence[SHOPResultFields] | None = None, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - price_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -267,8 +259,7 @@ def aggregate( aggregate: The aggregation to perform. property: The property to perform aggregation on. 
group_by: The property to group by when doing the aggregation. - scenario: The scenario to filter on. - price_scenario: The price scenario to filter on. + case: The case to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of shop results to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -289,8 +280,7 @@ def aggregate( filter_ = _create_shop_result_filter( self._view_id, - scenario, - price_scenario, + case, external_id_prefix, space, filter, @@ -311,8 +301,7 @@ def histogram( self, property: SHOPResultFields, interval: float, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - price_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -323,8 +312,7 @@ def histogram( Args: property: The property to use as the value in the histogram. interval: The interval to use for the histogram bins. - scenario: The scenario to filter on. - price_scenario: The price scenario to filter on. + case: The case to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of shop results to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
@@ -336,8 +324,7 @@ def histogram( """ filter_ = _create_shop_result_filter( self._view_id, - scenario, - price_scenario, + case, external_id_prefix, space, filter, @@ -355,8 +342,7 @@ def histogram( def list( self, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - price_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = DEFAULT_LIMIT_READ, @@ -366,8 +352,7 @@ def list( """List/filter shop results Args: - scenario: The scenario to filter on. - price_scenario: The price scenario to filter on. + case: The case to filter on. external_id_prefix: The prefix of the external ID to filter on. space: The space to filter on. limit: Maximum number of shop results to return. Defaults to 25. Set to -1, float("inf") or None to return all items. @@ -388,8 +373,7 @@ def list( """ filter_ = _create_shop_result_filter( self._view_id, - scenario, - price_scenario, + case, external_id_prefix, space, filter, diff --git a/cognite/powerops/client/_generated/v1/_api/shop_result_output_timeseries.py b/cognite/powerops/client/_generated/v1/_api/shop_result_output_timeseries.py new file mode 100644 index 000000000..5ef9dacd5 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_result_output_timeseries.py @@ -0,0 +1,505 @@ +from __future__ import annotations + +import datetime +from collections.abc import Sequence +from typing import Literal + +import pandas as pd +from cognite.client import CogniteClient +from cognite.client import data_modeling as dm +from cognite.client.data_classes import Datapoints, DatapointsArrayList, DatapointsList, TimeSeriesList +from cognite.client.data_classes.datapoints import Aggregate +from cognite.powerops.client._generated.v1.data_classes._shop_result import _create_shop_result_filter +from ._core 
import DEFAULT_LIMIT_READ, INSTANCE_QUERY_LIMIT + +ColumnNames = Literal["outputTimeseries", "objectiveSequence", "preRun", "postRun", "shopMessages", "cplexLogs"] + + +class SHOPResultOutputTimeseriesQuery: + def __init__( + self, + client: CogniteClient, + view_id: dm.ViewId, + timeseries_limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ): + self._client = client + self._view_id = view_id + self._timeseries_limit = timeseries_limit + self._filter = filter + + def retrieve( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> DatapointsList: + """`Retrieve datapoints for the `shop_result.output_timeseries` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. 
If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsList`` with the requested datapoints. + + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_output_timeseries' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_datapoints = client.shop_result.output_timeseries(external_id="my_output_timeseries").retrieve(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsList([]) + + def retrieve_arrays( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> DatapointsArrayList: + """`Retrieve numpy arrays for the `shop_result.output_timeseries` timeseries. 
+ + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsArrayList`` with the requested datapoints. 
+ + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_output_timeseries' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_datapoints = client.shop_result.output_timeseries(external_id="my_output_timeseries").retrieve_array(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_arrays( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsArrayList([]) + + def retrieve_dataframe( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "outputTimeseries", + ) -> pd.DataFrame: + """`Retrieve DataFrames for the `shop_result.output_timeseries` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. 
Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. Default: False + uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False + include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True + include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False + column_names: Which property to use for column names. Defauts to outputTimeseries + + + Returns: + A ``DataFrame`` with the requested datapoints. 
+ + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_output_timeseries' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_datapoints = client.shop_result.output_timeseries(external_id="my_output_timeseries").retrieve_dataframe(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def retrieve_dataframe_in_tz( + self, + start: datetime.datetime, + end: datetime.datetime, + *, + aggregates: Aggregate | Sequence[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "outputTimeseries", + ) -> pd.DataFrame: + """Retrieve DataFrames for the `shop_result.output_timeseries` timeseries in Timezone. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. 
For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. + end: Exclusive end + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. Default: False + uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False + include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True + include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False + column_names: Which property to use for column names. 
Defauts to outputTimeseries + + + Returns: + A ``DataFrame`` with the requested datapoints. + + Examples: + + In this example, + get weekly aggregates for the 'my_output_timeseries' for the first month of 2023 in Oslo time: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> from datetime import datetime, timezone + >>> client = PowerOpsModelsV1Client() + >>> shop_result_datapoints = client.shop_result.output_timeseries( + ... external_id="my_output_timeseries").retrieve_dataframe_in_timezone( + ... datetime(2023, 1, 1, tzinfo=ZoneInfo("Europe/Oslo")), + ... datetime(2023, 1, 2, tzinfo=ZoneInfo("Europe/Oslo")), + ... aggregates="average", + ... granularity="1week", + ... ) + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe_in_tz( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def retrieve_latest( + self, + before: None | int | str | datetime.datetime = None, + ) -> Datapoints | DatapointsList | None: + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_latest( + external_id=list(external_ids), + before=before, + ) + else: + return None + + def _retrieve_timeseries_external_ids_with_extra( + self, extra_properties: ColumnNames | list[ColumnNames] = "outputTimeseries" + ) -> dict[str, list[str]]: + return 
_retrieve_timeseries_external_ids_with_extra_output_timesery( + self._client, + self._view_id, + self._filter, + self._timeseries_limit, + extra_properties, + ) + + @staticmethod + def _rename_columns( + external_ids: dict[str, list[str]], + df: pd.DataFrame, + column_names: ColumnNames | list[ColumnNames], + include_aggregate_name: bool, + include_granularity_name: bool, + ) -> pd.DataFrame: + if isinstance(column_names, str) and column_names == "outputTimeseries": + return df + splits = sum(included for included in [include_aggregate_name, include_granularity_name]) + if splits == 0: + df.columns = ["-".join(external_ids[external_id]) for external_id in df.columns] + else: + column_parts = (col.rsplit("|", maxsplit=splits) for col in df.columns) + df.columns = [ + "-".join(external_ids[external_id]) + "|" + "|".join(parts) for external_id, *parts in column_parts + ] + return df + + +class SHOPResultOutputTimeseriesAPI: + def __init__(self, client: CogniteClient, view_id: dm.ViewId): + self._client = client + self._view_id = view_id + + def __call__( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> SHOPResultOutputTimeseriesQuery: + """Query timeseries `shop_result.output_timeseries` + + Args: + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop results to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query object that can be used to retrieve datapoins for the shop_result.output_timeseries timeseries + selected in this method. 
+ + Examples: + + Retrieve all data for 5 shop_result.output_timeseries timeseries: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_results = client.shop_result.output_timeseries(limit=5).retrieve() + + """ + filter_ = _create_shop_result_filter( + self._view_id, + case, + external_id_prefix, + space, + filter, + ) + + return SHOPResultOutputTimeseriesQuery( + client=self._client, + view_id=self._view_id, + timeseries_limit=limit, + filter=filter_, + ) + + def list( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> TimeSeriesList: + """List timeseries `shop_result.output_timeseries` + + Args: + case: The case to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop results to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + List of Timeseries shop_result.output_timeseries. 
+ + Examples: + + List shop_result.output_timeseries and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_results = client.shop_result.output_timeseries.list(limit=5) + + """ + filter_ = _create_shop_result_filter( + self._view_id, + case, + external_id_prefix, + space, + filter, + ) + external_ids = _retrieve_timeseries_external_ids_with_extra_output_timesery( + self._client, self._view_id, filter_, limit + ) + if external_ids: + return self._client.time_series.retrieve_multiple(external_ids=list(external_ids)) + else: + return TimeSeriesList([]) + + +def _retrieve_timeseries_external_ids_with_extra_output_timesery( + client: CogniteClient, + view_id: dm.ViewId, + filter_: dm.Filter | None, + limit: int, + extra_properties: ColumnNames | list[ColumnNames] = "outputTimeseries", +) -> dict[str, list[str]]: + limit = float("inf") if limit is None or limit == -1 else limit + properties = ["outputTimeseries"] + if extra_properties == "outputTimeseries": + ... 
+ elif isinstance(extra_properties, str) and extra_properties != "outputTimeseries": + properties.append(extra_properties) + elif isinstance(extra_properties, list): + properties.extend([prop for prop in extra_properties if prop != "outputTimeseries"]) + else: + raise ValueError(f"Invalid value for extra_properties: {extra_properties}") + + if isinstance(extra_properties, str): + extra_list = [extra_properties] + else: + extra_list = extra_properties + has_data = dm.filters.HasData(views=[view_id]) + has_property = dm.filters.Exists(property=view_id.as_property_ref("outputTimeseries")) + filter_ = dm.filters.And(filter_, has_data, has_property) if filter_ else dm.filters.And(has_data, has_property) + + cursor = None + external_ids: dict[str, list[str]] = {} + total_retrieved = 0 + while True: + query_limit = max(min(INSTANCE_QUERY_LIMIT, limit - total_retrieved), 0) + selected_nodes = dm.query.NodeResultSetExpression(filter=filter_, limit=query_limit) + query = dm.query.Query( + with_={ + "nodes": selected_nodes, + }, + select={ + "nodes": dm.query.Select( + [dm.query.SourceSelector(view_id, properties)], + ) + }, + cursors={"nodes": cursor}, + ) + result = client.data_modeling.instances.query(query) + batch_external_ids = { + node.properties[view_id]["outputTimeseries"]: [ + node.properties[view_id].get(prop, "") for prop in extra_list + ] + for node in result.data["nodes"].data + } + total_retrieved += len(batch_external_ids) + external_ids.update(batch_external_ids) + cursor = result.cursors["nodes"] + if total_retrieved >= limit or cursor is None: + break + return external_ids diff --git a/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod.py b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod.py new file mode 100644 index 000000000..f79395eba --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod.py @@ -0,0 +1,427 @@ +from __future__ import annotations + +from collections.abc import Sequence +from 
typing import overload +import warnings + +from cognite.client import CogniteClient +from cognite.client import data_modeling as dm +from cognite.client.data_classes.data_modeling.instances import InstanceAggregationResultList + +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + DomainModelWrite, + ResourcesWriteResult, + SHOPResultPriceProd, + SHOPResultPriceProdWrite, + SHOPResultPriceProdFields, + SHOPResultPriceProdList, + SHOPResultPriceProdWriteList, +) +from cognite.powerops.client._generated.v1.data_classes._shop_result_price_prod import ( + _SHOPRESULTPRICEPROD_PROPERTIES_BY_FIELD, + _create_shop_result_price_prod_filter, +) +from ._core import ( + DEFAULT_LIMIT_READ, + DEFAULT_QUERY_LIMIT, + Aggregations, + NodeAPI, + SequenceNotStr, + QueryStep, + QueryBuilder, +) +from .shop_result_price_prod_alerts import SHOPResultPriceProdAlertsAPI +from .shop_result_price_prod_production_timeseries import SHOPResultPriceProdProductionTimeseriesAPI +from .shop_result_price_prod_output_timeseries import SHOPResultPriceProdOutputTimeseriesAPI +from .shop_result_price_prod_query import SHOPResultPriceProdQueryAPI + + +class SHOPResultPriceProdAPI(NodeAPI[SHOPResultPriceProd, SHOPResultPriceProdWrite, SHOPResultPriceProdList]): + def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainModelCore], dm.ViewId]): + view_id = view_by_read_class[SHOPResultPriceProd] + super().__init__( + client=client, + sources=view_id, + class_type=SHOPResultPriceProd, + class_list=SHOPResultPriceProdList, + class_write_list=SHOPResultPriceProdWriteList, + view_by_read_class=view_by_read_class, + ) + self._view_id = view_id + self.alerts_edge = SHOPResultPriceProdAlertsAPI(client) + self.production_timeseries_edge = SHOPResultPriceProdProductionTimeseriesAPI(client) + self.output_timeseries = SHOPResultPriceProdOutputTimeseriesAPI(client, 
view_id) + + def __call__( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_timeseries: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_QUERY_LIMIT, + filter: dm.Filter | None = None, + ) -> SHOPResultPriceProdQueryAPI[SHOPResultPriceProdList]: + """Query starting at shop result price prods. + + Args: + case: The case to filter on. + price_timeseries: The price timesery to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop result price prods to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query API for shop result price prods. + + """ + has_data = dm.filters.HasData(views=[self._view_id]) + filter_ = _create_shop_result_price_prod_filter( + self._view_id, + case, + price_timeseries, + external_id_prefix, + space, + (filter and dm.filters.And(filter, has_data)) or has_data, + ) + builder = QueryBuilder(SHOPResultPriceProdList) + return SHOPResultPriceProdQueryAPI(self._client, builder, self._view_by_read_class, filter_, limit) + + def apply( + self, + shop_result_price_prod: SHOPResultPriceProdWrite | Sequence[SHOPResultPriceProdWrite], + replace: bool = False, + write_none: bool = False, + ) -> ResourcesWriteResult: + """Add or update (upsert) shop result price prods. + + Note: This method iterates through all nodes and timeseries linked to shop_result_price_prod and creates them including the edges + between the nodes. 
For example, if any of `alerts` or `production_timeseries` are set, then these + nodes as well as any nodes linked to them, and all the edges linking these nodes will be created. + + Args: + shop_result_price_prod: Shop result price prod or sequence of shop result price prods to upsert. + replace (bool): How do we behave when a property value exists? Do we replace all matching and existing values with the supplied values (true)? + Or should we merge in new values for properties together with the existing values (false)? Note: This setting applies for all nodes or edges specified in the ingestion call. + write_none (bool): This method, will by default, skip properties that are set to None. However, if you want to set properties to None, + you can set this parameter to True. Note this only applies to properties that are nullable. + Returns: + Created instance(s), i.e., nodes, edges, and time series. + + Examples: + + Create a new shop_result_price_prod: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> from cognite.powerops.client._generated.v1.data_classes import SHOPResultPriceProdWrite + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prod = SHOPResultPriceProdWrite(external_id="my_shop_result_price_prod", ...) + >>> result = client.shop_result_price_prod.apply(shop_result_price_prod) + + """ + warnings.warn( + "The .apply method is deprecated and will be removed in v1.0. " + "Please use the .upsert method on the client instead. This means instead of " + "`my_client.shop_result_price_prod.apply(my_items)` please use `my_client.upsert(my_items)`." + "The motivation is that all apply methods are the same, and having one apply method per API " + " class encourages users to create items in small batches, which is inefficient." 
+ "In addition, .upsert method is more descriptive of what the method does.", + UserWarning, + stacklevel=2, + ) + return self._apply(shop_result_price_prod, replace, write_none) + + def delete( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> dm.InstancesDeleteResult: + """Delete one or more shop result price prod. + + Args: + external_id: External id of the shop result price prod to delete. + space: The space where all the shop result price prod are located. + + Returns: + The instance(s), i.e., nodes and edges which has been deleted. Empty list if nothing was deleted. + + Examples: + + Delete shop_result_price_prod by id: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> client.shop_result_price_prod.delete("my_shop_result_price_prod") + """ + warnings.warn( + "The .delete method is deprecated and will be removed in v1.0. " + "Please use the .delete method on the client instead. This means instead of " + "`my_client.shop_result_price_prod.delete(my_ids)` please use `my_client.delete(my_ids)`." + "The motivation is that all delete methods are the same, and having one delete method per API " + " class encourages users to delete items in small batches, which is inefficient.", + UserWarning, + stacklevel=2, + ) + return self._delete(external_id, space) + + @overload + def retrieve(self, external_id: str, space: str = DEFAULT_INSTANCE_SPACE) -> SHOPResultPriceProd | None: ... + + @overload + def retrieve( + self, external_id: SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> SHOPResultPriceProdList: ... + + def retrieve( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> SHOPResultPriceProd | SHOPResultPriceProdList | None: + """Retrieve one or more shop result price prods by id(s). + + Args: + external_id: External id or list of external ids of the shop result price prods. 
+ space: The space where all the shop result price prods are located. + + Returns: + The requested shop result price prods. + + Examples: + + Retrieve shop_result_price_prod by id: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prod = client.shop_result_price_prod.retrieve("my_shop_result_price_prod") + + """ + return self._retrieve( + external_id, + space, + retrieve_edges=True, + edge_api_name_type_direction_view_id_penta=[ + ( + self.alerts_edge, + "alerts", + dm.DirectRelationReference("sp_powerops_types", "calculationIssue"), + "outwards", + dm.ViewId("sp_powerops_models", "Alert", "1"), + ), + ( + self.production_timeseries_edge, + "production_timeseries", + dm.DirectRelationReference("sp_powerops_types", "SHOPResultPriceProd.productionTimeseries"), + "outwards", + dm.ViewId("sp_powerops_models", "SHOPTimeSeries", "1"), + ), + ], + ) + + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: SHOPResultPriceProdFields | Sequence[SHOPResultPriceProdFields] | None = None, + group_by: None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_timeseries: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue]: ... 
+ + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: SHOPResultPriceProdFields | Sequence[SHOPResultPriceProdFields] | None = None, + group_by: SHOPResultPriceProdFields | Sequence[SHOPResultPriceProdFields] = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_timeseries: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> InstanceAggregationResultList: ... + + def aggregate( + self, + aggregate: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: SHOPResultPriceProdFields | Sequence[SHOPResultPriceProdFields] | None = None, + group_by: SHOPResultPriceProdFields | Sequence[SHOPResultPriceProdFields] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_timeseries: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue] | InstanceAggregationResultList: + """Aggregate data across shop result price prods + + Args: + aggregate: The aggregation to perform. + property: The property to perform aggregation on. + group_by: The property to group by when doing the aggregation. + case: The case to filter on. + price_timeseries: The price timesery to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop result price prods to return. Defaults to 25. 
Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Aggregation results. + + Examples: + + Count shop result price prods in space `my_space`: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> result = client.shop_result_price_prod.aggregate("count", space="my_space") + + """ + + filter_ = _create_shop_result_price_prod_filter( + self._view_id, + case, + price_timeseries, + external_id_prefix, + space, + filter, + ) + return self._aggregate( + self._view_id, + aggregate, + _SHOPRESULTPRICEPROD_PROPERTIES_BY_FIELD, + property, + group_by, + None, + None, + limit, + filter_, + ) + + def histogram( + self, + property: SHOPResultPriceProdFields, + interval: float, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_timeseries: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> dm.aggregations.HistogramValue: + """Produces histograms for shop result price prods + + Args: + property: The property to use as the value in the histogram. + interval: The interval to use for the histogram bins. + case: The case to filter on. + price_timeseries: The price timesery to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop result price prods to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Bucketed histogram results. 
+ + """ + filter_ = _create_shop_result_price_prod_filter( + self._view_id, + case, + price_timeseries, + external_id_prefix, + space, + filter, + ) + return self._histogram( + self._view_id, + property, + interval, + _SHOPRESULTPRICEPROD_PROPERTIES_BY_FIELD, + None, + None, + limit, + filter_, + ) + + def list( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_timeseries: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + retrieve_edges: bool = True, + ) -> SHOPResultPriceProdList: + """List/filter shop result price prods + + Args: + case: The case to filter on. + price_timeseries: The price timesery to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop result price prods to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + retrieve_edges: Whether to retrieve `alerts` or `production_timeseries` external ids for the shop result price prods. Defaults to True. 
+ + Returns: + List of requested shop result price prods + + Examples: + + List shop result price prods and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prods = client.shop_result_price_prod.list(limit=5) + + """ + filter_ = _create_shop_result_price_prod_filter( + self._view_id, + case, + price_timeseries, + external_id_prefix, + space, + filter, + ) + + return self._list( + limit=limit, + filter=filter_, + retrieve_edges=retrieve_edges, + edge_api_name_type_direction_view_id_penta=[ + ( + self.alerts_edge, + "alerts", + dm.DirectRelationReference("sp_powerops_types", "calculationIssue"), + "outwards", + dm.ViewId("sp_powerops_models", "Alert", "1"), + ), + ( + self.production_timeseries_edge, + "production_timeseries", + dm.DirectRelationReference("sp_powerops_types", "SHOPResultPriceProd.productionTimeseries"), + "outwards", + dm.ViewId("sp_powerops_models", "SHOPTimeSeries", "1"), + ), + ], + ) diff --git a/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_alerts.py b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_alerts.py new file mode 100644 index 000000000..c39650374 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_alerts.py @@ -0,0 +1,54 @@ +from __future__ import annotations + + +from cognite.client import data_modeling as dm + +from ._core import DEFAULT_LIMIT_READ, EdgeAPI, _create_edge_filter +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE + + +class SHOPResultPriceProdAlertsAPI(EdgeAPI): + def list( + self, + from_shop_result_price_prod: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + from_shop_result_price_prod_space: str = DEFAULT_INSTANCE_SPACE, + to_alert: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + to_alert_space: str = DEFAULT_INSTANCE_SPACE, + external_id_prefix: str | None = None, + 
space: str | list[str] | None = None, + limit=DEFAULT_LIMIT_READ, + ) -> dm.EdgeList: + """List alert edges of a shop result price prod. + + Args: + from_shop_result_price_prod: ID of the source shop result price prod. + from_shop_result_price_prod_space: Location of the shop result price prods. + to_alert: ID of the target alert. + to_alert_space: Location of the alerts. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of alert edges to return. Defaults to 25. Set to -1, float("inf") or None + to return all items. + + Returns: + The requested alert edges. + + Examples: + + List 5 alert edges connected to "my_shop_result_price_prod": + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prod = client.shop_result_price_prod.alerts_edge.list("my_shop_result_price_prod", limit=5) + + """ + filter_ = _create_edge_filter( + dm.DirectRelationReference("sp_powerops_types", "calculationIssue"), + from_shop_result_price_prod, + from_shop_result_price_prod_space, + to_alert, + to_alert_space, + external_id_prefix, + space, + ) + return self._list(filter_=filter_, limit=limit) diff --git a/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_output_timeseries.py b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_output_timeseries.py new file mode 100644 index 000000000..91fedf36c --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_output_timeseries.py @@ -0,0 +1,513 @@ +from __future__ import annotations + +import datetime +from collections.abc import Sequence +from typing import Literal + +import pandas as pd +from cognite.client import CogniteClient +from cognite.client import data_modeling as dm +from cognite.client.data_classes import Datapoints, DatapointsArrayList, DatapointsList, TimeSeriesList +from cognite.client.data_classes.datapoints 
import Aggregate +from cognite.powerops.client._generated.v1.data_classes._shop_result_price_prod import ( + _create_shop_result_price_prod_filter, +) +from ._core import DEFAULT_LIMIT_READ, INSTANCE_QUERY_LIMIT + +ColumnNames = Literal["outputTimeseries", "objectiveSequence", "preRun", "postRun", "shopMessages", "cplexLogs"] + + +class SHOPResultPriceProdOutputTimeseriesQuery: + def __init__( + self, + client: CogniteClient, + view_id: dm.ViewId, + timeseries_limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ): + self._client = client + self._view_id = view_id + self._timeseries_limit = timeseries_limit + self._filter = filter + + def retrieve( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> DatapointsList: + """`Retrieve datapoints for the `shop_result_price_prod.output_timeseries` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. 
Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsList`` with the requested datapoints. + + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_output_timeseries' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prod_datapoints = client.shop_result_price_prod.output_timeseries(external_id="my_output_timeseries").retrieve(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsList([]) + + def retrieve_arrays( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> 
DatapointsArrayList: + """`Retrieve numpy arrays for the `shop_result_price_prod.output_timeseries` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsArrayList`` with the requested datapoints. 
+ + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_output_timeseries' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prod_datapoints = client.shop_result_price_prod.output_timeseries(external_id="my_output_timeseries").retrieve_array(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_arrays( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsArrayList([]) + + def retrieve_dataframe( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "outputTimeseries", + ) -> pd.DataFrame: + """`Retrieve DataFrames for the `shop_result_price_prod.output_timeseries` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. 
Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. Default: False + uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False + include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True + include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False + column_names: Which property to use for column names. Defauts to outputTimeseries + + + Returns: + A ``DataFrame`` with the requested datapoints. 
+ + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_output_timeseries' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prod_datapoints = client.shop_result_price_prod.output_timeseries(external_id="my_output_timeseries").retrieve_dataframe(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def retrieve_dataframe_in_tz( + self, + start: datetime.datetime, + end: datetime.datetime, + *, + aggregates: Aggregate | Sequence[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "outputTimeseries", + ) -> pd.DataFrame: + """Retrieve DataFrames for the `shop_result_price_prod.output_timeseries` timeseries in Timezone. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. 
For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. + end: Exclusive end + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. Default: False + uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False + include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True + include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False + column_names: Which property to use for column names. 
Defauts to outputTimeseries + + + Returns: + A ``DataFrame`` with the requested datapoints. + + Examples: + + In this example, + get weekly aggregates for the 'my_output_timeseries' for the first month of 2023 in Oslo time: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> from datetime import datetime, timezone + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prod_datapoints = client.shop_result_price_prod.output_timeseries( + ... external_id="my_output_timeseries").retrieve_dataframe_in_timezone( + ... datetime(2023, 1, 1, tzinfo=ZoneInfo("Europe/Oslo")), + ... datetime(2023, 1, 2, tzinfo=ZoneInfo("Europe/Oslo")), + ... aggregates="average", + ... granularity="1week", + ... ) + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe_in_tz( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def retrieve_latest( + self, + before: None | int | str | datetime.datetime = None, + ) -> Datapoints | DatapointsList | None: + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_latest( + external_id=list(external_ids), + before=before, + ) + else: + return None + + def _retrieve_timeseries_external_ids_with_extra( + self, extra_properties: ColumnNames | list[ColumnNames] = "outputTimeseries" + ) -> dict[str, list[str]]: + return 
_retrieve_timeseries_external_ids_with_extra_output_timesery( + self._client, + self._view_id, + self._filter, + self._timeseries_limit, + extra_properties, + ) + + @staticmethod + def _rename_columns( + external_ids: dict[str, list[str]], + df: pd.DataFrame, + column_names: ColumnNames | list[ColumnNames], + include_aggregate_name: bool, + include_granularity_name: bool, + ) -> pd.DataFrame: + if isinstance(column_names, str) and column_names == "outputTimeseries": + return df + splits = sum(included for included in [include_aggregate_name, include_granularity_name]) + if splits == 0: + df.columns = ["-".join(external_ids[external_id]) for external_id in df.columns] + else: + column_parts = (col.rsplit("|", maxsplit=splits) for col in df.columns) + df.columns = [ + "-".join(external_ids[external_id]) + "|" + "|".join(parts) for external_id, *parts in column_parts + ] + return df + + +class SHOPResultPriceProdOutputTimeseriesAPI: + def __init__(self, client: CogniteClient, view_id: dm.ViewId): + self._client = client + self._view_id = view_id + + def __call__( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_timeseries: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> SHOPResultPriceProdOutputTimeseriesQuery: + """Query timeseries `shop_result_price_prod.output_timeseries` + + Args: + case: The case to filter on. + price_timeseries: The price timesery to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop result price prods to return. Defaults to 25. Set to -1, float("inf") or None to return all items. 
+ filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query object that can be used to retrieve datapoins for the shop_result_price_prod.output_timeseries timeseries + selected in this method. + + Examples: + + Retrieve all data for 5 shop_result_price_prod.output_timeseries timeseries: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prods = client.shop_result_price_prod.output_timeseries(limit=5).retrieve() + + """ + filter_ = _create_shop_result_price_prod_filter( + self._view_id, + case, + price_timeseries, + external_id_prefix, + space, + filter, + ) + + return SHOPResultPriceProdOutputTimeseriesQuery( + client=self._client, + view_id=self._view_id, + timeseries_limit=limit, + filter=filter_, + ) + + def list( + self, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_timeseries: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> TimeSeriesList: + """List timeseries `shop_result_price_prod.output_timeseries` + + Args: + case: The case to filter on. + price_timeseries: The price timesery to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop result price prods to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + List of Timeseries shop_result_price_prod.output_timeseries. 
+ + Examples: + + List shop_result_price_prod.output_timeseries and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prods = client.shop_result_price_prod.output_timeseries.list(limit=5) + + """ + filter_ = _create_shop_result_price_prod_filter( + self._view_id, + case, + price_timeseries, + external_id_prefix, + space, + filter, + ) + external_ids = _retrieve_timeseries_external_ids_with_extra_output_timesery( + self._client, self._view_id, filter_, limit + ) + if external_ids: + return self._client.time_series.retrieve_multiple(external_ids=list(external_ids)) + else: + return TimeSeriesList([]) + + +def _retrieve_timeseries_external_ids_with_extra_output_timesery( + client: CogniteClient, + view_id: dm.ViewId, + filter_: dm.Filter | None, + limit: int, + extra_properties: ColumnNames | list[ColumnNames] = "outputTimeseries", +) -> dict[str, list[str]]: + limit = float("inf") if limit is None or limit == -1 else limit + properties = ["outputTimeseries"] + if extra_properties == "outputTimeseries": + ... 
+ elif isinstance(extra_properties, str) and extra_properties != "outputTimeseries": + properties.append(extra_properties) + elif isinstance(extra_properties, list): + properties.extend([prop for prop in extra_properties if prop != "outputTimeseries"]) + else: + raise ValueError(f"Invalid value for extra_properties: {extra_properties}") + + if isinstance(extra_properties, str): + extra_list = [extra_properties] + else: + extra_list = extra_properties + has_data = dm.filters.HasData(views=[view_id]) + has_property = dm.filters.Exists(property=view_id.as_property_ref("outputTimeseries")) + filter_ = dm.filters.And(filter_, has_data, has_property) if filter_ else dm.filters.And(has_data, has_property) + + cursor = None + external_ids: dict[str, list[str]] = {} + total_retrieved = 0 + while True: + query_limit = max(min(INSTANCE_QUERY_LIMIT, limit - total_retrieved), 0) + selected_nodes = dm.query.NodeResultSetExpression(filter=filter_, limit=query_limit) + query = dm.query.Query( + with_={ + "nodes": selected_nodes, + }, + select={ + "nodes": dm.query.Select( + [dm.query.SourceSelector(view_id, properties)], + ) + }, + cursors={"nodes": cursor}, + ) + result = client.data_modeling.instances.query(query) + batch_external_ids = { + node.properties[view_id]["outputTimeseries"]: [ + node.properties[view_id].get(prop, "") for prop in extra_list + ] + for node in result.data["nodes"].data + } + total_retrieved += len(batch_external_ids) + external_ids.update(batch_external_ids) + cursor = result.cursors["nodes"] + if total_retrieved >= limit or cursor is None: + break + return external_ids diff --git a/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_production_timeseries.py b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_production_timeseries.py new file mode 100644 index 000000000..a128daeb6 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_production_timeseries.py @@ -0,0 +1,54 @@ +from __future__ 
import annotations + + +from cognite.client import data_modeling as dm + +from ._core import DEFAULT_LIMIT_READ, EdgeAPI, _create_edge_filter +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE + + +class SHOPResultPriceProdProductionTimeseriesAPI(EdgeAPI): + def list( + self, + from_shop_result_price_prod: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + from_shop_result_price_prod_space: str = DEFAULT_INSTANCE_SPACE, + to_shop_time_series: str | list[str] | dm.NodeId | list[dm.NodeId] | None = None, + to_shop_time_series_space: str = DEFAULT_INSTANCE_SPACE, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit=DEFAULT_LIMIT_READ, + ) -> dm.EdgeList: + """List production timesery edges of a shop result price prod. + + Args: + from_shop_result_price_prod: ID of the source shop result price prod. + from_shop_result_price_prod_space: Location of the shop result price prods. + to_shop_time_series: ID of the target shop time series. + to_shop_time_series_space: Location of the shop time series. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of production timesery edges to return. Defaults to 25. Set to -1, float("inf") or None + to return all items. + + Returns: + The requested production timesery edges. 
+ + Examples: + + List 5 production timesery edges connected to "my_shop_result_price_prod": + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_result_price_prod = client.shop_result_price_prod.production_timeseries_edge.list("my_shop_result_price_prod", limit=5) + + """ + filter_ = _create_edge_filter( + dm.DirectRelationReference("sp_powerops_types", "SHOPResultPriceProd.productionTimeseries"), + from_shop_result_price_prod, + from_shop_result_price_prod_space, + to_shop_time_series, + to_shop_time_series_space, + external_id_prefix, + space, + ) + return self._list(filter_=filter_, limit=limit) diff --git a/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_query.py b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_query.py new file mode 100644 index 000000000..681b5fa3d --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_result_price_prod_query.py @@ -0,0 +1,195 @@ +from __future__ import annotations + +import datetime +from typing import TYPE_CHECKING + +from cognite.client import data_modeling as dm, CogniteClient + +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + SHOPResultPriceProd, + Case, + SHOPTimeSeries, +) +from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter + +if TYPE_CHECKING: + from .alert_query import AlertQueryAPI + from .shop_time_series_query import SHOPTimeSeriesQueryAPI + + +class SHOPResultPriceProdQueryAPI(QueryAPI[T_DomainModelList]): + def __init__( + self, + client: CogniteClient, + builder: QueryBuilder[T_DomainModelList], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId], + filter_: dm.filters.Filter | None = None, + limit: int = DEFAULT_QUERY_LIMIT, + ): + super().__init__(client, builder, view_by_read_class) + + self._builder.append( + QueryStep( + 
name=self._builder.next_name("shop_result_price_prod"), + expression=dm.query.NodeResultSetExpression( + from_=self._builder[-1].name if self._builder else None, + filter=filter_, + ), + select=dm.query.Select([dm.query.SourceSelector(self._view_by_read_class[SHOPResultPriceProd], ["*"])]), + result_cls=SHOPResultPriceProd, + max_retrieve_limit=limit, + ) + ) + + def alerts( + self, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_QUERY_LIMIT, + retrieve_case: bool = False, + retrieve_price_timeseries: bool = False, + ) -> AlertQueryAPI[T_DomainModelList]: + """Query along the alert edges of the shop result price prod. + + Args: + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of alert edges to return. Defaults to 25. Set to -1, float("inf") or None + to return all items. + retrieve_case: Whether to retrieve the case for each shop result price prod or not. + retrieve_price_timeseries: Whether to retrieve the price timesery for each shop result price prod or not. + + Returns: + AlertQueryAPI: The query API for the alert. 
+ """ + from .alert_query import AlertQueryAPI + + from_ = self._builder[-1].name + + edge_filter = _create_edge_filter( + dm.DirectRelationReference("sp_powerops_types", "calculationIssue"), + external_id_prefix=external_id_prefix, + space=space, + ) + self._builder.append( + QueryStep( + name=self._builder.next_name("alerts"), + expression=dm.query.EdgeResultSetExpression( + filter=edge_filter, + from_=from_, + direction="outwards", + ), + select=dm.query.Select(), + max_retrieve_limit=limit, + ) + ) + if retrieve_case: + self._query_append_case(from_) + if retrieve_price_timeseries: + self._query_append_price_timeseries(from_) + return AlertQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) + + def production_timeseries( + self, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_QUERY_LIMIT, + retrieve_case: bool = False, + retrieve_price_timeseries: bool = False, + ) -> SHOPTimeSeriesQueryAPI[T_DomainModelList]: + """Query along the production timesery edges of the shop result price prod. + + Args: + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of production timesery edges to return. Defaults to 25. Set to -1, float("inf") or None + to return all items. + retrieve_case: Whether to retrieve the case for each shop result price prod or not. + retrieve_price_timeseries: Whether to retrieve the price timesery for each shop result price prod or not. + + Returns: + SHOPTimeSeriesQueryAPI: The query API for the shop time series. 
+ """ + from .shop_time_series_query import SHOPTimeSeriesQueryAPI + + from_ = self._builder[-1].name + + edge_filter = _create_edge_filter( + dm.DirectRelationReference("sp_powerops_types", "SHOPResultPriceProd.productionTimeseries"), + external_id_prefix=external_id_prefix, + space=space, + ) + self._builder.append( + QueryStep( + name=self._builder.next_name("production_timeseries"), + expression=dm.query.EdgeResultSetExpression( + filter=edge_filter, + from_=from_, + direction="outwards", + ), + select=dm.query.Select(), + max_retrieve_limit=limit, + ) + ) + if retrieve_case: + self._query_append_case(from_) + if retrieve_price_timeseries: + self._query_append_price_timeseries(from_) + return SHOPTimeSeriesQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) + + def query( + self, + retrieve_case: bool = False, + retrieve_price_timeseries: bool = False, + ) -> T_DomainModelList: + """Execute query and return the result. + + Args: + retrieve_case: Whether to retrieve the case for each shop result price prod or not. + retrieve_price_timeseries: Whether to retrieve the price timesery for each shop result price prod or not. + + Returns: + The list of the source nodes of the query. 
+ + """ + from_ = self._builder[-1].name + if retrieve_case: + self._query_append_case(from_) + if retrieve_price_timeseries: + self._query_append_price_timeseries(from_) + return self._query() + + def _query_append_case(self, from_: str) -> None: + view_id = self._view_by_read_class[Case] + self._builder.append( + QueryStep( + name=self._builder.next_name("case"), + expression=dm.query.NodeResultSetExpression( + filter=dm.filters.HasData(views=[view_id]), + from_=from_, + through=self._view_by_read_class[SHOPResultPriceProd].as_property_ref("case"), + direction="outwards", + ), + select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), + max_retrieve_limit=-1, + result_cls=Case, + ), + ) + + def _query_append_price_timeseries(self, from_: str) -> None: + view_id = self._view_by_read_class[SHOPTimeSeries] + self._builder.append( + QueryStep( + name=self._builder.next_name("price_timeseries"), + expression=dm.query.NodeResultSetExpression( + filter=dm.filters.HasData(views=[view_id]), + from_=from_, + through=self._view_by_read_class[SHOPResultPriceProd].as_property_ref("priceTimeseries"), + direction="outwards", + ), + select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), + max_retrieve_limit=-1, + result_cls=SHOPTimeSeries, + ), + ) diff --git a/cognite/powerops/client/_generated/v1/_api/shop_result_query.py b/cognite/powerops/client/_generated/v1/_api/shop_result_query.py index bd9642b92..624805c93 100644 --- a/cognite/powerops/client/_generated/v1/_api/shop_result_query.py +++ b/cognite/powerops/client/_generated/v1/_api/shop_result_query.py @@ -8,8 +8,7 @@ from cognite.powerops.client._generated.v1.data_classes import ( DomainModelCore, SHOPResult, - Scenario, - PriceScenario, + Case, ) from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter @@ -46,8 +45,7 @@ def alerts( external_id_prefix: str | None = None, space: str | list[str] | None = None, limit: int | None = 
DEFAULT_QUERY_LIMIT, - retrieve_scenario: bool = False, - retrieve_price_scenario: bool = False, + retrieve_case: bool = False, ) -> AlertQueryAPI[T_DomainModelList]: """Query along the alert edges of the shop result. @@ -56,8 +54,7 @@ def alerts( space: The space to filter on. limit: Maximum number of alert edges to return. Defaults to 25. Set to -1, float("inf") or None to return all items. - retrieve_scenario: Whether to retrieve the scenario for each shop result or not. - retrieve_price_scenario: Whether to retrieve the price scenario for each shop result or not. + retrieve_case: Whether to retrieve the case for each shop result or not. Returns: AlertQueryAPI: The query API for the alert. @@ -83,64 +80,41 @@ def alerts( max_retrieve_limit=limit, ) ) - if retrieve_scenario: - self._query_append_scenario(from_) - if retrieve_price_scenario: - self._query_append_price_scenario(from_) + if retrieve_case: + self._query_append_case(from_) return AlertQueryAPI(self._client, self._builder, self._view_by_read_class, None, limit) def query( self, - retrieve_scenario: bool = False, - retrieve_price_scenario: bool = False, + retrieve_case: bool = False, ) -> T_DomainModelList: """Execute query and return the result. Args: - retrieve_scenario: Whether to retrieve the scenario for each shop result or not. - retrieve_price_scenario: Whether to retrieve the price scenario for each shop result or not. + retrieve_case: Whether to retrieve the case for each shop result or not. Returns: The list of the source nodes of the query. 
""" from_ = self._builder[-1].name - if retrieve_scenario: - self._query_append_scenario(from_) - if retrieve_price_scenario: - self._query_append_price_scenario(from_) + if retrieve_case: + self._query_append_case(from_) return self._query() - def _query_append_scenario(self, from_: str) -> None: - view_id = self._view_by_read_class[Scenario] + def _query_append_case(self, from_: str) -> None: + view_id = self._view_by_read_class[Case] self._builder.append( QueryStep( - name=self._builder.next_name("scenario"), + name=self._builder.next_name("case"), expression=dm.query.NodeResultSetExpression( filter=dm.filters.HasData(views=[view_id]), from_=from_, - through=self._view_by_read_class[SHOPResult].as_property_ref("scenario"), + through=self._view_by_read_class[SHOPResult].as_property_ref("case"), direction="outwards", ), select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), max_retrieve_limit=-1, - result_cls=Scenario, - ), - ) - - def _query_append_price_scenario(self, from_: str) -> None: - view_id = self._view_by_read_class[PriceScenario] - self._builder.append( - QueryStep( - name=self._builder.next_name("price_scenario"), - expression=dm.query.NodeResultSetExpression( - filter=dm.filters.HasData(views=[view_id]), - from_=from_, - through=self._view_by_read_class[SHOPResult].as_property_ref("price_scenario"), - direction="outwards", - ), - select=dm.query.Select([dm.query.SourceSelector(view_id, ["*"])]), - max_retrieve_limit=-1, - result_cls=PriceScenario, + result_cls=Case, ), ) diff --git a/cognite/powerops/client/_generated/v1/_api/shop_time_series.py b/cognite/powerops/client/_generated/v1/_api/shop_time_series.py new file mode 100644 index 000000000..35853547b --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_time_series.py @@ -0,0 +1,500 @@ +from __future__ import annotations + +from collections.abc import Sequence +from typing import overload +import warnings + +from cognite.client import CogniteClient +from cognite.client 
import data_modeling as dm +from cognite.client.data_classes.data_modeling.instances import InstanceAggregationResultList + +from cognite.powerops.client._generated.v1.data_classes._core import DEFAULT_INSTANCE_SPACE +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + DomainModelWrite, + ResourcesWriteResult, + SHOPTimeSeries, + SHOPTimeSeriesWrite, + SHOPTimeSeriesFields, + SHOPTimeSeriesList, + SHOPTimeSeriesWriteList, + SHOPTimeSeriesTextFields, +) +from cognite.powerops.client._generated.v1.data_classes._shop_time_series import ( + _SHOPTIMESERIES_PROPERTIES_BY_FIELD, + _create_shop_time_series_filter, +) +from ._core import ( + DEFAULT_LIMIT_READ, + DEFAULT_QUERY_LIMIT, + Aggregations, + NodeAPI, + SequenceNotStr, + QueryStep, + QueryBuilder, +) +from .shop_time_series_timeseries import SHOPTimeSeriesTimeseriesAPI +from .shop_time_series_query import SHOPTimeSeriesQueryAPI + + +class SHOPTimeSeriesAPI(NodeAPI[SHOPTimeSeries, SHOPTimeSeriesWrite, SHOPTimeSeriesList]): + def __init__(self, client: CogniteClient, view_by_read_class: dict[type[DomainModelCore], dm.ViewId]): + view_id = view_by_read_class[SHOPTimeSeries] + super().__init__( + client=client, + sources=view_id, + class_type=SHOPTimeSeries, + class_list=SHOPTimeSeriesList, + class_write_list=SHOPTimeSeriesWriteList, + view_by_read_class=view_by_read_class, + ) + self._view_id = view_id + self.timeseries = SHOPTimeSeriesTimeseriesAPI(client, view_id) + + def __call__( + self, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_QUERY_LIMIT, + filter: dm.Filter | None = None, + ) -> SHOPTimeSeriesQueryAPI[SHOPTimeSeriesList]: + """Query starting at 
shop time series. + + Args: + object_type: The object type to filter on. + object_type_prefix: The prefix of the object type to filter on. + object_name: The object name to filter on. + object_name_prefix: The prefix of the object name to filter on. + attribute_name: The attribute name to filter on. + attribute_name_prefix: The prefix of the attribute name to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop time series to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query API for shop time series. + + """ + has_data = dm.filters.HasData(views=[self._view_id]) + filter_ = _create_shop_time_series_filter( + self._view_id, + object_type, + object_type_prefix, + object_name, + object_name_prefix, + attribute_name, + attribute_name_prefix, + external_id_prefix, + space, + (filter and dm.filters.And(filter, has_data)) or has_data, + ) + builder = QueryBuilder(SHOPTimeSeriesList) + return SHOPTimeSeriesQueryAPI(self._client, builder, self._view_by_read_class, filter_, limit) + + def apply( + self, + shop_time_series: SHOPTimeSeriesWrite | Sequence[SHOPTimeSeriesWrite], + replace: bool = False, + write_none: bool = False, + ) -> ResourcesWriteResult: + """Add or update (upsert) shop time series. + + Args: + shop_time_series: Shop time series or sequence of shop time series to upsert. + replace (bool): How do we behave when a property value exists? Do we replace all matching and existing values with the supplied values (true)? + Or should we merge in new values for properties together with the existing values (false)? Note: This setting applies for all nodes or edges specified in the ingestion call. 
+ write_none (bool): This method, will by default, skip properties that are set to None. However, if you want to set properties to None, + you can set this parameter to True. Note this only applies to properties that are nullable. + Returns: + Created instance(s), i.e., nodes, edges, and time series. + + Examples: + + Create a new shop_time_series: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> from cognite.powerops.client._generated.v1.data_classes import SHOPTimeSeriesWrite + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series = SHOPTimeSeriesWrite(external_id="my_shop_time_series", ...) + >>> result = client.shop_time_series.apply(shop_time_series) + + """ + warnings.warn( + "The .apply method is deprecated and will be removed in v1.0. " + "Please use the .upsert method on the client instead. This means instead of " + "`my_client.shop_time_series.apply(my_items)` please use `my_client.upsert(my_items)`." + "The motivation is that all apply methods are the same, and having one apply method per API " + " class encourages users to create items in small batches, which is inefficient." + "In addition, .upsert method is more descriptive of what the method does.", + UserWarning, + stacklevel=2, + ) + return self._apply(shop_time_series, replace, write_none) + + def delete( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> dm.InstancesDeleteResult: + """Delete one or more shop time series. + + Args: + external_id: External id of the shop time series to delete. + space: The space where all the shop time series are located. + + Returns: + The instance(s), i.e., nodes and edges which has been deleted. Empty list if nothing was deleted. 
+ + Examples: + + Delete shop_time_series by id: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> client.shop_time_series.delete("my_shop_time_series") + """ + warnings.warn( + "The .delete method is deprecated and will be removed in v1.0. " + "Please use the .delete method on the client instead. This means instead of " + "`my_client.shop_time_series.delete(my_ids)` please use `my_client.delete(my_ids)`." + "The motivation is that all delete methods are the same, and having one delete method per API " + " class encourages users to delete items in small batches, which is inefficient.", + UserWarning, + stacklevel=2, + ) + return self._delete(external_id, space) + + @overload + def retrieve(self, external_id: str, space: str = DEFAULT_INSTANCE_SPACE) -> SHOPTimeSeries | None: ... + + @overload + def retrieve(self, external_id: SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE) -> SHOPTimeSeriesList: ... + + def retrieve( + self, external_id: str | SequenceNotStr[str], space: str = DEFAULT_INSTANCE_SPACE + ) -> SHOPTimeSeries | SHOPTimeSeriesList | None: + """Retrieve one or more shop time series by id(s). + + Args: + external_id: External id or list of external ids of the shop time series. + space: The space where all the shop time series are located. + + Returns: + The requested shop time series. 
+ + Examples: + + Retrieve shop_time_series by id: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series = client.shop_time_series.retrieve("my_shop_time_series") + + """ + return self._retrieve(external_id, space) + + def search( + self, + query: str, + properties: SHOPTimeSeriesTextFields | Sequence[SHOPTimeSeriesTextFields] | None = None, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> SHOPTimeSeriesList: + """Search shop time series + + Args: + query: The search query, + properties: The property to search, if nothing is passed all text fields will be searched. + object_type: The object type to filter on. + object_type_prefix: The prefix of the object type to filter on. + object_name: The object name to filter on. + object_name_prefix: The prefix of the object name to filter on. + attribute_name: The attribute name to filter on. + attribute_name_prefix: The prefix of the attribute name to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop time series to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Search results shop time series matching the query. 
+ + Examples: + + Search for 'my_shop_time_series' in all text properties: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series_list = client.shop_time_series.search('my_shop_time_series') + + """ + filter_ = _create_shop_time_series_filter( + self._view_id, + object_type, + object_type_prefix, + object_name, + object_name_prefix, + attribute_name, + attribute_name_prefix, + external_id_prefix, + space, + filter, + ) + return self._search(self._view_id, query, _SHOPTIMESERIES_PROPERTIES_BY_FIELD, properties, filter_, limit) + + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: SHOPTimeSeriesFields | Sequence[SHOPTimeSeriesFields] | None = None, + group_by: None = None, + query: str | None = None, + search_properties: SHOPTimeSeriesTextFields | Sequence[SHOPTimeSeriesTextFields] | None = None, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue]: ... 
+ + @overload + def aggregate( + self, + aggregations: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: SHOPTimeSeriesFields | Sequence[SHOPTimeSeriesFields] | None = None, + group_by: SHOPTimeSeriesFields | Sequence[SHOPTimeSeriesFields] = None, + query: str | None = None, + search_properties: SHOPTimeSeriesTextFields | Sequence[SHOPTimeSeriesTextFields] | None = None, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> InstanceAggregationResultList: ... + + def aggregate( + self, + aggregate: ( + Aggregations + | dm.aggregations.MetricAggregation + | Sequence[Aggregations] + | Sequence[dm.aggregations.MetricAggregation] + ), + property: SHOPTimeSeriesFields | Sequence[SHOPTimeSeriesFields] | None = None, + group_by: SHOPTimeSeriesFields | Sequence[SHOPTimeSeriesFields] | None = None, + query: str | None = None, + search_property: SHOPTimeSeriesTextFields | Sequence[SHOPTimeSeriesTextFields] | None = None, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> list[dm.aggregations.AggregatedNumberedValue] | InstanceAggregationResultList: + """Aggregate data across shop time series + + Args: + aggregate: The aggregation to 
perform. + property: The property to perform aggregation on. + group_by: The property to group by when doing the aggregation. + query: The query to search for in the text field. + search_property: The text field to search in. + object_type: The object type to filter on. + object_type_prefix: The prefix of the object type to filter on. + object_name: The object name to filter on. + object_name_prefix: The prefix of the object name to filter on. + attribute_name: The attribute name to filter on. + attribute_name_prefix: The prefix of the attribute name to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop time series to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Aggregation results. + + Examples: + + Count shop time series in space `my_space`: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> result = client.shop_time_series.aggregate("count", space="my_space") + + """ + + filter_ = _create_shop_time_series_filter( + self._view_id, + object_type, + object_type_prefix, + object_name, + object_name_prefix, + attribute_name, + attribute_name_prefix, + external_id_prefix, + space, + filter, + ) + return self._aggregate( + self._view_id, + aggregate, + _SHOPTIMESERIES_PROPERTIES_BY_FIELD, + property, + group_by, + query, + search_property, + limit, + filter_, + ) + + def histogram( + self, + property: SHOPTimeSeriesFields, + interval: float, + query: str | None = None, + search_property: SHOPTimeSeriesTextFields | Sequence[SHOPTimeSeriesTextFields] | None = None, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + 
object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> dm.aggregations.HistogramValue: + """Produces histograms for shop time series + + Args: + property: The property to use as the value in the histogram. + interval: The interval to use for the histogram bins. + query: The query to search for in the text field. + search_property: The text field to search in. + object_type: The object type to filter on. + object_type_prefix: The prefix of the object type to filter on. + object_name: The object name to filter on. + object_name_prefix: The prefix of the object name to filter on. + attribute_name: The attribute name to filter on. + attribute_name_prefix: The prefix of the attribute name to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop time series to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + Bucketed histogram results. 
+ + """ + filter_ = _create_shop_time_series_filter( + self._view_id, + object_type, + object_type_prefix, + object_name, + object_name_prefix, + attribute_name, + attribute_name_prefix, + external_id_prefix, + space, + filter, + ) + return self._histogram( + self._view_id, + property, + interval, + _SHOPTIMESERIES_PROPERTIES_BY_FIELD, + query, + search_property, + limit, + filter_, + ) + + def list( + self, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int | None = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> SHOPTimeSeriesList: + """List/filter shop time series + + Args: + object_type: The object type to filter on. + object_type_prefix: The prefix of the object type to filter on. + object_name: The object name to filter on. + object_name_prefix: The prefix of the object name to filter on. + attribute_name: The attribute name to filter on. + attribute_name_prefix: The prefix of the attribute name to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop time series to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. 
+ + Returns: + List of requested shop time series + + Examples: + + List shop time series and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series_list = client.shop_time_series.list(limit=5) + + """ + filter_ = _create_shop_time_series_filter( + self._view_id, + object_type, + object_type_prefix, + object_name, + object_name_prefix, + attribute_name, + attribute_name_prefix, + external_id_prefix, + space, + filter, + ) + return self._list(limit=limit, filter=filter_) diff --git a/cognite/powerops/client/_generated/v1/_api/shop_time_series_query.py b/cognite/powerops/client/_generated/v1/_api/shop_time_series_query.py new file mode 100644 index 000000000..24e2cbcfe --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_time_series_query.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +import datetime +from typing import TYPE_CHECKING + +from cognite.client import data_modeling as dm, CogniteClient + +from cognite.powerops.client._generated.v1.data_classes import ( + DomainModelCore, + SHOPTimeSeries, +) +from ._core import DEFAULT_QUERY_LIMIT, QueryBuilder, QueryStep, QueryAPI, T_DomainModelList, _create_edge_filter + + +class SHOPTimeSeriesQueryAPI(QueryAPI[T_DomainModelList]): + def __init__( + self, + client: CogniteClient, + builder: QueryBuilder[T_DomainModelList], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId], + filter_: dm.filters.Filter | None = None, + limit: int = DEFAULT_QUERY_LIMIT, + ): + super().__init__(client, builder, view_by_read_class) + + self._builder.append( + QueryStep( + name=self._builder.next_name("shop_time_series"), + expression=dm.query.NodeResultSetExpression( + from_=self._builder[-1].name if self._builder else None, + filter=filter_, + ), + select=dm.query.Select([dm.query.SourceSelector(self._view_by_read_class[SHOPTimeSeries], ["*"])]), + result_cls=SHOPTimeSeries, + max_retrieve_limit=limit, 
+ ) + ) + + def query( + self, + ) -> T_DomainModelList: + """Execute query and return the result. + + Returns: + The list of the source nodes of the query. + + """ + return self._query() diff --git a/cognite/powerops/client/_generated/v1/_api/shop_time_series_timeseries.py b/cognite/powerops/client/_generated/v1/_api/shop_time_series_timeseries.py new file mode 100644 index 000000000..a470c04f1 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/_api/shop_time_series_timeseries.py @@ -0,0 +1,533 @@ +from __future__ import annotations + +import datetime +from collections.abc import Sequence +from typing import Literal + +import pandas as pd +from cognite.client import CogniteClient +from cognite.client import data_modeling as dm +from cognite.client.data_classes import Datapoints, DatapointsArrayList, DatapointsList, TimeSeriesList +from cognite.client.data_classes.datapoints import Aggregate +from cognite.powerops.client._generated.v1.data_classes._shop_time_series import _create_shop_time_series_filter +from ._core import DEFAULT_LIMIT_READ, INSTANCE_QUERY_LIMIT + +ColumnNames = Literal["objectType", "objectName", "attributeName", "timeseries"] + + +class SHOPTimeSeriesTimeseriesQuery: + def __init__( + self, + client: CogniteClient, + view_id: dm.ViewId, + timeseries_limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ): + self._client = client + self._view_id = view_id + self._timeseries_limit = timeseries_limit + self._filter = filter + + def retrieve( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> DatapointsList: + """`Retrieve datapoints for the `shop_time_series.timeseries` timeseries. 
+ + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsList`` with the requested datapoints. 
+ + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_timeseries' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series_datapoints = client.shop_time_series.timeseries(external_id="my_timeseries").retrieve(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsList([]) + + def retrieve_arrays( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + limit: int | None = None, + include_outside_points: bool = False, + ) -> DatapointsArrayList: + """`Retrieve numpy arrays for the `shop_time_series.timeseries` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. 
+ end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit (int | None): Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points (bool): Whether to include outside points. Not allowed when fetching aggregates. Default: False + + Returns: + A ``DatapointsArrayList`` with the requested datapoints. + + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_timeseries' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series_datapoints = client.shop_time_series.timeseries(external_id="my_timeseries").retrieve_array(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_arrays( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + ) + else: + return DatapointsArrayList([]) + + def retrieve_dataframe( + self, + start: int | str | datetime.datetime | None = None, + end: int | str | datetime.datetime | None = None, + *, + aggregates: Aggregate | list[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | 
None = None, + limit: int | None = None, + include_outside_points: bool = False, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "timeseries", + ) -> pd.DataFrame: + """`Retrieve DataFrames for the `shop_time_series.timeseries` timeseries. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. Default: 1970-01-01 UTC. + end: Exclusive end. Default: "now" + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. 
Default: False + uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False + include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True + include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False + column_names: Which property to use for column names. Defauts to timeseries + + + Returns: + A ``DataFrame`` with the requested datapoints. + + Examples: + + In this example, + we are using the time-ago format to get raw data for the 'my_timeseries' from 2 weeks ago up until now:: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series_datapoints = client.shop_time_series.timeseries(external_id="my_timeseries").retrieve_dataframe(start="2w-ago") + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + limit=limit, + include_outside_points=include_outside_points, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def 
retrieve_dataframe_in_tz( + self, + start: datetime.datetime, + end: datetime.datetime, + *, + aggregates: Aggregate | Sequence[Aggregate] | None = None, + granularity: str | None = None, + target_unit: str | None = None, + target_unit_system: str | None = None, + uniform_index: bool = False, + include_aggregate_name: bool = True, + include_granularity_name: bool = False, + column_names: ColumnNames | list[ColumnNames] = "timeseries", + ) -> pd.DataFrame: + """Retrieve DataFrames for the `shop_time_series.timeseries` timeseries in Timezone. + + **Performance guide**: + In order to retrieve millions of datapoints as efficiently as possible, here are a few guidelines: + + 1. For the best speed, and significantly lower memory usage, consider using ``retrieve_arrays(...)`` which uses ``numpy.ndarrays`` for data storage. + 2. Only unlimited queries with (``limit=None``) are fetched in parallel, so specifying a large finite ``limit`` like 1 million, comes with severe performance penalty as data is fetched serially. + 3. Try to avoid specifying `start` and `end` to be very far from the actual data: If you had data from 2000 to 2015, don't set start=0 (1970). + + Args: + start: Inclusive start. + end: Exclusive end + aggregates: Single aggregate or list of aggregates to retrieve. Default: None (raw datapoints returned) + granularity The granularity to fetch aggregates at. e.g. '15s', '2h', '10d'. Default: None. + target_unit: The unit_external_id of the data points returned. If the time series does not have an unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. + target_unit_system: The unit system of the data points returned. Cannot be used with target_unit. + limit: Maximum number of datapoints to return for each time series. Default: None (no limit) + include_outside_points: Whether to include outside points. Not allowed when fetching aggregates. 
Default: False + uniform_index: If only querying aggregates AND a single granularity is used, AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False + include_aggregate_name: Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True + include_granularity_name: Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False + column_names: Which property to use for column names. Defaults to timeseries + + + Returns: + A ``DataFrame`` with the requested datapoints. + + Examples: + + In this example, + get weekly aggregates for the 'my_timeseries' for the first month of 2023 in Oslo time: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> from datetime import datetime; from zoneinfo import ZoneInfo + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series_datapoints = client.shop_time_series.timeseries( + ... external_id="my_timeseries").retrieve_dataframe_in_tz( + ... datetime(2023, 1, 1, tzinfo=ZoneInfo("Europe/Oslo")), + ... datetime(2023, 1, 2, tzinfo=ZoneInfo("Europe/Oslo")), + ... aggregates="average", + ... granularity="1week", + ... 
) + """ + external_ids = self._retrieve_timeseries_external_ids_with_extra(column_names) + if external_ids: + df = self._client.time_series.data.retrieve_dataframe_in_tz( + external_id=list(external_ids), + start=start, + end=end, + aggregates=aggregates, + granularity=granularity, + target_unit=target_unit, + target_unit_system=target_unit_system, + uniform_index=uniform_index, + include_aggregate_name=include_aggregate_name, + include_granularity_name=include_granularity_name, + ) + is_aggregate = aggregates is not None + return self._rename_columns( + external_ids, + df, + column_names, + is_aggregate and include_aggregate_name, + is_aggregate and include_granularity_name, + ) + else: + return pd.DataFrame() + + def retrieve_latest( + self, + before: None | int | str | datetime.datetime = None, + ) -> Datapoints | DatapointsList | None: + external_ids = self._retrieve_timeseries_external_ids_with_extra() + if external_ids: + return self._client.time_series.data.retrieve_latest( + external_id=list(external_ids), + before=before, + ) + else: + return None + + def _retrieve_timeseries_external_ids_with_extra( + self, extra_properties: ColumnNames | list[ColumnNames] = "timeseries" + ) -> dict[str, list[str]]: + return _retrieve_timeseries_external_ids_with_extra_timesery( + self._client, + self._view_id, + self._filter, + self._timeseries_limit, + extra_properties, + ) + + @staticmethod + def _rename_columns( + external_ids: dict[str, list[str]], + df: pd.DataFrame, + column_names: ColumnNames | list[ColumnNames], + include_aggregate_name: bool, + include_granularity_name: bool, + ) -> pd.DataFrame: + if isinstance(column_names, str) and column_names == "timeseries": + return df + splits = sum(included for included in [include_aggregate_name, include_granularity_name]) + if splits == 0: + df.columns = ["-".join(external_ids[external_id]) for external_id in df.columns] + else: + column_parts = (col.rsplit("|", maxsplit=splits) for col in df.columns) + df.columns = [ 
+ "-".join(external_ids[external_id]) + "|" + "|".join(parts) for external_id, *parts in column_parts + ] + return df + + +class SHOPTimeSeriesTimeseriesAPI: + def __init__(self, client: CogniteClient, view_id: dm.ViewId): + self._client = client + self._view_id = view_id + + def __call__( + self, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> SHOPTimeSeriesTimeseriesQuery: + """Query timeseries `shop_time_series.timeseries` + + Args: + object_type: The object type to filter on. + object_type_prefix: The prefix of the object type to filter on. + object_name: The object name to filter on. + object_name_prefix: The prefix of the object name to filter on. + attribute_name: The attribute name to filter on. + attribute_name_prefix: The prefix of the attribute name to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop time series to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + A query object that can be used to retrieve datapoints for the shop_time_series.timeseries timeseries + selected in this method. 
+ + Examples: + + Retrieve all data for 5 shop_time_series.timeseries timeseries: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series_list = client.shop_time_series.timeseries(limit=5).retrieve() + + """ + filter_ = _create_shop_time_series_filter( + self._view_id, + object_type, + object_type_prefix, + object_name, + object_name_prefix, + attribute_name, + attribute_name_prefix, + external_id_prefix, + space, + filter, + ) + + return SHOPTimeSeriesTimeseriesQuery( + client=self._client, + view_id=self._view_id, + timeseries_limit=limit, + filter=filter_, + ) + + def list( + self, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + limit: int = DEFAULT_LIMIT_READ, + filter: dm.Filter | None = None, + ) -> TimeSeriesList: + """List timeseries `shop_time_series.timeseries` + + Args: + object_type: The object type to filter on. + object_type_prefix: The prefix of the object type to filter on. + object_name: The object name to filter on. + object_name_prefix: The prefix of the object name to filter on. + attribute_name: The attribute name to filter on. + attribute_name_prefix: The prefix of the attribute name to filter on. + external_id_prefix: The prefix of the external ID to filter on. + space: The space to filter on. + limit: Maximum number of shop time series to return. Defaults to 25. Set to -1, float("inf") or None to return all items. + filter: (Advanced) If the filtering available in the above is not sufficient, you can write your own filtering which will be ANDed with the filter above. + + Returns: + List of Timeseries shop_time_series.timeseries. 
+ + Examples: + + List shop_time_series.timeseries and limit to 5: + + >>> from cognite.powerops.client._generated.v1 import PowerOpsModelsV1Client + >>> client = PowerOpsModelsV1Client() + >>> shop_time_series_list = client.shop_time_series.timeseries.list(limit=5) + + """ + filter_ = _create_shop_time_series_filter( + self._view_id, + object_type, + object_type_prefix, + object_name, + object_name_prefix, + attribute_name, + attribute_name_prefix, + external_id_prefix, + space, + filter, + ) + external_ids = _retrieve_timeseries_external_ids_with_extra_timesery( + self._client, self._view_id, filter_, limit + ) + if external_ids: + return self._client.time_series.retrieve_multiple(external_ids=list(external_ids)) + else: + return TimeSeriesList([]) + + +def _retrieve_timeseries_external_ids_with_extra_timesery( + client: CogniteClient, + view_id: dm.ViewId, + filter_: dm.Filter | None, + limit: int, + extra_properties: ColumnNames | list[ColumnNames] = "timeseries", +) -> dict[str, list[str]]: + limit = float("inf") if limit is None or limit == -1 else limit + properties = ["timeseries"] + if extra_properties == "timeseries": + ... 
+ elif isinstance(extra_properties, str) and extra_properties != "timeseries": + properties.append(extra_properties) + elif isinstance(extra_properties, list): + properties.extend([prop for prop in extra_properties if prop != "timeseries"]) + else: + raise ValueError(f"Invalid value for extra_properties: {extra_properties}") + + if isinstance(extra_properties, str): + extra_list = [extra_properties] + else: + extra_list = extra_properties + has_data = dm.filters.HasData(views=[view_id]) + has_property = dm.filters.Exists(property=view_id.as_property_ref("timeseries")) + filter_ = dm.filters.And(filter_, has_data, has_property) if filter_ else dm.filters.And(has_data, has_property) + + cursor = None + external_ids: dict[str, list[str]] = {} + total_retrieved = 0 + while True: + query_limit = max(min(INSTANCE_QUERY_LIMIT, limit - total_retrieved), 0) + selected_nodes = dm.query.NodeResultSetExpression(filter=filter_, limit=query_limit) + query = dm.query.Query( + with_={ + "nodes": selected_nodes, + }, + select={ + "nodes": dm.query.Select( + [dm.query.SourceSelector(view_id, properties)], + ) + }, + cursors={"nodes": cursor}, + ) + result = client.data_modeling.instances.query(query) + batch_external_ids = { + node.properties[view_id]["timeseries"]: [node.properties[view_id].get(prop, "") for prop in extra_list] + for node in result.data["nodes"].data + } + total_retrieved += len(batch_external_ids) + external_ids.update(batch_external_ids) + cursor = result.cursors["nodes"] + if total_retrieved >= limit or cursor is None: + break + return external_ids diff --git a/cognite/powerops/client/_generated/v1/_api_client.py b/cognite/powerops/client/_generated/v1/_api_client.py index 7f4d7529a..0a790f516 100644 --- a/cognite/powerops/client/_generated/v1/_api_client.py +++ b/cognite/powerops/client/_generated/v1/_api_client.py @@ -25,6 +25,8 @@ from ._api.bid_method_shop_multi_scenario import BidMethodSHOPMultiScenarioAPI from ._api.bid_method_water_value import 
BidMethodWaterValueAPI from ._api.bid_row import BidRowAPI +from ._api.case import CaseAPI +from ._api.commands import CommandsAPI from ._api.custom_bid_matrix import CustomBidMatrixAPI from ._api.generator import GeneratorAPI from ._api.generator_efficiency_curve import GeneratorEfficiencyCurveAPI @@ -42,13 +44,14 @@ from ._api.price_area import PriceAreaAPI from ._api.price_area_afrr import PriceAreaAFRRAPI from ._api.price_area_asset import PriceAreaAssetAPI -from ._api.price_scenario import PriceScenarioAPI +from ._api.price_prod_case import PriceProdCaseAPI from ._api.reservoir import ReservoirAPI from ._api.shop_result import SHOPResultAPI +from ._api.shop_result_price_prod import SHOPResultPriceProdAPI +from ._api.shop_time_series import SHOPTimeSeriesAPI from ._api.shop_trigger_input import SHOPTriggerInputAPI from ._api.shop_trigger_output import SHOPTriggerOutputAPI from ._api.scenario import ScenarioAPI -from ._api.scenario_raw import ScenarioRawAPI from ._api.shop_partial_bid_calculation_input import ShopPartialBidCalculationInputAPI from ._api.shop_partial_bid_calculation_output import ShopPartialBidCalculationOutputAPI from ._api.task_dispatcher_shop_input import TaskDispatcherShopInputAPI @@ -84,6 +87,8 @@ def __init__(self, client: CogniteClient): data_classes.BidConfigurationShop: dm.ViewId("sp_powerops_models", "BidConfigurationShop", "1"), data_classes.BidMatrixRaw: dm.ViewId("sp_powerops_models", "BidMatrixRaw", "1"), data_classes.BidMethodSHOPMultiScenario: dm.ViewId("sp_powerops_models", "BidMethodSHOPMultiScenario", "1"), + data_classes.Case: dm.ViewId("sp_powerops_models", "Case", "1"), + data_classes.Commands: dm.ViewId("sp_powerops_models", "Commands", "1"), data_classes.Mapping: dm.ViewId("sp_powerops_models", "Mapping", "1"), data_classes.MarketConfiguration: dm.ViewId("sp_powerops_models", "MarketConfiguration", "1"), data_classes.ModelTemplate: dm.ViewId("sp_powerops_models", "ModelTemplate", "1"), @@ -92,12 +97,13 @@ def 
__init__(self, client: CogniteClient): data_classes.PreprocessorInput: dm.ViewId("sp_powerops_models", "PreprocessorInput", "1"), data_classes.PreprocessorOutput: dm.ViewId("sp_powerops_models", "PreprocessorOutput", "1"), data_classes.PriceArea: dm.ViewId("sp_powerops_models", "PriceArea", "1"), - data_classes.PriceScenario: dm.ViewId("sp_powerops_models", "PriceScenario", "1"), + data_classes.PriceProdCase: dm.ViewId("sp_powerops_models", "PriceProdCase", "1"), data_classes.SHOPResult: dm.ViewId("sp_powerops_models", "SHOPResult", "1"), + data_classes.SHOPResultPriceProd: dm.ViewId("sp_powerops_models", "SHOPResultPriceProd", "1"), + data_classes.SHOPTimeSeries: dm.ViewId("sp_powerops_models", "SHOPTimeSeries", "1"), data_classes.SHOPTriggerInput: dm.ViewId("sp_powerops_models", "SHOPTriggerInput", "1"), data_classes.SHOPTriggerOutput: dm.ViewId("sp_powerops_models", "SHOPTriggerOutput", "1"), data_classes.Scenario: dm.ViewId("sp_powerops_models", "Scenario", "1"), - data_classes.ScenarioRaw: dm.ViewId("sp_powerops_models", "ScenarioRaw", "1"), data_classes.ShopPartialBidCalculationInput: dm.ViewId( "sp_powerops_models", "ShopPartialBidCalculationInput", "1" ), @@ -114,6 +120,8 @@ def __init__(self, client: CogniteClient): self.bid_configuration_shop = BidConfigurationShopAPI(client, view_by_read_class) self.bid_matrix_raw = BidMatrixRawAPI(client, view_by_read_class) self.bid_method_shop_multi_scenario = BidMethodSHOPMultiScenarioAPI(client, view_by_read_class) + self.case = CaseAPI(client, view_by_read_class) + self.commands = CommandsAPI(client, view_by_read_class) self.mapping = MappingAPI(client, view_by_read_class) self.market_configuration = MarketConfigurationAPI(client, view_by_read_class) self.model_template = ModelTemplateAPI(client, view_by_read_class) @@ -122,12 +130,13 @@ def __init__(self, client: CogniteClient): self.preprocessor_input = PreprocessorInputAPI(client, view_by_read_class) self.preprocessor_output = PreprocessorOutputAPI(client, 
view_by_read_class) self.price_area = PriceAreaAPI(client, view_by_read_class) - self.price_scenario = PriceScenarioAPI(client, view_by_read_class) + self.price_prod_case = PriceProdCaseAPI(client, view_by_read_class) self.shop_result = SHOPResultAPI(client, view_by_read_class) + self.shop_result_price_prod = SHOPResultPriceProdAPI(client, view_by_read_class) + self.shop_time_series = SHOPTimeSeriesAPI(client, view_by_read_class) self.shop_trigger_input = SHOPTriggerInputAPI(client, view_by_read_class) self.shop_trigger_output = SHOPTriggerOutputAPI(client, view_by_read_class) self.scenario = ScenarioAPI(client, view_by_read_class) - self.scenario_raw = ScenarioRawAPI(client, view_by_read_class) self.shop_partial_bid_calculation_input = ShopPartialBidCalculationInputAPI(client, view_by_read_class) self.shop_partial_bid_calculation_output = ShopPartialBidCalculationOutputAPI(client, view_by_read_class) self.task_dispatcher_shop_input = TaskDispatcherShopInputAPI(client, view_by_read_class) @@ -155,6 +164,8 @@ def __init__(self, client: CogniteClient): data_classes.BidMethodDayAhead: dm.ViewId("sp_powerops_models", "BidMethodDayAhead", "1"), data_classes.BidMethodSHOPMultiScenario: dm.ViewId("sp_powerops_models", "BidMethodSHOPMultiScenario", "1"), data_classes.BidMethodWaterValue: dm.ViewId("sp_powerops_models", "BidMethodWaterValue", "1"), + data_classes.Case: dm.ViewId("sp_powerops_models", "Case", "1"), + data_classes.Commands: dm.ViewId("sp_powerops_models", "Commands", "1"), data_classes.Mapping: dm.ViewId("sp_powerops_models", "Mapping", "1"), data_classes.MarketConfiguration: dm.ViewId("sp_powerops_models", "MarketConfiguration", "1"), data_classes.ModelTemplate: dm.ViewId("sp_powerops_models", "ModelTemplate", "1"), @@ -164,8 +175,10 @@ def __init__(self, client: CogniteClient): "sp_powerops_models", "PartialPostProcessingOutput", "1" ), data_classes.PriceArea: dm.ViewId("sp_powerops_models", "PriceArea", "1"), - data_classes.PriceScenario: 
dm.ViewId("sp_powerops_models", "PriceScenario", "1"), + data_classes.PriceProdCase: dm.ViewId("sp_powerops_models", "PriceProdCase", "1"), data_classes.SHOPResult: dm.ViewId("sp_powerops_models", "SHOPResult", "1"), + data_classes.SHOPResultPriceProd: dm.ViewId("sp_powerops_models", "SHOPResultPriceProd", "1"), + data_classes.SHOPTimeSeries: dm.ViewId("sp_powerops_models", "SHOPTimeSeries", "1"), data_classes.Scenario: dm.ViewId("sp_powerops_models", "Scenario", "1"), data_classes.TotalBidMatrixCalculationInput: dm.ViewId( "sp_powerops_models", "TotalBidMatrixCalculationInput", "1" @@ -184,6 +197,8 @@ def __init__(self, client: CogniteClient): self.bid_method_day_ahead = BidMethodDayAheadAPI(client, view_by_read_class) self.bid_method_shop_multi_scenario = BidMethodSHOPMultiScenarioAPI(client, view_by_read_class) self.bid_method_water_value = BidMethodWaterValueAPI(client, view_by_read_class) + self.case = CaseAPI(client, view_by_read_class) + self.commands = CommandsAPI(client, view_by_read_class) self.mapping = MappingAPI(client, view_by_read_class) self.market_configuration = MarketConfigurationAPI(client, view_by_read_class) self.model_template = ModelTemplateAPI(client, view_by_read_class) @@ -191,8 +206,10 @@ def __init__(self, client: CogniteClient): self.partial_post_processing_input = PartialPostProcessingInputAPI(client, view_by_read_class) self.partial_post_processing_output = PartialPostProcessingOutputAPI(client, view_by_read_class) self.price_area = PriceAreaAPI(client, view_by_read_class) - self.price_scenario = PriceScenarioAPI(client, view_by_read_class) + self.price_prod_case = PriceProdCaseAPI(client, view_by_read_class) self.shop_result = SHOPResultAPI(client, view_by_read_class) + self.shop_result_price_prod = SHOPResultPriceProdAPI(client, view_by_read_class) + self.shop_time_series = SHOPTimeSeriesAPI(client, view_by_read_class) self.scenario = ScenarioAPI(client, view_by_read_class) self.total_bid_matrix_calculation_input = 
TotalBidMatrixCalculationInputAPI(client, view_by_read_class) self.total_bid_matrix_calculation_output = TotalBidMatrixCalculationOutputAPI(client, view_by_read_class) @@ -222,7 +239,6 @@ def __init__(self, client: CogniteClient): data_classes.MarketConfiguration: dm.ViewId("sp_powerops_models", "MarketConfiguration", "1"), data_classes.Plant: dm.ViewId("sp_powerops_models", "Plant", "1"), data_classes.PriceArea: dm.ViewId("sp_powerops_models", "PriceArea", "1"), - data_classes.PriceScenario: dm.ViewId("sp_powerops_models", "PriceScenario", "1"), data_classes.Reservoir: dm.ViewId("sp_powerops_models", "Reservoir", "1"), data_classes.TaskDispatcherWaterInput: dm.ViewId("sp_powerops_models", "TaskDispatcherWaterInput", "1"), data_classes.TaskDispatcherWaterOutput: dm.ViewId("sp_powerops_models", "TaskDispatcherWaterOutput", "1"), @@ -247,7 +263,6 @@ def __init__(self, client: CogniteClient): self.market_configuration = MarketConfigurationAPI(client, view_by_read_class) self.plant = PlantAPI(client, view_by_read_class) self.price_area = PriceAreaAPI(client, view_by_read_class) - self.price_scenario = PriceScenarioAPI(client, view_by_read_class) self.reservoir = ReservoirAPI(client, view_by_read_class) self.task_dispatcher_water_input = TaskDispatcherWaterInputAPI(client, view_by_read_class) self.task_dispatcher_water_output = TaskDispatcherWaterOutputAPI(client, view_by_read_class) @@ -277,15 +292,17 @@ def __init__(self, client: CogniteClient): data_classes.BidMethodDayAhead: dm.ViewId("sp_powerops_models", "BidMethodDayAhead", "1"), data_classes.BidMethodSHOPMultiScenario: dm.ViewId("sp_powerops_models", "BidMethodSHOPMultiScenario", "1"), data_classes.BidMethodWaterValue: dm.ViewId("sp_powerops_models", "BidMethodWaterValue", "1"), + data_classes.Commands: dm.ViewId("sp_powerops_models", "Commands", "1"), data_classes.Generator: dm.ViewId("sp_powerops_models", "Generator", "1"), data_classes.GeneratorEfficiencyCurve: dm.ViewId("sp_powerops_models", 
"GeneratorEfficiencyCurve", "1"), data_classes.Mapping: dm.ViewId("sp_powerops_models", "Mapping", "1"), data_classes.MarketConfiguration: dm.ViewId("sp_powerops_models", "MarketConfiguration", "1"), + data_classes.ModelTemplate: dm.ViewId("sp_powerops_models", "ModelTemplate", "1"), data_classes.Plant: dm.ViewId("sp_powerops_models", "Plant", "1"), data_classes.PlantShop: dm.ViewId("sp_powerops_models", "PlantShop", "1"), data_classes.PriceArea: dm.ViewId("sp_powerops_models", "PriceArea", "1"), - data_classes.PriceScenario: dm.ViewId("sp_powerops_models", "PriceScenario", "1"), data_classes.Reservoir: dm.ViewId("sp_powerops_models", "Reservoir", "1"), + data_classes.Scenario: dm.ViewId("sp_powerops_models", "Scenario", "1"), data_classes.TurbineEfficiencyCurve: dm.ViewId("sp_powerops_models", "TurbineEfficiencyCurve", "1"), data_classes.Watercourse: dm.ViewId("sp_powerops_models", "Watercourse", "1"), data_classes.WatercourseShop: dm.ViewId("sp_powerops_models", "WatercourseShop", "1"), @@ -299,15 +316,17 @@ def __init__(self, client: CogniteClient): self.bid_method_day_ahead = BidMethodDayAheadAPI(client, view_by_read_class) self.bid_method_shop_multi_scenario = BidMethodSHOPMultiScenarioAPI(client, view_by_read_class) self.bid_method_water_value = BidMethodWaterValueAPI(client, view_by_read_class) + self.commands = CommandsAPI(client, view_by_read_class) self.generator = GeneratorAPI(client, view_by_read_class) self.generator_efficiency_curve = GeneratorEfficiencyCurveAPI(client, view_by_read_class) self.mapping = MappingAPI(client, view_by_read_class) self.market_configuration = MarketConfigurationAPI(client, view_by_read_class) + self.model_template = ModelTemplateAPI(client, view_by_read_class) self.plant = PlantAPI(client, view_by_read_class) self.plant_shop = PlantShopAPI(client, view_by_read_class) self.price_area = PriceAreaAPI(client, view_by_read_class) - self.price_scenario = PriceScenarioAPI(client, view_by_read_class) self.reservoir = 
ReservoirAPI(client, view_by_read_class) + self.scenario = ScenarioAPI(client, view_by_read_class) self.turbine_efficiency_curve = TurbineEfficiencyCurveAPI(client, view_by_read_class) self.watercourse = WatercourseAPI(client, view_by_read_class) self.watercourse_shop = WatercourseShopAPI(client, view_by_read_class) @@ -359,7 +378,6 @@ def __init__(self, client: CogniteClient): data_classes.GeneratorEfficiencyCurve: dm.ViewId("sp_powerops_models", "GeneratorEfficiencyCurve", "1"), data_classes.Plant: dm.ViewId("sp_powerops_models", "Plant", "1"), data_classes.PriceAreaAsset: dm.ViewId("sp_powerops_models", "PriceAreaAsset", "1"), - data_classes.PriceScenario: dm.ViewId("sp_powerops_models", "PriceScenario", "1"), data_classes.Reservoir: dm.ViewId("sp_powerops_models", "Reservoir", "1"), data_classes.TurbineEfficiencyCurve: dm.ViewId("sp_powerops_models", "TurbineEfficiencyCurve", "1"), data_classes.Watercourse: dm.ViewId("sp_powerops_models", "Watercourse", "1"), @@ -371,7 +389,6 @@ def __init__(self, client: CogniteClient): self.generator_efficiency_curve = GeneratorEfficiencyCurveAPI(client, view_by_read_class) self.plant = PlantAPI(client, view_by_read_class) self.price_area_asset = PriceAreaAssetAPI(client, view_by_read_class) - self.price_scenario = PriceScenarioAPI(client, view_by_read_class) self.reservoir = ReservoirAPI(client, view_by_read_class) self.turbine_efficiency_curve = TurbineEfficiencyCurveAPI(client, view_by_read_class) self.watercourse = WatercourseAPI(client, view_by_read_class) @@ -398,13 +415,14 @@ def __init__(self, client: CogniteClient): data_classes.BidMethodDayAhead: dm.ViewId("sp_powerops_models", "BidMethodDayAhead", "1"), data_classes.BidMethodSHOPMultiScenario: dm.ViewId("sp_powerops_models", "BidMethodSHOPMultiScenario", "1"), data_classes.BidMethodWaterValue: dm.ViewId("sp_powerops_models", "BidMethodWaterValue", "1"), + data_classes.Case: dm.ViewId("sp_powerops_models", "Case", "1"), + data_classes.Commands: 
dm.ViewId("sp_powerops_models", "Commands", "1"), data_classes.CustomBidMatrix: dm.ViewId("sp_powerops_models", "CustomBidMatrix", "1"), data_classes.Mapping: dm.ViewId("sp_powerops_models", "Mapping", "1"), data_classes.ModelTemplate: dm.ViewId("sp_powerops_models", "ModelTemplate", "1"), data_classes.MultiScenarioMatrix: dm.ViewId("sp_powerops_models", "MultiScenarioMatrix", "1"), data_classes.PriceArea: dm.ViewId("sp_powerops_models", "PriceArea", "1"), - data_classes.PriceScenario: dm.ViewId("sp_powerops_models", "PriceScenario", "1"), - data_classes.SHOPResult: dm.ViewId("sp_powerops_models", "SHOPResult", "1"), + data_classes.PriceProdCase: dm.ViewId("sp_powerops_models", "PriceProdCase", "1"), data_classes.Scenario: dm.ViewId("sp_powerops_models", "Scenario", "1"), data_classes.WatercourseShop: dm.ViewId("sp_powerops_models", "WatercourseShop", "1"), } @@ -418,13 +436,14 @@ def __init__(self, client: CogniteClient): self.bid_method_day_ahead = BidMethodDayAheadAPI(client, view_by_read_class) self.bid_method_shop_multi_scenario = BidMethodSHOPMultiScenarioAPI(client, view_by_read_class) self.bid_method_water_value = BidMethodWaterValueAPI(client, view_by_read_class) + self.case = CaseAPI(client, view_by_read_class) + self.commands = CommandsAPI(client, view_by_read_class) self.custom_bid_matrix = CustomBidMatrixAPI(client, view_by_read_class) self.mapping = MappingAPI(client, view_by_read_class) self.model_template = ModelTemplateAPI(client, view_by_read_class) self.multi_scenario_matrix = MultiScenarioMatrixAPI(client, view_by_read_class) self.price_area = PriceAreaAPI(client, view_by_read_class) - self.price_scenario = PriceScenarioAPI(client, view_by_read_class) - self.shop_result = SHOPResultAPI(client, view_by_read_class) + self.price_prod_case = PriceProdCaseAPI(client, view_by_read_class) self.scenario = ScenarioAPI(client, view_by_read_class) self.watercourse_shop = WatercourseShopAPI(client, view_by_read_class) @@ -435,8 +454,8 @@ class 
PowerOpsModelsV1Client: Generated with: pygen = 0.99.11 - cognite-sdk = 7.20.0 - pydantic = 2.6.1 + cognite-sdk = 7.26.0 + pydantic = 2.6.3 """ diff --git a/cognite/powerops/client/_generated/v1/data_classes/__init__.py b/cognite/powerops/client/_generated/v1/data_classes/__init__.py index 5928f35db..01c7c2fcc 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/__init__.py +++ b/cognite/powerops/client/_generated/v1/data_classes/__init__.py @@ -50,7 +50,9 @@ BidConfigurationShop, BidConfigurationShopApply, BidConfigurationShopApplyList, + BidConfigurationShopFields, BidConfigurationShopList, + BidConfigurationShopTextFields, BidConfigurationShopWrite, BidConfigurationShopWriteList, ) @@ -172,6 +174,17 @@ BidRowWrite, BidRowWriteList, ) +from ._case import Case, CaseApply, CaseApplyList, CaseFields, CaseList, CaseWrite, CaseWriteList +from ._commands import ( + Commands, + CommandsApply, + CommandsApplyList, + CommandsFields, + CommandsList, + CommandsTextFields, + CommandsWrite, + CommandsWriteList, +) from ._custom_bid_matrix import ( CustomBidMatrix, CustomBidMatrixApply, @@ -341,15 +354,14 @@ PriceAreaAssetWrite, PriceAreaAssetWriteList, ) -from ._price_scenario import ( - PriceScenario, - PriceScenarioApply, - PriceScenarioApplyList, - PriceScenarioFields, - PriceScenarioList, - PriceScenarioTextFields, - PriceScenarioWrite, - PriceScenarioWriteList, +from ._price_prod_case import ( + PriceProdCase, + PriceProdCaseApply, + PriceProdCaseApplyList, + PriceProdCaseFields, + PriceProdCaseList, + PriceProdCaseWrite, + PriceProdCaseWriteList, ) from ._reservoir import ( Reservoir, @@ -370,6 +382,25 @@ SHOPResultWrite, SHOPResultWriteList, ) +from ._shop_result_price_prod import ( + SHOPResultPriceProd, + SHOPResultPriceProdApply, + SHOPResultPriceProdApplyList, + SHOPResultPriceProdFields, + SHOPResultPriceProdList, + SHOPResultPriceProdWrite, + SHOPResultPriceProdWriteList, +) +from ._shop_time_series import ( + SHOPTimeSeries, + SHOPTimeSeriesApply, + 
SHOPTimeSeriesApplyList, + SHOPTimeSeriesFields, + SHOPTimeSeriesList, + SHOPTimeSeriesTextFields, + SHOPTimeSeriesWrite, + SHOPTimeSeriesWriteList, +) from ._shop_trigger_input import ( SHOPTriggerInput, SHOPTriggerInputApply, @@ -400,16 +431,6 @@ ScenarioWrite, ScenarioWriteList, ) -from ._scenario_raw import ( - ScenarioRaw, - ScenarioRawApply, - ScenarioRawApplyList, - ScenarioRawFields, - ScenarioRawList, - ScenarioRawTextFields, - ScenarioRawWrite, - ScenarioRawWriteList, -) from ._shop_partial_bid_calculation_input import ( ShopPartialBidCalculationInput, ShopPartialBidCalculationInputApply, @@ -567,21 +588,15 @@ BidMatrixRaw.model_rebuild() BidMatrixRawWrite.model_rebuild() BidMatrixRawApply.model_rebuild() -BidMethodCustom.model_rebuild() -BidMethodCustomWrite.model_rebuild() -BidMethodCustomApply.model_rebuild() -BidMethodDayAhead.model_rebuild() -BidMethodDayAheadWrite.model_rebuild() -BidMethodDayAheadApply.model_rebuild() BidMethodSHOPMultiScenario.model_rebuild() BidMethodSHOPMultiScenarioWrite.model_rebuild() BidMethodSHOPMultiScenarioApply.model_rebuild() -BidMethodWaterValue.model_rebuild() -BidMethodWaterValueWrite.model_rebuild() -BidMethodWaterValueApply.model_rebuild() BidRow.model_rebuild() BidRowWrite.model_rebuild() BidRowApply.model_rebuild() +Case.model_rebuild() +CaseWrite.model_rebuild() +CaseApply.model_rebuild() CustomBidMatrix.model_rebuild() CustomBidMatrixWrite.model_rebuild() CustomBidMatrixApply.model_rebuild() @@ -615,9 +630,15 @@ PriceAreaAsset.model_rebuild() PriceAreaAssetWrite.model_rebuild() PriceAreaAssetApply.model_rebuild() +PriceProdCase.model_rebuild() +PriceProdCaseWrite.model_rebuild() +PriceProdCaseApply.model_rebuild() SHOPResult.model_rebuild() SHOPResultWrite.model_rebuild() SHOPResultApply.model_rebuild() +SHOPResultPriceProd.model_rebuild() +SHOPResultPriceProdWrite.model_rebuild() +SHOPResultPriceProdApply.model_rebuild() SHOPTriggerInput.model_rebuild() SHOPTriggerInputWrite.model_rebuild() 
SHOPTriggerInputApply.model_rebuild() @@ -627,9 +648,6 @@ Scenario.model_rebuild() ScenarioWrite.model_rebuild() ScenarioApply.model_rebuild() -ScenarioRaw.model_rebuild() -ScenarioRawWrite.model_rebuild() -ScenarioRawApply.model_rebuild() ShopPartialBidCalculationInput.model_rebuild() ShopPartialBidCalculationInputWrite.model_rebuild() ShopPartialBidCalculationInputApply.model_rebuild() @@ -709,6 +727,8 @@ "BidConfigurationShopList", "BidConfigurationShopWriteList", "BidConfigurationShopApplyList", + "BidConfigurationShopFields", + "BidConfigurationShopTextFields", "BidConfigurationWater", "BidConfigurationWaterWrite", "BidConfigurationWaterApply", @@ -803,6 +823,21 @@ "BidRowApplyList", "BidRowFields", "BidRowTextFields", + "Case", + "CaseWrite", + "CaseApply", + "CaseList", + "CaseWriteList", + "CaseApplyList", + "CaseFields", + "Commands", + "CommandsWrite", + "CommandsApply", + "CommandsList", + "CommandsWriteList", + "CommandsApplyList", + "CommandsFields", + "CommandsTextFields", "CustomBidMatrix", "CustomBidMatrixWrite", "CustomBidMatrixApply", @@ -938,14 +973,13 @@ "PriceAreaAssetApplyList", "PriceAreaAssetFields", "PriceAreaAssetTextFields", - "PriceScenario", - "PriceScenarioWrite", - "PriceScenarioApply", - "PriceScenarioList", - "PriceScenarioWriteList", - "PriceScenarioApplyList", - "PriceScenarioFields", - "PriceScenarioTextFields", + "PriceProdCase", + "PriceProdCaseWrite", + "PriceProdCaseApply", + "PriceProdCaseList", + "PriceProdCaseWriteList", + "PriceProdCaseApplyList", + "PriceProdCaseFields", "Reservoir", "ReservoirWrite", "ReservoirApply", @@ -961,6 +995,21 @@ "SHOPResultWriteList", "SHOPResultApplyList", "SHOPResultFields", + "SHOPResultPriceProd", + "SHOPResultPriceProdWrite", + "SHOPResultPriceProdApply", + "SHOPResultPriceProdList", + "SHOPResultPriceProdWriteList", + "SHOPResultPriceProdApplyList", + "SHOPResultPriceProdFields", + "SHOPTimeSeries", + "SHOPTimeSeriesWrite", + "SHOPTimeSeriesApply", + "SHOPTimeSeriesList", + 
"SHOPTimeSeriesWriteList", + "SHOPTimeSeriesApplyList", + "SHOPTimeSeriesFields", + "SHOPTimeSeriesTextFields", "SHOPTriggerInput", "SHOPTriggerInputWrite", "SHOPTriggerInputApply", @@ -985,14 +1034,6 @@ "ScenarioApplyList", "ScenarioFields", "ScenarioTextFields", - "ScenarioRaw", - "ScenarioRawWrite", - "ScenarioRawApply", - "ScenarioRawList", - "ScenarioRawWriteList", - "ScenarioRawApplyList", - "ScenarioRawFields", - "ScenarioRawTextFields", "ShopPartialBidCalculationInput", "ShopPartialBidCalculationInputWrite", "ShopPartialBidCalculationInputApply", diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_shop.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_shop.py index 2cf02bb8d..c49468dc0 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_shop.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_configuration_shop.py @@ -34,9 +34,19 @@ "BidConfigurationShopList", "BidConfigurationShopWriteList", "BidConfigurationShopApplyList", + "BidConfigurationShopFields", + "BidConfigurationShopTextFields", ] +BidConfigurationShopTextFields = Literal["name"] +BidConfigurationShopFields = Literal["name"] + +_BIDCONFIGURATIONSHOP_PROPERTIES_BY_FIELD = { + "name": "name", +} + + class BidConfigurationShop(BidConfiguration): """This represents the reading version of bid configuration shop. @@ -47,6 +57,7 @@ class BidConfigurationShop(BidConfiguration): external_id: The external id of the bid configuration shop. data_record: The data record of the bid configuration shop node. 
market_configuration: The bid method related to the bid configuration + name: The name of the bid configuration method: The bid method related to the bid configuration price_area: The price area related to the bid configuration plants_shop: The plants modelled in the shop runs @@ -56,6 +67,7 @@ class BidConfigurationShop(BidConfiguration): node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "BidConfigurationShop" ) + name: Optional[str] = None method: Union[BidMethodSHOPMultiScenario, str, dm.NodeId, None] = Field(None, repr=False) price_area: Union[PriceArea, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") plants_shop: Union[list[PlantShop], list[str], None] = Field(default=None, repr=False, alias="plantsShop") @@ -74,6 +86,7 @@ def as_write(self) -> BidConfigurationShopWrite: if isinstance(self.market_configuration, DomainModel) else self.market_configuration ), + name=self.name, method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, price_area=self.price_area.as_write() if isinstance(self.price_area, DomainModel) else self.price_area, plants_shop=[ @@ -106,6 +119,7 @@ class BidConfigurationShopWrite(BidConfigurationWrite): external_id: The external id of the bid configuration shop. data_record: The data record of the bid configuration shop node. 
market_configuration: The bid method related to the bid configuration + name: The name of the bid configuration method: The bid method related to the bid configuration price_area: The price area related to the bid configuration plants_shop: The plants modelled in the shop runs @@ -115,6 +129,7 @@ class BidConfigurationShopWrite(BidConfigurationWrite): node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "BidConfigurationShop" ) + name: Optional[str] = None method: Union[BidMethodSHOPMultiScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) price_area: Union[PriceAreaWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="priceArea") plants_shop: Union[list[PlantShopWrite], list[str], None] = Field(default=None, repr=False, alias="plantsShop") @@ -148,6 +163,9 @@ def _to_instances_write( ), } + if self.name is not None or write_none: + properties["name"] = self.name + if self.method is not None: properties["method"] = { "space": self.space if isinstance(self.method, str) else self.method.space, @@ -252,6 +270,8 @@ class BidConfigurationShopApplyList(BidConfigurationShopWriteList): ... 
def _create_bid_configuration_shop_filter( view_id: dm.ViewId, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + name: str | list[str] | None = None, + name_prefix: str | None = None, method: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, price_area: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, @@ -287,6 +307,12 @@ def _create_bid_configuration_shop_filter( values=[{"space": item[0], "externalId": item[1]} for item in market_configuration], ) ) + if isinstance(name, str): + filters.append(dm.filters.Equals(view_id.as_property_ref("name"), value=name)) + if name and isinstance(name, list): + filters.append(dm.filters.In(view_id.as_property_ref("name"), values=name)) + if name_prefix is not None: + filters.append(dm.filters.Prefix(view_id.as_property_ref("name"), value=name_prefix)) if method and isinstance(method, str): filters.append( dm.filters.Equals( diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_custom.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_custom.py index 24bb2f8cc..418e3065b 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_custom.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_custom.py @@ -1,10 +1,9 @@ from __future__ import annotations import warnings -from typing import TYPE_CHECKING, Any, Literal, Optional, Union +from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm -from pydantic import Field from ._core import ( DEFAULT_INSTANCE_SPACE, @@ -19,9 +18,6 @@ ) from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadWrite -if TYPE_CHECKING: - from ._mapping import Mapping, MappingWrite - __all__ = [ "BidMethodCustom", @@ -53,7 +49,6 @@ class BidMethodCustom(BidMethodDayAhead): external_id: The external id of the bid method custom. 
data_record: The data record of the bid method custom node. name: Name for the BidMethod - main_scenario: The main scenario to use when running the bid method """ node_type: Union[dm.DirectRelationReference, None] = None @@ -65,9 +60,6 @@ def as_write(self) -> BidMethodCustomWrite: external_id=self.external_id, data_record=DataRecordWrite(existing_version=self.data_record.version), name=self.name, - main_scenario=( - self.main_scenario.as_write() if isinstance(self.main_scenario, DomainModel) else self.main_scenario - ), ) def as_apply(self) -> BidMethodCustomWrite: @@ -90,7 +82,6 @@ class BidMethodCustomWrite(BidMethodDayAheadWrite): external_id: The external id of the bid method custom. data_record: The data record of the bid method custom node. name: Name for the BidMethod - main_scenario: The main scenario to use when running the bid method """ node_type: Union[dm.DirectRelationReference, None] = None @@ -114,14 +105,6 @@ def _to_instances_write( if self.name is not None: properties["name"] = self.name - if self.main_scenario is not None: - properties["mainScenario"] = { - "space": self.space if isinstance(self.main_scenario, str) else self.main_scenario.space, - "externalId": ( - self.main_scenario if isinstance(self.main_scenario, str) else self.main_scenario.external_id - ), - } - if properties: this_node = dm.NodeApply( space=self.space, @@ -138,10 +121,6 @@ def _to_instances_write( resources.nodes.append(this_node) cache.add(self.as_tuple_id()) - if isinstance(self.main_scenario, DomainModelWrite): - other_resources = self.main_scenario._to_instances_write(cache, view_by_read_class) - resources.extend(other_resources) - return resources @@ -189,7 +168,6 @@ def _create_bid_method_custom_filter( view_id: dm.ViewId, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, filter: 
dm.Filter | None = None, @@ -201,34 +179,6 @@ def _create_bid_method_custom_filter( filters.append(dm.filters.In(view_id.as_property_ref("name"), values=name)) if name_prefix is not None: filters.append(dm.filters.Prefix(view_id.as_property_ref("name"), value=name_prefix)) - if main_scenario and isinstance(main_scenario, str): - filters.append( - dm.filters.Equals( - view_id.as_property_ref("mainScenario"), - value={"space": DEFAULT_INSTANCE_SPACE, "externalId": main_scenario}, - ) - ) - if main_scenario and isinstance(main_scenario, tuple): - filters.append( - dm.filters.Equals( - view_id.as_property_ref("mainScenario"), - value={"space": main_scenario[0], "externalId": main_scenario[1]}, - ) - ) - if main_scenario and isinstance(main_scenario, list) and isinstance(main_scenario[0], str): - filters.append( - dm.filters.In( - view_id.as_property_ref("mainScenario"), - values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in main_scenario], - ) - ) - if main_scenario and isinstance(main_scenario, list) and isinstance(main_scenario[0], tuple): - filters.append( - dm.filters.In( - view_id.as_property_ref("mainScenario"), - values=[{"space": item[0], "externalId": item[1]} for item in main_scenario], - ) - ) if external_id_prefix is not None: filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) if isinstance(space, str): diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_day_ahead.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_day_ahead.py index 9158da714..498575a7e 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_day_ahead.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_day_ahead.py @@ -1,10 +1,9 @@ from __future__ import annotations import warnings -from typing import TYPE_CHECKING, Any, Literal, Optional, Union +from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm -from pydantic 
import Field from ._core import ( DEFAULT_INSTANCE_SPACE, @@ -19,9 +18,6 @@ ) from ._bid_method import BidMethod, BidMethodWrite -if TYPE_CHECKING: - from ._mapping import Mapping, MappingWrite - __all__ = [ "BidMethodDayAhead", @@ -53,11 +49,9 @@ class BidMethodDayAhead(BidMethod): external_id: The external id of the bid method day ahead. data_record: The data record of the bid method day ahead node. name: Name for the BidMethod - main_scenario: The main scenario to use when running the bid method """ node_type: Union[dm.DirectRelationReference, None] = None - main_scenario: Union[Mapping, str, dm.NodeId, None] = Field(None, repr=False, alias="mainScenario") def as_write(self) -> BidMethodDayAheadWrite: """Convert this read version of bid method day ahead to the writing version.""" @@ -66,9 +60,6 @@ def as_write(self) -> BidMethodDayAheadWrite: external_id=self.external_id, data_record=DataRecordWrite(existing_version=self.data_record.version), name=self.name, - main_scenario=( - self.main_scenario.as_write() if isinstance(self.main_scenario, DomainModel) else self.main_scenario - ), ) def as_apply(self) -> BidMethodDayAheadWrite: @@ -91,11 +82,9 @@ class BidMethodDayAheadWrite(BidMethodWrite): external_id: The external id of the bid method day ahead. data_record: The data record of the bid method day ahead node. 
name: Name for the BidMethod - main_scenario: The main scenario to use when running the bid method """ node_type: Union[dm.DirectRelationReference, None] = None - main_scenario: Union[MappingWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="mainScenario") def _to_instances_write( self, @@ -116,14 +105,6 @@ def _to_instances_write( if self.name is not None: properties["name"] = self.name - if self.main_scenario is not None: - properties["mainScenario"] = { - "space": self.space if isinstance(self.main_scenario, str) else self.main_scenario.space, - "externalId": ( - self.main_scenario if isinstance(self.main_scenario, str) else self.main_scenario.external_id - ), - } - if properties: this_node = dm.NodeApply( space=self.space, @@ -140,10 +121,6 @@ def _to_instances_write( resources.nodes.append(this_node) cache.add(self.as_tuple_id()) - if isinstance(self.main_scenario, DomainModelWrite): - other_resources = self.main_scenario._to_instances_write(cache, view_by_read_class) - resources.extend(other_resources) - return resources @@ -191,7 +168,6 @@ def _create_bid_method_day_ahead_filter( view_id: dm.ViewId, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, filter: dm.Filter | None = None, @@ -203,34 +179,6 @@ def _create_bid_method_day_ahead_filter( filters.append(dm.filters.In(view_id.as_property_ref("name"), values=name)) if name_prefix is not None: filters.append(dm.filters.Prefix(view_id.as_property_ref("name"), value=name_prefix)) - if main_scenario and isinstance(main_scenario, str): - filters.append( - dm.filters.Equals( - view_id.as_property_ref("mainScenario"), - value={"space": DEFAULT_INSTANCE_SPACE, "externalId": main_scenario}, - ) - ) - if main_scenario and isinstance(main_scenario, tuple): - filters.append( - dm.filters.Equals( - 
view_id.as_property_ref("mainScenario"), - value={"space": main_scenario[0], "externalId": main_scenario[1]}, - ) - ) - if main_scenario and isinstance(main_scenario, list) and isinstance(main_scenario[0], str): - filters.append( - dm.filters.In( - view_id.as_property_ref("mainScenario"), - values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in main_scenario], - ) - ) - if main_scenario and isinstance(main_scenario, list) and isinstance(main_scenario[0], tuple): - filters.append( - dm.filters.In( - view_id.as_property_ref("mainScenario"), - values=[{"space": item[0], "externalId": item[1]} for item in main_scenario], - ) - ) if external_id_prefix is not None: filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) if isinstance(space, str): diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_shop_multi_scenario.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_shop_multi_scenario.py index f54fed95f..bef8f66ef 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_shop_multi_scenario.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_shop_multi_scenario.py @@ -20,7 +20,7 @@ from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadWrite if TYPE_CHECKING: - from ._mapping import Mapping, MappingWrite + from ._scenario import Scenario, ScenarioWrite __all__ = [ @@ -35,11 +35,18 @@ ] -BidMethodSHOPMultiScenarioTextFields = Literal["name"] -BidMethodSHOPMultiScenarioFields = Literal["name"] +BidMethodSHOPMultiScenarioTextFields = Literal[ + "name", "shop_start_specification", "shop_end_specification", "shop_bid_date_specification" +] +BidMethodSHOPMultiScenarioFields = Literal[ + "name", "shop_start_specification", "shop_end_specification", "shop_bid_date_specification" +] _BIDMETHODSHOPMULTISCENARIO_PROPERTIES_BY_FIELD = { "name": "name", + "shop_start_specification": "shopStartSpecification", + "shop_end_specification": 
"shopEndSpecification", + "shop_bid_date_specification": "shopBidDateSpecification", } @@ -53,14 +60,19 @@ class BidMethodSHOPMultiScenario(BidMethodDayAhead): external_id: The external id of the bid method shop multi scenario. data_record: The data record of the bid method shop multi scenario node. name: Name for the BidMethod - main_scenario: The main scenario to use when running the bid method - price_scenarios: The price scenarios to use in the shop run + shop_start_specification: The shop start specification + shop_end_specification: The shop end specification + shop_bid_date_specification: The shop bid date specification + scenarios: The scenarios to run this bid method with (includes incremental mappings and base mappings) """ node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "BidMethodSHOPMultiScenario" ) - price_scenarios: Union[list[Mapping], list[str], None] = Field(default=None, repr=False, alias="priceScenarios") + shop_start_specification: Optional[str] = Field(None, alias="shopStartSpecification") + shop_end_specification: Optional[str] = Field(None, alias="shopEndSpecification") + shop_bid_date_specification: Optional[str] = Field(None, alias="shopBidDateSpecification") + scenarios: Union[list[Scenario], list[str], None] = Field(default=None, repr=False) def as_write(self) -> BidMethodSHOPMultiScenarioWrite: """Convert this read version of bid method shop multi scenario to the writing version.""" @@ -69,12 +81,12 @@ def as_write(self) -> BidMethodSHOPMultiScenarioWrite: external_id=self.external_id, data_record=DataRecordWrite(existing_version=self.data_record.version), name=self.name, - main_scenario=( - self.main_scenario.as_write() if isinstance(self.main_scenario, DomainModel) else self.main_scenario - ), - price_scenarios=[ - price_scenario.as_write() if isinstance(price_scenario, DomainModel) else price_scenario - for price_scenario in self.price_scenarios or [] + 
shop_start_specification=self.shop_start_specification, + shop_end_specification=self.shop_end_specification, + shop_bid_date_specification=self.shop_bid_date_specification, + scenarios=[ + scenario.as_write() if isinstance(scenario, DomainModel) else scenario + for scenario in self.scenarios or [] ], ) @@ -98,16 +110,19 @@ class BidMethodSHOPMultiScenarioWrite(BidMethodDayAheadWrite): external_id: The external id of the bid method shop multi scenario. data_record: The data record of the bid method shop multi scenario node. name: Name for the BidMethod - main_scenario: The main scenario to use when running the bid method - price_scenarios: The price scenarios to use in the shop run + shop_start_specification: The shop start specification + shop_end_specification: The shop end specification + shop_bid_date_specification: The shop bid date specification + scenarios: The scenarios to run this bid method with (includes incremental mappings and base mappings) """ node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "BidMethodSHOPMultiScenario" ) - price_scenarios: Union[list[MappingWrite], list[str], None] = Field( - default=None, repr=False, alias="priceScenarios" - ) + shop_start_specification: Optional[str] = Field(None, alias="shopStartSpecification") + shop_end_specification: Optional[str] = Field(None, alias="shopEndSpecification") + shop_bid_date_specification: Optional[str] = Field(None, alias="shopBidDateSpecification") + scenarios: Union[list[ScenarioWrite], list[str], None] = Field(default=None, repr=False) def _to_instances_write( self, @@ -128,13 +143,14 @@ def _to_instances_write( if self.name is not None: properties["name"] = self.name - if self.main_scenario is not None: - properties["mainScenario"] = { - "space": self.space if isinstance(self.main_scenario, str) else self.main_scenario.space, - "externalId": ( - self.main_scenario if isinstance(self.main_scenario, str) else self.main_scenario.external_id 
- ), - } + if self.shop_start_specification is not None or write_none: + properties["shopStartSpecification"] = self.shop_start_specification + + if self.shop_end_specification is not None or write_none: + properties["shopEndSpecification"] = self.shop_end_specification + + if self.shop_bid_date_specification is not None or write_none: + properties["shopBidDateSpecification"] = self.shop_bid_date_specification if properties: this_node = dm.NodeApply( @@ -152,21 +168,13 @@ def _to_instances_write( resources.nodes.append(this_node) cache.add(self.as_tuple_id()) - edge_type = dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.priceScenarios") - for price_scenario in self.price_scenarios or []: + edge_type = dm.DirectRelationReference("sp_powerops_types", "BidMethodDayahead.scenarios") + for scenario in self.scenarios or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, - start_node=self, - end_node=price_scenario, - edge_type=edge_type, - view_by_read_class=view_by_read_class, + cache, start_node=self, end_node=scenario, edge_type=edge_type, view_by_read_class=view_by_read_class ) resources.extend(other_resources) - if isinstance(self.main_scenario, DomainModelWrite): - other_resources = self.main_scenario._to_instances_write(cache, view_by_read_class) - resources.extend(other_resources) - return resources @@ -214,7 +222,12 @@ def _create_bid_method_shop_multi_scenario_filter( view_id: dm.ViewId, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + shop_start_specification: str | list[str] | None = None, + shop_start_specification_prefix: str | None = None, + shop_end_specification: str | list[str] | None = None, + shop_end_specification_prefix: str | None = None, + shop_bid_date_specification: str | list[str] | None = None, + shop_bid_date_specification_prefix: str | None = None, external_id_prefix: str | None = None, 
space: str | list[str] | None = None, filter: dm.Filter | None = None, @@ -226,32 +239,38 @@ def _create_bid_method_shop_multi_scenario_filter( filters.append(dm.filters.In(view_id.as_property_ref("name"), values=name)) if name_prefix is not None: filters.append(dm.filters.Prefix(view_id.as_property_ref("name"), value=name_prefix)) - if main_scenario and isinstance(main_scenario, str): + if isinstance(shop_start_specification, str): filters.append( - dm.filters.Equals( - view_id.as_property_ref("mainScenario"), - value={"space": DEFAULT_INSTANCE_SPACE, "externalId": main_scenario}, - ) + dm.filters.Equals(view_id.as_property_ref("shopStartSpecification"), value=shop_start_specification) ) - if main_scenario and isinstance(main_scenario, tuple): + if shop_start_specification and isinstance(shop_start_specification, list): filters.append( - dm.filters.Equals( - view_id.as_property_ref("mainScenario"), - value={"space": main_scenario[0], "externalId": main_scenario[1]}, - ) + dm.filters.In(view_id.as_property_ref("shopStartSpecification"), values=shop_start_specification) ) - if main_scenario and isinstance(main_scenario, list) and isinstance(main_scenario[0], str): + if shop_start_specification_prefix is not None: filters.append( - dm.filters.In( - view_id.as_property_ref("mainScenario"), - values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in main_scenario], - ) + dm.filters.Prefix(view_id.as_property_ref("shopStartSpecification"), value=shop_start_specification_prefix) + ) + if isinstance(shop_end_specification, str): + filters.append(dm.filters.Equals(view_id.as_property_ref("shopEndSpecification"), value=shop_end_specification)) + if shop_end_specification and isinstance(shop_end_specification, list): + filters.append(dm.filters.In(view_id.as_property_ref("shopEndSpecification"), values=shop_end_specification)) + if shop_end_specification_prefix is not None: + filters.append( + dm.filters.Prefix(view_id.as_property_ref("shopEndSpecification"), 
value=shop_end_specification_prefix) + ) + if isinstance(shop_bid_date_specification, str): + filters.append( + dm.filters.Equals(view_id.as_property_ref("shopBidDateSpecification"), value=shop_bid_date_specification) + ) + if shop_bid_date_specification and isinstance(shop_bid_date_specification, list): + filters.append( + dm.filters.In(view_id.as_property_ref("shopBidDateSpecification"), values=shop_bid_date_specification) ) - if main_scenario and isinstance(main_scenario, list) and isinstance(main_scenario[0], tuple): + if shop_bid_date_specification_prefix is not None: filters.append( - dm.filters.In( - view_id.as_property_ref("mainScenario"), - values=[{"space": item[0], "externalId": item[1]} for item in main_scenario], + dm.filters.Prefix( + view_id.as_property_ref("shopBidDateSpecification"), value=shop_bid_date_specification_prefix ) ) if external_id_prefix is not None: diff --git a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_water_value.py b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_water_value.py index 058483834..aca13c26b 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_bid_method_water_value.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_bid_method_water_value.py @@ -1,10 +1,9 @@ from __future__ import annotations import warnings -from typing import TYPE_CHECKING, Any, Literal, Optional, Union +from typing import Any, Literal, Optional, Union from cognite.client import data_modeling as dm -from pydantic import Field from ._core import ( DEFAULT_INSTANCE_SPACE, @@ -19,9 +18,6 @@ ) from ._bid_method_day_ahead import BidMethodDayAhead, BidMethodDayAheadWrite -if TYPE_CHECKING: - from ._mapping import Mapping, MappingWrite - __all__ = [ "BidMethodWaterValue", @@ -53,7 +49,6 @@ class BidMethodWaterValue(BidMethodDayAhead): external_id: The external id of the bid method water value. data_record: The data record of the bid method water value node. 
name: Name for the BidMethod - main_scenario: The main scenario to use when running the bid method """ node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( @@ -67,9 +62,6 @@ def as_write(self) -> BidMethodWaterValueWrite: external_id=self.external_id, data_record=DataRecordWrite(existing_version=self.data_record.version), name=self.name, - main_scenario=( - self.main_scenario.as_write() if isinstance(self.main_scenario, DomainModel) else self.main_scenario - ), ) def as_apply(self) -> BidMethodWaterValueWrite: @@ -92,7 +84,6 @@ class BidMethodWaterValueWrite(BidMethodDayAheadWrite): external_id: The external id of the bid method water value. data_record: The data record of the bid method water value node. name: Name for the BidMethod - main_scenario: The main scenario to use when running the bid method """ node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( @@ -118,14 +109,6 @@ def _to_instances_write( if self.name is not None: properties["name"] = self.name - if self.main_scenario is not None: - properties["mainScenario"] = { - "space": self.space if isinstance(self.main_scenario, str) else self.main_scenario.space, - "externalId": ( - self.main_scenario if isinstance(self.main_scenario, str) else self.main_scenario.external_id - ), - } - if properties: this_node = dm.NodeApply( space=self.space, @@ -142,10 +125,6 @@ def _to_instances_write( resources.nodes.append(this_node) cache.add(self.as_tuple_id()) - if isinstance(self.main_scenario, DomainModelWrite): - other_resources = self.main_scenario._to_instances_write(cache, view_by_read_class) - resources.extend(other_resources) - return resources @@ -193,7 +172,6 @@ def _create_bid_method_water_value_filter( view_id: dm.ViewId, name: str | list[str] | None = None, name_prefix: str | None = None, - main_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = 
None, filter: dm.Filter | None = None, @@ -205,34 +183,6 @@ def _create_bid_method_water_value_filter( filters.append(dm.filters.In(view_id.as_property_ref("name"), values=name)) if name_prefix is not None: filters.append(dm.filters.Prefix(view_id.as_property_ref("name"), value=name_prefix)) - if main_scenario and isinstance(main_scenario, str): - filters.append( - dm.filters.Equals( - view_id.as_property_ref("mainScenario"), - value={"space": DEFAULT_INSTANCE_SPACE, "externalId": main_scenario}, - ) - ) - if main_scenario and isinstance(main_scenario, tuple): - filters.append( - dm.filters.Equals( - view_id.as_property_ref("mainScenario"), - value={"space": main_scenario[0], "externalId": main_scenario[1]}, - ) - ) - if main_scenario and isinstance(main_scenario, list) and isinstance(main_scenario[0], str): - filters.append( - dm.filters.In( - view_id.as_property_ref("mainScenario"), - values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in main_scenario], - ) - ) - if main_scenario and isinstance(main_scenario, list) and isinstance(main_scenario[0], tuple): - filters.append( - dm.filters.In( - view_id.as_property_ref("mainScenario"), - values=[{"space": item[0], "externalId": item[1]} for item in main_scenario], - ) - ) if external_id_prefix is not None: filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) if isinstance(space, str): diff --git a/cognite/powerops/client/_generated/v1/data_classes/_case.py b/cognite/powerops/client/_generated/v1/data_classes/_case.py new file mode 100644 index 000000000..9b10ef63f --- /dev/null +++ b/cognite/powerops/client/_generated/v1/data_classes/_case.py @@ -0,0 +1,312 @@ +from __future__ import annotations + +import datetime +import warnings +from typing import TYPE_CHECKING, Any, Literal, Optional, Union + +from cognite.client import data_modeling as dm +from pydantic import Field + +from ._core import ( + DEFAULT_INSTANCE_SPACE, + DataRecordWrite, + DomainModel, + 
DomainModelCore, + DomainModelWrite, + DomainModelWriteList, + DomainModelList, + DomainRelationWrite, + ResourcesWrite, +) + +if TYPE_CHECKING: + from ._scenario import Scenario, ScenarioWrite + + +__all__ = [ + "Case", + "CaseWrite", + "CaseApply", + "CaseList", + "CaseWriteList", + "CaseApplyList", + "CaseFields", + "CaseTextFields", +] + + +CaseTextFields = Literal["case_file", "reservoir_mapping", "cut_order_files", "extra_files"] +CaseFields = Literal[ + "case_file", + "reservoir_mapping", + "cut_order_files", + "extra_files", + "cog_shop_files_config", + "start_time", + "end_time", +] + +_CASE_PROPERTIES_BY_FIELD = { + "case_file": "caseFile", + "reservoir_mapping": "reservoirMapping", + "cut_order_files": "cutOrderFiles", + "extra_files": "extraFiles", + "cog_shop_files_config": "cogShopFilesConfig", + "start_time": "startTime", + "end_time": "endTime", +} + + +class Case(DomainModel): + """This represents the reading version of case. + + It is used to when data is retrieved from CDF. + + Args: + space: The space where the node is located. + external_id: The external id of the case. + data_record: The data record of the case node. + scenario: The Shop scenario that was used to produce this result + case_file: The case file used + reservoir_mapping: The cut file reservoir mapping + cut_order_files: Cut order files (Module series in PRODRISK) + extra_files: The extra file field. 
+ cog_shop_files_config: Configuration for in what order to load the various files into pyshop + start_time: The start time of the case + end_time: The end time of the case + """ + + space: str = DEFAULT_INSTANCE_SPACE + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference("sp_powerops_types", "Case") + scenario: Union[Scenario, str, dm.NodeId, None] = Field(None, repr=False) + case_file: Union[str, None] = Field(None, alias="caseFile") + reservoir_mapping: Optional[list[str]] = Field(None, alias="reservoirMapping") + cut_order_files: Optional[list[str]] = Field(None, alias="cutOrderFiles") + extra_files: Optional[list[str]] = Field(None, alias="extraFiles") + cog_shop_files_config: Optional[list[dict]] = Field(None, alias="cogShopFilesConfig") + start_time: Optional[datetime.date] = Field(None, alias="startTime") + end_time: Optional[datetime.date] = Field(None, alias="endTime") + + def as_write(self) -> CaseWrite: + """Convert this read version of case to the writing version.""" + return CaseWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=self.data_record.version), + scenario=self.scenario.as_write() if isinstance(self.scenario, DomainModel) else self.scenario, + case_file=self.case_file, + reservoir_mapping=self.reservoir_mapping, + cut_order_files=self.cut_order_files, + extra_files=self.extra_files, + cog_shop_files_config=self.cog_shop_files_config, + start_time=self.start_time, + end_time=self.end_time, + ) + + def as_apply(self) -> CaseWrite: + """Convert this read version of case to the writing version.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0. Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class CaseWrite(DomainModelWrite): + """This represents the writing version of case. + + It is used to when data is sent to CDF. + + Args: + space: The space where the node is located. 
+ external_id: The external id of the case. + data_record: The data record of the case node. + scenario: The Shop scenario that was used to produce this result + case_file: The case file used + reservoir_mapping: The cut file reservoir mapping + cut_order_files: Cut order files (Module series in PRODRISK) + extra_files: The extra file field. + cog_shop_files_config: Configuration for in what order to load the various files into pyshop + start_time: The start time of the case + end_time: The end time of the case + """ + + space: str = DEFAULT_INSTANCE_SPACE + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference("sp_powerops_types", "Case") + scenario: Union[ScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) + case_file: Union[str, None] = Field(None, alias="caseFile") + reservoir_mapping: Optional[list[str]] = Field(None, alias="reservoirMapping") + cut_order_files: Optional[list[str]] = Field(None, alias="cutOrderFiles") + extra_files: Optional[list[str]] = Field(None, alias="extraFiles") + cog_shop_files_config: Optional[list[dict]] = Field(None, alias="cogShopFilesConfig") + start_time: Optional[datetime.date] = Field(None, alias="startTime") + end_time: Optional[datetime.date] = Field(None, alias="endTime") + + def _to_instances_write( + self, + cache: set[tuple[str, str]], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, + write_none: bool = False, + ) -> ResourcesWrite: + resources = ResourcesWrite() + if self.as_tuple_id() in cache: + return resources + + write_view = (view_by_read_class or {}).get(Case, dm.ViewId("sp_powerops_models", "Case", "1")) + + properties: dict[str, Any] = {} + + if self.scenario is not None: + properties["scenario"] = { + "space": self.space if isinstance(self.scenario, str) else self.scenario.space, + "externalId": self.scenario if isinstance(self.scenario, str) else self.scenario.external_id, + } + + if self.case_file is not None or write_none: + properties["caseFile"] = 
self.case_file + + if self.reservoir_mapping is not None or write_none: + properties["reservoirMapping"] = self.reservoir_mapping + + if self.cut_order_files is not None or write_none: + properties["cutOrderFiles"] = self.cut_order_files + + if self.extra_files is not None or write_none: + properties["extraFiles"] = self.extra_files + + if self.cog_shop_files_config is not None or write_none: + properties["cogShopFilesConfig"] = self.cog_shop_files_config + + if self.start_time is not None or write_none: + properties["startTime"] = self.start_time.isoformat() if self.start_time else None + + if self.end_time is not None or write_none: + properties["endTime"] = self.end_time.isoformat() if self.end_time else None + + if properties: + this_node = dm.NodeApply( + space=self.space, + external_id=self.external_id, + existing_version=self.data_record.existing_version, + type=self.node_type, + sources=[ + dm.NodeOrEdgeData( + source=write_view, + properties=properties, + ) + ], + ) + resources.nodes.append(this_node) + cache.add(self.as_tuple_id()) + + if isinstance(self.scenario, DomainModelWrite): + other_resources = self.scenario._to_instances_write(cache, view_by_read_class) + resources.extend(other_resources) + + return resources + + +class CaseApply(CaseWrite): + def __new__(cls, *args, **kwargs) -> CaseApply: + warnings.warn( + "CaseApply is deprecated and will be removed in v1.0. Use CaseWrite instead." 
+ "The motivation for this change is that Write is a more descriptive name for the writing version of the" + "Case.", + UserWarning, + stacklevel=2, + ) + return super().__new__(cls) + + +class CaseList(DomainModelList[Case]): + """List of cases in the read version.""" + + _INSTANCE = Case + + def as_write(self) -> CaseWriteList: + """Convert these read versions of case to the writing versions.""" + return CaseWriteList([node.as_write() for node in self.data]) + + def as_apply(self) -> CaseWriteList: + """Convert these read versions of primitive nullable to the writing versions.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0. Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class CaseWriteList(DomainModelWriteList[CaseWrite]): + """List of cases in the writing version.""" + + _INSTANCE = CaseWrite + + +class CaseApplyList(CaseWriteList): ... + + +def _create_case_filter( + view_id: dm.ViewId, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_start_time: datetime.date | None = None, + max_start_time: datetime.date | None = None, + min_end_time: datetime.date | None = None, + max_end_time: datetime.date | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + filter: dm.Filter | None = None, +) -> dm.Filter | None: + filters = [] + if scenario and isinstance(scenario, str): + filters.append( + dm.filters.Equals( + view_id.as_property_ref("scenario"), value={"space": DEFAULT_INSTANCE_SPACE, "externalId": scenario} + ) + ) + if scenario and isinstance(scenario, tuple): + filters.append( + dm.filters.Equals( + view_id.as_property_ref("scenario"), value={"space": scenario[0], "externalId": scenario[1]} + ) + ) + if scenario and isinstance(scenario, list) and isinstance(scenario[0], str): + filters.append( + dm.filters.In( + view_id.as_property_ref("scenario"), + values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} 
for item in scenario], + ) + ) + if scenario and isinstance(scenario, list) and isinstance(scenario[0], tuple): + filters.append( + dm.filters.In( + view_id.as_property_ref("scenario"), + values=[{"space": item[0], "externalId": item[1]} for item in scenario], + ) + ) + if min_start_time is not None or max_start_time is not None: + filters.append( + dm.filters.Range( + view_id.as_property_ref("startTime"), + gte=min_start_time.isoformat() if min_start_time else None, + lte=max_start_time.isoformat() if max_start_time else None, + ) + ) + if min_end_time is not None or max_end_time is not None: + filters.append( + dm.filters.Range( + view_id.as_property_ref("endTime"), + gte=min_end_time.isoformat() if min_end_time else None, + lte=max_end_time.isoformat() if max_end_time else None, + ) + ) + if external_id_prefix is not None: + filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) + if isinstance(space, str): + filters.append(dm.filters.Equals(["node", "space"], value=space)) + if space and isinstance(space, list): + filters.append(dm.filters.In(["node", "space"], values=space)) + if filter: + filters.append(filter) + return dm.filters.And(*filters) if filters else None diff --git a/cognite/powerops/client/_generated/v1/data_classes/_commands.py b/cognite/powerops/client/_generated/v1/data_classes/_commands.py new file mode 100644 index 000000000..188c56c33 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/data_classes/_commands.py @@ -0,0 +1,183 @@ +from __future__ import annotations + +import warnings +from typing import Any, Literal, Optional, Union + +from cognite.client import data_modeling as dm + +from ._core import ( + DEFAULT_INSTANCE_SPACE, + DataRecordWrite, + DomainModel, + DomainModelCore, + DomainModelWrite, + DomainModelWriteList, + DomainModelList, + DomainRelationWrite, + ResourcesWrite, +) + + +__all__ = [ + "Commands", + "CommandsWrite", + "CommandsApply", + "CommandsList", + "CommandsWriteList", + 
"CommandsApplyList", + "CommandsFields", + "CommandsTextFields", +] + + +CommandsTextFields = Literal["commands"] +CommandsFields = Literal["commands"] + +_COMMANDS_PROPERTIES_BY_FIELD = { + "commands": "commands", +} + + +class Commands(DomainModel): + """This represents the reading version of command. + + It is used to when data is retrieved from CDF. + + Args: + space: The space where the node is located. + external_id: The external id of the command. + data_record: The data record of the command node. + commands: The commands used in the shop model file + """ + + space: str = DEFAULT_INSTANCE_SPACE + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference("sp_powerops_types", "Commands") + commands: Optional[list[str]] = None + + def as_write(self) -> CommandsWrite: + """Convert this read version of command to the writing version.""" + return CommandsWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=self.data_record.version), + commands=self.commands, + ) + + def as_apply(self) -> CommandsWrite: + """Convert this read version of command to the writing version.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0. Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class CommandsWrite(DomainModelWrite): + """This represents the writing version of command. + + It is used to when data is sent to CDF. + + Args: + space: The space where the node is located. + external_id: The external id of the command. + data_record: The data record of the command node. 
+ commands: The commands used in the shop model file + """ + + space: str = DEFAULT_INSTANCE_SPACE + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference("sp_powerops_types", "Commands") + commands: list[str] + + def _to_instances_write( + self, + cache: set[tuple[str, str]], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, + write_none: bool = False, + ) -> ResourcesWrite: + resources = ResourcesWrite() + if self.as_tuple_id() in cache: + return resources + + write_view = (view_by_read_class or {}).get(Commands, dm.ViewId("sp_powerops_models", "Commands", "1")) + + properties: dict[str, Any] = {} + + if self.commands is not None: + properties["commands"] = self.commands + + if properties: + this_node = dm.NodeApply( + space=self.space, + external_id=self.external_id, + existing_version=self.data_record.existing_version, + type=self.node_type, + sources=[ + dm.NodeOrEdgeData( + source=write_view, + properties=properties, + ) + ], + ) + resources.nodes.append(this_node) + cache.add(self.as_tuple_id()) + + return resources + + +class CommandsApply(CommandsWrite): + def __new__(cls, *args, **kwargs) -> CommandsApply: + warnings.warn( + "CommandsApply is deprecated and will be removed in v1.0. Use CommandsWrite instead." + "The motivation for this change is that Write is a more descriptive name for the writing version of the" + "Commands.", + UserWarning, + stacklevel=2, + ) + return super().__new__(cls) + + +class CommandsList(DomainModelList[Commands]): + """List of commands in the read version.""" + + _INSTANCE = Commands + + def as_write(self) -> CommandsWriteList: + """Convert these read versions of command to the writing versions.""" + return CommandsWriteList([node.as_write() for node in self.data]) + + def as_apply(self) -> CommandsWriteList: + """Convert these read versions of command to the writing versions.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0.
Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class CommandsWriteList(DomainModelWriteList[CommandsWrite]): + """List of commands in the writing version.""" + + _INSTANCE = CommandsWrite + + +class CommandsApplyList(CommandsWriteList): ... + + +def _create_command_filter( + view_id: dm.ViewId, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + filter: dm.Filter | None = None, +) -> dm.Filter | None: + filters = [] + if external_id_prefix is not None: + filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) + if isinstance(space, str): + filters.append(dm.filters.Equals(["node", "space"], value=space)) + if space and isinstance(space, list): + filters.append(dm.filters.In(["node", "space"], values=space)) + if filter: + filters.append(filter) + return dm.filters.And(*filters) if filters else None diff --git a/cognite/powerops/client/_generated/v1/data_classes/_market_configuration.py b/cognite/powerops/client/_generated/v1/data_classes/_market_configuration.py index 531cea154..7ffbfc776 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_market_configuration.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_market_configuration.py @@ -31,8 +31,9 @@ ] -MarketConfigurationTextFields = Literal["market_type", "time_zone", "price_unit", "time_unit"] +MarketConfigurationTextFields = Literal["name", "market_type", "time_zone", "price_unit", "time_unit"] MarketConfigurationFields = Literal[ + "name", "market_type", "max_price", "min_price", @@ -45,6 +46,7 @@ ] _MARKETCONFIGURATION_PROPERTIES_BY_FIELD = { + "name": "name", "market_type": "marketType", "max_price": "maxPrice", "min_price": "minPrice", @@ -66,6 +68,7 @@ class MarketConfiguration(DomainModel): space: The space where the node is located. external_id: The external id of the market configuration. data_record: The data record of the market configuration node. 
+ name: The name of the market market_type: The market type max_price: The maximum price min_price: The minimum price @@ -81,6 +84,7 @@ class MarketConfiguration(DomainModel): node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "MarketConfiguration" ) + name: Optional[str] = None market_type: str = Field(alias="marketType") max_price: float = Field(alias="maxPrice") min_price: float = Field(alias="minPrice") @@ -97,6 +101,7 @@ def as_write(self) -> MarketConfigurationWrite: space=self.space, external_id=self.external_id, data_record=DataRecordWrite(existing_version=self.data_record.version), + name=self.name, market_type=self.market_type, max_price=self.max_price, min_price=self.min_price, @@ -127,6 +132,7 @@ class MarketConfigurationWrite(DomainModelWrite): space: The space where the node is located. external_id: The external id of the market configuration. data_record: The data record of the market configuration node. + name: The name of the market market_type: The market type max_price: The maximum price min_price: The minimum price @@ -142,6 +148,7 @@ class MarketConfigurationWrite(DomainModelWrite): node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "MarketConfiguration" ) + name: Optional[str] = None market_type: str = Field(alias="marketType") max_price: float = Field(alias="maxPrice") min_price: float = Field(alias="minPrice") @@ -168,6 +175,9 @@ def _to_instances_write( properties: dict[str, Any] = {} + if self.name is not None or write_none: + properties["name"] = self.name + if self.market_type is not None: properties["marketType"] = self.market_type @@ -256,6 +266,8 @@ class MarketConfigurationApplyList(MarketConfigurationWriteList): ... 
def _create_market_configuration_filter( view_id: dm.ViewId, + name: str | list[str] | None = None, + name_prefix: str | None = None, market_type: str | list[str] | None = None, market_type_prefix: str | None = None, min_max_price: float | None = None, @@ -279,6 +291,12 @@ def _create_market_configuration_filter( filter: dm.Filter | None = None, ) -> dm.Filter | None: filters = [] + if isinstance(name, str): + filters.append(dm.filters.Equals(view_id.as_property_ref("name"), value=name)) + if name and isinstance(name, list): + filters.append(dm.filters.In(view_id.as_property_ref("name"), values=name)) + if name_prefix is not None: + filters.append(dm.filters.Prefix(view_id.as_property_ref("name"), value=name_prefix)) if isinstance(market_type, str): filters.append(dm.filters.Equals(view_id.as_property_ref("marketType"), value=market_type)) if market_type and isinstance(market_type, list): diff --git a/cognite/powerops/client/_generated/v1/data_classes/_model_template.py b/cognite/powerops/client/_generated/v1/data_classes/_model_template.py index 1dfbe0ea6..2af566ec0 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_model_template.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_model_template.py @@ -35,14 +35,15 @@ ] -ModelTemplateTextFields = Literal["cog_shop_version", "shop_version", "model", "source"] -ModelTemplateFields = Literal["cog_shop_version", "shop_version", "model", "source"] +ModelTemplateTextFields = Literal["version_", "shop_version", "model", "extra_files"] +ModelTemplateFields = Literal["version_", "shop_version", "model", "cog_shop_files_config", "extra_files"] _MODELTEMPLATE_PROPERTIES_BY_FIELD = { - "cog_shop_version": "cogShopVersion", + "version_": "version", "shop_version": "shopVersion", "model": "model", - "source": "source", + "cog_shop_files_config": "cogShopFilesConfig", + "extra_files": "extraFiles", } @@ -55,11 +56,12 @@ class ModelTemplate(DomainModel, protected_namespaces=()): space: The space where the 
node is located. external_id: The external id of the model template. data_record: The data record of the model template node. - cog_shop_version: The tag of the cogshop image to run + version_: The version of the model shop_version: The version of SHOP to run watercourse: The watercourse to run the model for model: The shop model file to use as template before applying base mapping - source: The source of the model, for example, 'resync' + cog_shop_files_config: Configuration for in what order to load the various files into pyshop + extra_files: Extra files related to a model template base_mappings: The base mappings for the model """ @@ -67,11 +69,12 @@ class ModelTemplate(DomainModel, protected_namespaces=()): node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "ModelTemplate" ) - cog_shop_version: str = Field(alias="cogShopVersion") + version_: Optional[str] = Field(None, alias="version") shop_version: str = Field(alias="shopVersion") watercourse: Union[WatercourseShop, str, dm.NodeId, None] = Field(None, repr=False) model: Union[str, None] = None - source: Optional[str] = None + cog_shop_files_config: Optional[list[dict]] = Field(None, alias="cogShopFilesConfig") + extra_files: Optional[list[str]] = Field(None, alias="extraFiles") base_mappings: Union[list[Mapping], list[str], None] = Field(default=None, repr=False, alias="baseMappings") def as_write(self) -> ModelTemplateWrite: @@ -80,11 +83,12 @@ def as_write(self) -> ModelTemplateWrite: space=self.space, external_id=self.external_id, data_record=DataRecordWrite(existing_version=self.data_record.version), - cog_shop_version=self.cog_shop_version, + version_=self.version_, shop_version=self.shop_version, watercourse=self.watercourse.as_write() if isinstance(self.watercourse, DomainModel) else self.watercourse, model=self.model, - source=self.source, + cog_shop_files_config=self.cog_shop_files_config, + extra_files=self.extra_files, base_mappings=[ 
base_mapping.as_write() if isinstance(base_mapping, DomainModel) else base_mapping for base_mapping in self.base_mappings or [] @@ -110,11 +114,12 @@ class ModelTemplateWrite(DomainModelWrite, protected_namespaces=()): space: The space where the node is located. external_id: The external id of the model template. data_record: The data record of the model template node. - cog_shop_version: The tag of the cogshop image to run + version_: The version of the model shop_version: The version of SHOP to run watercourse: The watercourse to run the model for model: The shop model file to use as template before applying base mapping - source: The source of the model, for example, 'resync' + cog_shop_files_config: Configuration for in what order to load the various files into pyshop + extra_files: Extra files related to a model template base_mappings: The base mappings for the model """ @@ -122,11 +127,12 @@ class ModelTemplateWrite(DomainModelWrite, protected_namespaces=()): node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "ModelTemplate" ) - cog_shop_version: str = Field(alias="cogShopVersion") + version_: Optional[str] = Field(None, alias="version") shop_version: str = Field(alias="shopVersion") watercourse: Union[WatercourseShopWrite, str, dm.NodeId, None] = Field(None, repr=False) model: Union[str, None] = None - source: Optional[str] = None + cog_shop_files_config: Optional[list[dict]] = Field(None, alias="cogShopFilesConfig") + extra_files: Optional[list[str]] = Field(None, alias="extraFiles") base_mappings: Union[list[MappingWrite], list[str], None] = Field(default=None, repr=False, alias="baseMappings") def _to_instances_write( @@ -145,8 +151,8 @@ def _to_instances_write( properties: dict[str, Any] = {} - if self.cog_shop_version is not None: - properties["cogShopVersion"] = self.cog_shop_version + if self.version_ is not None or write_none: + properties["version"] = self.version_ if self.shop_version is not None: 
properties["shopVersion"] = self.shop_version @@ -160,8 +166,11 @@ def _to_instances_write( if self.model is not None: properties["model"] = self.model - if self.source is not None or write_none: - properties["source"] = self.source + if self.cog_shop_files_config is not None or write_none: + properties["cogShopFilesConfig"] = self.cog_shop_files_config + + if self.extra_files is not None or write_none: + properties["extraFiles"] = self.extra_files if properties: this_node = dm.NodeApply( @@ -239,24 +248,22 @@ class ModelTemplateApplyList(ModelTemplateWriteList): ... def _create_model_template_filter( view_id: dm.ViewId, - cog_shop_version: str | list[str] | None = None, - cog_shop_version_prefix: str | None = None, + version_: str | list[str] | None = None, + version_prefix: str | None = None, shop_version: str | list[str] | None = None, shop_version_prefix: str | None = None, watercourse: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - source: str | list[str] | None = None, - source_prefix: str | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, filter: dm.Filter | None = None, ) -> dm.Filter | None: filters = [] - if isinstance(cog_shop_version, str): - filters.append(dm.filters.Equals(view_id.as_property_ref("cogShopVersion"), value=cog_shop_version)) - if cog_shop_version and isinstance(cog_shop_version, list): - filters.append(dm.filters.In(view_id.as_property_ref("cogShopVersion"), values=cog_shop_version)) - if cog_shop_version_prefix is not None: - filters.append(dm.filters.Prefix(view_id.as_property_ref("cogShopVersion"), value=cog_shop_version_prefix)) + if isinstance(version_, str): + filters.append(dm.filters.Equals(view_id.as_property_ref("version"), value=version_)) + if version_ and isinstance(version_, list): + filters.append(dm.filters.In(view_id.as_property_ref("version"), values=version_)) + if version_prefix is not None: + 
filters.append(dm.filters.Prefix(view_id.as_property_ref("version"), value=version_prefix)) if isinstance(shop_version, str): filters.append(dm.filters.Equals(view_id.as_property_ref("shopVersion"), value=shop_version)) if shop_version and isinstance(shop_version, list): @@ -290,12 +297,6 @@ def _create_model_template_filter( values=[{"space": item[0], "externalId": item[1]} for item in watercourse], ) ) - if isinstance(source, str): - filters.append(dm.filters.Equals(view_id.as_property_ref("source"), value=source)) - if source and isinstance(source, list): - filters.append(dm.filters.In(view_id.as_property_ref("source"), values=source)) - if source_prefix is not None: - filters.append(dm.filters.Prefix(view_id.as_property_ref("source"), value=source_prefix)) if external_id_prefix is not None: filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) if isinstance(space, str): diff --git a/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix.py b/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix.py index a68362f20..e50a86bee 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_multi_scenario_matrix.py @@ -22,7 +22,7 @@ if TYPE_CHECKING: from ._alert import Alert, AlertWrite from ._bid_method_shop_multi_scenario import BidMethodSHOPMultiScenario, BidMethodSHOPMultiScenarioWrite - from ._shop_result import SHOPResult, SHOPResultWrite + from ._price_prod_case import PriceProdCase, PriceProdCaseWrite __all__ = [ @@ -65,14 +65,16 @@ class MultiScenarioMatrix(BidMatrix): is_processed: Whether the bid matrix has been processed by the bid matrix processor or not alerts: The alert field. method: The method field. - shop_results: An array of results, one for each scenario. 
+ scenario_results: An array of price/prod pairs, one for each scenario/case - this is needed for the frontend """ node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "DayAheadMultiScenarioMatrix" ) method: Union[BidMethodSHOPMultiScenario, str, dm.NodeId, None] = Field(None, repr=False) - shop_results: Union[list[SHOPResult], list[str], None] = Field(default=None, repr=False, alias="shopResults") + scenario_results: Union[list[PriceProdCase], list[str], None] = Field( + default=None, repr=False, alias="scenarioResults" + ) def as_write(self) -> MultiScenarioMatrixWrite: """Convert this read version of multi scenario matrix to the writing version.""" @@ -87,9 +89,9 @@ def as_write(self) -> MultiScenarioMatrixWrite: is_processed=self.is_processed, alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], method=self.method.as_write() if isinstance(self.method, DomainModel) else self.method, - shop_results=[ - shop_result.as_write() if isinstance(shop_result, DomainModel) else shop_result - for shop_result in self.shop_results or [] + scenario_results=[ + scenario_result.as_write() if isinstance(scenario_result, DomainModel) else scenario_result + for scenario_result in self.scenario_results or [] ], ) @@ -119,14 +121,16 @@ class MultiScenarioMatrixWrite(BidMatrixWrite): is_processed: Whether the bid matrix has been processed by the bid matrix processor or not alerts: The alert field. method: The method field. - shop_results: An array of results, one for each scenario. 
+ scenario_results: An array of price/prod pairs, one for each scenario/case - this is needed for the frontend """ node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( "sp_powerops_types", "DayAheadMultiScenarioMatrix" ) method: Union[BidMethodSHOPMultiScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) - shop_results: Union[list[SHOPResultWrite], list[str], None] = Field(default=None, repr=False, alias="shopResults") + scenario_results: Union[list[PriceProdCaseWrite], list[str], None] = Field( + default=None, repr=False, alias="scenarioResults" + ) def _to_instances_write( self, @@ -188,10 +192,14 @@ def _to_instances_write( ) resources.extend(other_resources) - edge_type = dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.shopResults") - for shop_result in self.shop_results or []: + edge_type = dm.DirectRelationReference("sp_powerops_types", "MultiScenarioMatrix.scenarioResults") + for scenario_result in self.scenario_results or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=shop_result, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=scenario_result, + edge_type=edge_type, + view_by_read_class=view_by_read_class, ) resources.extend(other_resources) diff --git a/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_input.py b/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_input.py index b85a57a8e..898aa8886 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_input.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_input.py @@ -1,5 +1,6 @@ from __future__ import annotations +import datetime import warnings from typing import TYPE_CHECKING, Any, Literal, Optional, Union @@ -19,7 +20,7 @@ ) if TYPE_CHECKING: - from ._scenario_raw import ScenarioRaw, ScenarioRawWrite + from ._scenario import Scenario, ScenarioWrite __all__ = [ @@ 
-35,13 +36,17 @@ PreprocessorInputTextFields = Literal["process_id", "function_name", "function_call_id"] -PreprocessorInputFields = Literal["process_id", "process_step", "function_name", "function_call_id"] +PreprocessorInputFields = Literal[ + "process_id", "process_step", "function_name", "function_call_id", "shop_start", "shop_end" +] _PREPROCESSORINPUT_PROPERTIES_BY_FIELD = { "process_id": "processId", "process_step": "processStep", "function_name": "functionName", "function_call_id": "functionCallId", + "shop_start": "shopStart", + "shop_end": "shopEnd", } @@ -58,7 +63,9 @@ class PreprocessorInput(DomainModel): process_step: This is the step in the process. function_name: The name of the function function_call_id: The function call id - scenario_raw: The scenario that needs preprocessing before being sent to shop (has isReady flag set to false) + scenario: The scenario to run shop with + shop_start: Start date of bid period + shop_end: End date of bid period """ space: str = DEFAULT_INSTANCE_SPACE @@ -69,7 +76,9 @@ class PreprocessorInput(DomainModel): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - scenario_raw: Union[ScenarioRaw, str, dm.NodeId, None] = Field(None, repr=False, alias="scenarioRaw") + scenario: Union[Scenario, str, dm.NodeId, None] = Field(None, repr=False) + shop_start: Optional[datetime.date] = Field(None, alias="shopStart") + shop_end: Optional[datetime.date] = Field(None, alias="shopEnd") def as_write(self) -> PreprocessorInputWrite: """Convert this read version of preprocessor input to the writing version.""" @@ -81,9 +90,9 @@ def as_write(self) -> PreprocessorInputWrite: process_step=self.process_step, function_name=self.function_name, function_call_id=self.function_call_id, - scenario_raw=( - self.scenario_raw.as_write() if isinstance(self.scenario_raw, DomainModel) else self.scenario_raw - ), + scenario=self.scenario.as_write() if 
isinstance(self.scenario, DomainModel) else self.scenario, + shop_start=self.shop_start, + shop_end=self.shop_end, ) def as_apply(self) -> PreprocessorInputWrite: @@ -109,7 +118,9 @@ class PreprocessorInputWrite(DomainModelWrite): process_step: This is the step in the process. function_name: The name of the function function_call_id: The function call id - scenario_raw: The scenario that needs preprocessing before being sent to shop (has isReady flag set to false) + scenario: The scenario to run shop with + shop_start: Start date of bid period + shop_end: End date of bid period """ space: str = DEFAULT_INSTANCE_SPACE @@ -120,7 +131,9 @@ class PreprocessorInputWrite(DomainModelWrite): process_step: int = Field(alias="processStep") function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") - scenario_raw: Union[ScenarioRawWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="scenarioRaw") + scenario: Union[ScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) + shop_start: Optional[datetime.date] = Field(None, alias="shopStart") + shop_end: Optional[datetime.date] = Field(None, alias="shopEnd") def _to_instances_write( self, @@ -150,14 +163,18 @@ def _to_instances_write( if self.function_call_id is not None: properties["functionCallId"] = self.function_call_id - if self.scenario_raw is not None: - properties["scenarioRaw"] = { - "space": self.space if isinstance(self.scenario_raw, str) else self.scenario_raw.space, - "externalId": ( - self.scenario_raw if isinstance(self.scenario_raw, str) else self.scenario_raw.external_id - ), + if self.scenario is not None: + properties["scenario"] = { + "space": self.space if isinstance(self.scenario, str) else self.scenario.space, + "externalId": self.scenario if isinstance(self.scenario, str) else self.scenario.external_id, } + if self.shop_start is not None or write_none: + properties["shopStart"] = self.shop_start.isoformat() if self.shop_start else None + + if 
self.shop_end is not None or write_none: + properties["shopEnd"] = self.shop_end.isoformat() if self.shop_end else None + if properties: this_node = dm.NodeApply( space=self.space, @@ -174,8 +191,8 @@ def _to_instances_write( resources.nodes.append(this_node) cache.add(self.as_tuple_id()) - if isinstance(self.scenario_raw, DomainModelWrite): - other_resources = self.scenario_raw._to_instances_write(cache, view_by_read_class) + if isinstance(self.scenario, DomainModelWrite): + other_resources = self.scenario._to_instances_write(cache, view_by_read_class) resources.extend(other_resources) return resources @@ -231,7 +248,11 @@ def _create_preprocessor_input_filter( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario_raw: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + min_shop_start: datetime.date | None = None, + max_shop_start: datetime.date | None = None, + min_shop_end: datetime.date | None = None, + max_shop_end: datetime.date | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, filter: dm.Filter | None = None, @@ -259,31 +280,46 @@ def _create_preprocessor_input_filter( filters.append(dm.filters.In(view_id.as_property_ref("functionCallId"), values=function_call_id)) if function_call_id_prefix is not None: filters.append(dm.filters.Prefix(view_id.as_property_ref("functionCallId"), value=function_call_id_prefix)) - if scenario_raw and isinstance(scenario_raw, str): + if scenario and isinstance(scenario, str): filters.append( dm.filters.Equals( - view_id.as_property_ref("scenarioRaw"), - value={"space": DEFAULT_INSTANCE_SPACE, "externalId": scenario_raw}, + view_id.as_property_ref("scenario"), value={"space": DEFAULT_INSTANCE_SPACE, "externalId": scenario} ) ) - if scenario_raw and isinstance(scenario_raw, tuple): + if 
scenario and isinstance(scenario, tuple): filters.append( dm.filters.Equals( - view_id.as_property_ref("scenarioRaw"), value={"space": scenario_raw[0], "externalId": scenario_raw[1]} + view_id.as_property_ref("scenario"), value={"space": scenario[0], "externalId": scenario[1]} ) ) - if scenario_raw and isinstance(scenario_raw, list) and isinstance(scenario_raw[0], str): + if scenario and isinstance(scenario, list) and isinstance(scenario[0], str): filters.append( dm.filters.In( - view_id.as_property_ref("scenarioRaw"), - values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in scenario_raw], + view_id.as_property_ref("scenario"), + values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in scenario], ) ) - if scenario_raw and isinstance(scenario_raw, list) and isinstance(scenario_raw[0], tuple): + if scenario and isinstance(scenario, list) and isinstance(scenario[0], tuple): filters.append( dm.filters.In( - view_id.as_property_ref("scenarioRaw"), - values=[{"space": item[0], "externalId": item[1]} for item in scenario_raw], + view_id.as_property_ref("scenario"), + values=[{"space": item[0], "externalId": item[1]} for item in scenario], + ) + ) + if min_shop_start is not None or max_shop_start is not None: + filters.append( + dm.filters.Range( + view_id.as_property_ref("shopStart"), + gte=min_shop_start.isoformat() if min_shop_start else None, + lte=max_shop_start.isoformat() if max_shop_start else None, + ) + ) + if min_shop_end is not None or max_shop_end is not None: + filters.append( + dm.filters.Range( + view_id.as_property_ref("shopEnd"), + gte=min_shop_end.isoformat() if min_shop_end else None, + lte=max_shop_end.isoformat() if max_shop_end else None, ) ) if external_id_prefix is not None: diff --git a/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_output.py b/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_output.py index d5bd10213..8c95ffbe9 100644 --- 
a/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_output.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_preprocessor_output.py @@ -20,8 +20,8 @@ if TYPE_CHECKING: from ._alert import Alert, AlertWrite + from ._case import Case, CaseWrite from ._preprocessor_input import PreprocessorInput, PreprocessorInputWrite - from ._scenario import Scenario, ScenarioWrite __all__ = [ @@ -61,7 +61,7 @@ class PreprocessorOutput(DomainModel): function_name: The name of the function function_call_id: The function call id alerts: An array of calculation level Alerts. - scenario: The prepped and processed scenario to send to shop trigger + case: The Case to trigger shop with input_: The prepped and processed scenario to send to shop trigger """ @@ -74,7 +74,7 @@ class PreprocessorOutput(DomainModel): function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) - scenario: Union[Scenario, str, dm.NodeId, None] = Field(None, repr=False) + case: Union[Case, str, dm.NodeId, None] = Field(None, repr=False) input_: Union[PreprocessorInput, str, dm.NodeId, None] = Field(None, repr=False, alias="input") def as_write(self) -> PreprocessorOutputWrite: @@ -88,7 +88,7 @@ def as_write(self) -> PreprocessorOutputWrite: function_name=self.function_name, function_call_id=self.function_call_id, alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], - scenario=self.scenario.as_write() if isinstance(self.scenario, DomainModel) else self.scenario, + case=self.case.as_write() if isinstance(self.case, DomainModel) else self.case, input_=self.input_.as_write() if isinstance(self.input_, DomainModel) else self.input_, ) @@ -116,7 +116,7 @@ class PreprocessorOutputWrite(DomainModelWrite): function_name: The name of the function function_call_id: The function call id alerts: An array of calculation level Alerts. 
- scenario: The prepped and processed scenario to send to shop trigger + case: The Case to trigger shop with input_: The prepped and processed scenario to send to shop trigger """ @@ -129,7 +129,7 @@ class PreprocessorOutputWrite(DomainModelWrite): function_name: str = Field(alias="functionName") function_call_id: str = Field(alias="functionCallId") alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) - scenario: Union[ScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) + case: Union[CaseWrite, str, dm.NodeId, None] = Field(None, repr=False) input_: Union[PreprocessorInputWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="input") def _to_instances_write( @@ -160,10 +160,10 @@ def _to_instances_write( if self.function_call_id is not None: properties["functionCallId"] = self.function_call_id - if self.scenario is not None: - properties["scenario"] = { - "space": self.space if isinstance(self.scenario, str) else self.scenario.space, - "externalId": self.scenario if isinstance(self.scenario, str) else self.scenario.external_id, + if self.case is not None: + properties["case"] = { + "space": self.space if isinstance(self.case, str) else self.case.space, + "externalId": self.case if isinstance(self.case, str) else self.case.external_id, } if self.input_ is not None: @@ -195,8 +195,8 @@ def _to_instances_write( ) resources.extend(other_resources) - if isinstance(self.scenario, DomainModelWrite): - other_resources = self.scenario._to_instances_write(cache, view_by_read_class) + if isinstance(self.case, DomainModelWrite): + other_resources = self.case._to_instances_write(cache, view_by_read_class) resources.extend(other_resources) if isinstance(self.input_, DomainModelWrite): @@ -256,7 +256,7 @@ def _create_preprocessor_output_filter( function_name_prefix: str | None = None, function_call_id: str | list[str] | None = None, function_call_id_prefix: str | None = None, - scenario: str | tuple[str, str] | list[str] | 
list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, input_: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, @@ -285,30 +285,27 @@ def _create_preprocessor_output_filter( filters.append(dm.filters.In(view_id.as_property_ref("functionCallId"), values=function_call_id)) if function_call_id_prefix is not None: filters.append(dm.filters.Prefix(view_id.as_property_ref("functionCallId"), value=function_call_id_prefix)) - if scenario and isinstance(scenario, str): + if case and isinstance(case, str): filters.append( dm.filters.Equals( - view_id.as_property_ref("scenario"), value={"space": DEFAULT_INSTANCE_SPACE, "externalId": scenario} + view_id.as_property_ref("case"), value={"space": DEFAULT_INSTANCE_SPACE, "externalId": case} ) ) - if scenario and isinstance(scenario, tuple): + if case and isinstance(case, tuple): filters.append( - dm.filters.Equals( - view_id.as_property_ref("scenario"), value={"space": scenario[0], "externalId": scenario[1]} - ) + dm.filters.Equals(view_id.as_property_ref("case"), value={"space": case[0], "externalId": case[1]}) ) - if scenario and isinstance(scenario, list) and isinstance(scenario[0], str): + if case and isinstance(case, list) and isinstance(case[0], str): filters.append( dm.filters.In( - view_id.as_property_ref("scenario"), - values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in scenario], + view_id.as_property_ref("case"), + values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in case], ) ) - if scenario and isinstance(scenario, list) and isinstance(scenario[0], tuple): + if case and isinstance(case, list) and isinstance(case[0], tuple): filters.append( dm.filters.In( - view_id.as_property_ref("scenario"), - values=[{"space": item[0], "externalId": item[1]} for item in scenario], + view_id.as_property_ref("case"), 
values=[{"space": item[0], "externalId": item[1]} for item in case] ) ) if input_ and isinstance(input_, str): diff --git a/cognite/powerops/client/_generated/v1/data_classes/_price_prod_case.py b/cognite/powerops/client/_generated/v1/data_classes/_price_prod_case.py new file mode 100644 index 000000000..d6cc8fcb3 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/data_classes/_price_prod_case.py @@ -0,0 +1,255 @@ +from __future__ import annotations + +import warnings +from typing import TYPE_CHECKING, Any, Literal, Optional, Union + +from cognite.client import data_modeling as dm +from cognite.client.data_classes import TimeSeries as CogniteTimeSeries +from pydantic import Field + +from ._core import ( + DEFAULT_INSTANCE_SPACE, + DataRecordWrite, + DomainModel, + DomainModelCore, + DomainModelWrite, + DomainModelWriteList, + DomainModelList, + DomainRelationWrite, + ResourcesWrite, + TimeSeries, +) + +if TYPE_CHECKING: + from ._case import Case, CaseWrite + + +__all__ = [ + "PriceProdCase", + "PriceProdCaseWrite", + "PriceProdCaseApply", + "PriceProdCaseList", + "PriceProdCaseWriteList", + "PriceProdCaseApplyList", + "PriceProdCaseFields", + "PriceProdCaseTextFields", +] + + +PriceProdCaseTextFields = Literal["price", "production"] +PriceProdCaseFields = Literal["price", "production"] + +_PRICEPRODCASE_PROPERTIES_BY_FIELD = { + "price": "price", + "production": "production", +} + + +class PriceProdCase(DomainModel): + """This represents the reading version of price prod case. + + It is used to when data is retrieved from CDF. + + Args: + space: The space where the node is located. + external_id: The external id of the price prod case. + data_record: The data record of the price prod case node. + price: The price field. + production: The production field. + case: The case field. 
+ """ + + space: str = DEFAULT_INSTANCE_SPACE + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( + "sp_powerops_types", "PriceProdCase" + ) + price: Union[TimeSeries, str, None] = None + production: Union[TimeSeries, str, None] = None + case: Union[Case, str, dm.NodeId, None] = Field(None, repr=False) + + def as_write(self) -> PriceProdCaseWrite: + """Convert this read version of price prod case to the writing version.""" + return PriceProdCaseWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=self.data_record.version), + price=self.price, + production=self.production, + case=self.case.as_write() if isinstance(self.case, DomainModel) else self.case, + ) + + def as_apply(self) -> PriceProdCaseWrite: + """Convert this read version of price prod case to the writing version.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0. Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class PriceProdCaseWrite(DomainModelWrite): + """This represents the writing version of price prod case. + + It is used to when data is sent to CDF. + + Args: + space: The space where the node is located. + external_id: The external id of the price prod case. + data_record: The data record of the price prod case node. + price: The price field. + production: The production field. + case: The case field. 
+ """ + + space: str = DEFAULT_INSTANCE_SPACE + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( + "sp_powerops_types", "PriceProdCase" + ) + price: Union[TimeSeries, str, None] = None + production: Union[TimeSeries, str, None] = None + case: Union[CaseWrite, str, dm.NodeId, None] = Field(None, repr=False) + + def _to_instances_write( + self, + cache: set[tuple[str, str]], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, + write_none: bool = False, + ) -> ResourcesWrite: + resources = ResourcesWrite() + if self.as_tuple_id() in cache: + return resources + + write_view = (view_by_read_class or {}).get( + PriceProdCase, dm.ViewId("sp_powerops_models", "PriceProdCase", "1") + ) + + properties: dict[str, Any] = {} + + if self.price is not None or write_none: + if isinstance(self.price, str) or self.price is None: + properties["price"] = self.price + else: + properties["price"] = self.price.external_id + + if self.production is not None or write_none: + if isinstance(self.production, str) or self.production is None: + properties["production"] = self.production + else: + properties["production"] = self.production.external_id + + if self.case is not None: + properties["case"] = { + "space": self.space if isinstance(self.case, str) else self.case.space, + "externalId": self.case if isinstance(self.case, str) else self.case.external_id, + } + + if properties: + this_node = dm.NodeApply( + space=self.space, + external_id=self.external_id, + existing_version=self.data_record.existing_version, + type=self.node_type, + sources=[ + dm.NodeOrEdgeData( + source=write_view, + properties=properties, + ) + ], + ) + resources.nodes.append(this_node) + cache.add(self.as_tuple_id()) + + if isinstance(self.case, DomainModelWrite): + other_resources = self.case._to_instances_write(cache, view_by_read_class) + resources.extend(other_resources) + + if isinstance(self.price, CogniteTimeSeries): + resources.time_series.append(self.price) + + 
if isinstance(self.production, CogniteTimeSeries): + resources.time_series.append(self.production) + + return resources + + +class PriceProdCaseApply(PriceProdCaseWrite): + def __new__(cls, *args, **kwargs) -> PriceProdCaseApply: + warnings.warn( + "PriceProdCaseApply is deprecated and will be removed in v1.0. Use PriceProdCaseWrite instead." + "The motivation for this change is that Write is a more descriptive name for the writing version of the" + "PriceProdCase.", + UserWarning, + stacklevel=2, + ) + return super().__new__(cls) + + +class PriceProdCaseList(DomainModelList[PriceProdCase]): + """List of price prod cases in the read version.""" + + _INSTANCE = PriceProdCase + + def as_write(self) -> PriceProdCaseWriteList: + """Convert these read versions of price prod case to the writing versions.""" + return PriceProdCaseWriteList([node.as_write() for node in self.data]) + + def as_apply(self) -> PriceProdCaseWriteList: + """Convert these read versions of primitive nullable to the writing versions.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0. Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class PriceProdCaseWriteList(DomainModelWriteList[PriceProdCaseWrite]): + """List of price prod cases in the writing version.""" + + _INSTANCE = PriceProdCaseWrite + + +class PriceProdCaseApplyList(PriceProdCaseWriteList): ... 
+ + +def _create_price_prod_case_filter( + view_id: dm.ViewId, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + filter: dm.Filter | None = None, +) -> dm.Filter | None: + filters = [] + if case and isinstance(case, str): + filters.append( + dm.filters.Equals( + view_id.as_property_ref("case"), value={"space": DEFAULT_INSTANCE_SPACE, "externalId": case} + ) + ) + if case and isinstance(case, tuple): + filters.append( + dm.filters.Equals(view_id.as_property_ref("case"), value={"space": case[0], "externalId": case[1]}) + ) + if case and isinstance(case, list) and isinstance(case[0], str): + filters.append( + dm.filters.In( + view_id.as_property_ref("case"), + values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in case], + ) + ) + if case and isinstance(case, list) and isinstance(case[0], tuple): + filters.append( + dm.filters.In( + view_id.as_property_ref("case"), values=[{"space": item[0], "externalId": item[1]} for item in case] + ) + ) + if external_id_prefix is not None: + filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) + if isinstance(space, str): + filters.append(dm.filters.Equals(["node", "space"], value=space)) + if space and isinstance(space, list): + filters.append(dm.filters.In(["node", "space"], values=space)) + if filter: + filters.append(filter) + return dm.filters.And(*filters) if filters else None diff --git a/cognite/powerops/client/_generated/v1/data_classes/_scenario.py b/cognite/powerops/client/_generated/v1/data_classes/_scenario.py index 8c04387ac..0ba16982f 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_scenario.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_scenario.py @@ -1,6 +1,5 @@ from __future__ import annotations -import datetime import warnings from typing import TYPE_CHECKING, Any, Literal, Optional, Union @@ -20,6 +19,7 @@ ) if 
TYPE_CHECKING: + from ._commands import Commands, CommandsWrite from ._mapping import Mapping, MappingWrite from ._model_template import ModelTemplate, ModelTemplateWrite @@ -36,44 +36,12 @@ ] -ScenarioTextFields = Literal[ - "name", - "shop_version", - "model_file", - "commands", - "extra_files", - "source", - "shop_start_specification", - "shop_end_specification", -] -ScenarioFields = Literal[ - "name", - "shop_version", - "model_file", - "commands", - "extra_files", - "source", - "shop_start_specification", - "shop_end_specification", - "shop_start", - "shop_end", - "bid_date", - "is_ready", -] +ScenarioTextFields = Literal["name", "source"] +ScenarioFields = Literal["name", "source"] _SCENARIO_PROPERTIES_BY_FIELD = { "name": "name", - "shop_version": "shopVersion", - "model_file": "modelFile", - "commands": "commands", - "extra_files": "extraFiles", "source": "source", - "shop_start_specification": "shopStartSpecification", - "shop_end_specification": "shopEndSpecification", - "shop_start": "shopStart", - "shop_end": "shopEnd", - "bid_date": "bidDate", - "is_ready": "isReady", } @@ -87,36 +55,18 @@ class Scenario(DomainModel, protected_namespaces=()): external_id: The external id of the scenario. data_record: The data record of the scenario node. 
name: The name of the scenario to run - shop_version: The shop version to use when running the scenario model_template: The model template to use when running the scenario - model_file: The final model file to use when running the scenario (after modelTemplate is processed) - commands: The commands to run when running the scenario - extra_files: Extra files to include when running the scenario + commands: The commands to run source: The source of the scenario - shop_start_specification: The shop start specification - shop_end_specification: The shop end specification - shop_start: The shop start time - shop_end: The shop end time - bid_date: The bid date of the scenario - is_ready: Whether the scenario is ready to be run mappings_override: An array of base mappings to override in shop model file """ space: str = DEFAULT_INSTANCE_SPACE node_type: Union[dm.DirectRelationReference, None] = None name: str - shop_version: Optional[str] = Field(None, alias="shopVersion") model_template: Union[ModelTemplate, str, dm.NodeId, None] = Field(None, repr=False, alias="modelTemplate") - model_file: Union[str, None] = Field(None, alias="modelFile") - commands: Optional[list[str]] = None - extra_files: Optional[list[str]] = Field(None, alias="extraFiles") + commands: Union[Commands, str, dm.NodeId, None] = Field(None, repr=False) source: Optional[str] = None - shop_start_specification: Optional[str] = Field(None, alias="shopStartSpecification") - shop_end_specification: Optional[str] = Field(None, alias="shopEndSpecification") - shop_start: Optional[datetime.datetime] = Field(None, alias="shopStart") - shop_end: Optional[datetime.datetime] = Field(None, alias="shopEnd") - bid_date: Optional[datetime.date] = Field(None, alias="bidDate") - is_ready: Optional[bool] = Field(None, alias="isReady") mappings_override: Union[list[Mapping], list[str], None] = Field(default=None, repr=False, alias="mappingsOverride") def as_write(self) -> ScenarioWrite: @@ -126,20 +76,11 @@ def 
as_write(self) -> ScenarioWrite: external_id=self.external_id, data_record=DataRecordWrite(existing_version=self.data_record.version), name=self.name, - shop_version=self.shop_version, model_template=( self.model_template.as_write() if isinstance(self.model_template, DomainModel) else self.model_template ), - model_file=self.model_file, - commands=self.commands, - extra_files=self.extra_files, + commands=self.commands.as_write() if isinstance(self.commands, DomainModel) else self.commands, source=self.source, - shop_start_specification=self.shop_start_specification, - shop_end_specification=self.shop_end_specification, - shop_start=self.shop_start, - shop_end=self.shop_end, - bid_date=self.bid_date, - is_ready=self.is_ready, mappings_override=[ mappings_override.as_write() if isinstance(mappings_override, DomainModel) else mappings_override for mappings_override in self.mappings_override or [] @@ -166,36 +107,18 @@ class ScenarioWrite(DomainModelWrite, protected_namespaces=()): external_id: The external id of the scenario. data_record: The data record of the scenario node. 
name: The name of the scenario to run - shop_version: The shop version to use when running the scenario model_template: The model template to use when running the scenario - model_file: The final model file to use when running the scenario (after modelTemplate is processed) - commands: The commands to run when running the scenario - extra_files: Extra files to include when running the scenario + commands: The commands to run source: The source of the scenario - shop_start_specification: The shop start specification - shop_end_specification: The shop end specification - shop_start: The shop start time - shop_end: The shop end time - bid_date: The bid date of the scenario - is_ready: Whether the scenario is ready to be run mappings_override: An array of base mappings to override in shop model file """ space: str = DEFAULT_INSTANCE_SPACE node_type: Union[dm.DirectRelationReference, None] = None name: str - shop_version: Optional[str] = Field(None, alias="shopVersion") model_template: Union[ModelTemplateWrite, str, dm.NodeId, None] = Field(None, repr=False, alias="modelTemplate") - model_file: Union[str, None] = Field(None, alias="modelFile") - commands: Optional[list[str]] = None - extra_files: Optional[list[str]] = Field(None, alias="extraFiles") + commands: Union[CommandsWrite, str, dm.NodeId, None] = Field(None, repr=False) source: Optional[str] = None - shop_start_specification: Optional[str] = Field(None, alias="shopStartSpecification") - shop_end_specification: Optional[str] = Field(None, alias="shopEndSpecification") - shop_start: Optional[datetime.datetime] = Field(None, alias="shopStart") - shop_end: Optional[datetime.datetime] = Field(None, alias="shopEnd") - bid_date: Optional[datetime.date] = Field(None, alias="bidDate") - is_ready: Optional[bool] = Field(None, alias="isReady") mappings_override: Union[list[MappingWrite], list[str], None] = Field( default=None, repr=False, alias="mappingsOverride" ) @@ -217,9 +140,6 @@ def _to_instances_write( if self.name 
is not None: properties["name"] = self.name - if self.shop_version is not None or write_none: - properties["shopVersion"] = self.shop_version - if self.model_template is not None: properties["modelTemplate"] = { "space": self.space if isinstance(self.model_template, str) else self.model_template.space, @@ -228,36 +148,15 @@ def _to_instances_write( ), } - if self.model_file is not None: - properties["modelFile"] = self.model_file - - if self.commands is not None or write_none: - properties["commands"] = self.commands - - if self.extra_files is not None or write_none: - properties["extraFiles"] = self.extra_files + if self.commands is not None: + properties["commands"] = { + "space": self.space if isinstance(self.commands, str) else self.commands.space, + "externalId": self.commands if isinstance(self.commands, str) else self.commands.external_id, + } if self.source is not None or write_none: properties["source"] = self.source - if self.shop_start_specification is not None or write_none: - properties["shopStartSpecification"] = self.shop_start_specification - - if self.shop_end_specification is not None or write_none: - properties["shopEndSpecification"] = self.shop_end_specification - - if self.shop_start is not None or write_none: - properties["shopStart"] = self.shop_start.isoformat(timespec="milliseconds") if self.shop_start else None - - if self.shop_end is not None or write_none: - properties["shopEnd"] = self.shop_end.isoformat(timespec="milliseconds") if self.shop_end else None - - if self.bid_date is not None or write_none: - properties["bidDate"] = self.bid_date.isoformat() if self.bid_date else None - - if self.is_ready is not None or write_none: - properties["isReady"] = self.is_ready - if properties: this_node = dm.NodeApply( space=self.space, @@ -289,6 +188,10 @@ def _to_instances_write( other_resources = self.model_template._to_instances_write(cache, view_by_read_class) resources.extend(other_resources) + if isinstance(self.commands, 
DomainModelWrite): + other_resources = self.commands._to_instances_write(cache, view_by_read_class) + resources.extend(other_resources) + return resources @@ -336,22 +239,10 @@ def _create_scenario_filter( view_id: dm.ViewId, name: str | list[str] | None = None, name_prefix: str | None = None, - shop_version: str | list[str] | None = None, - shop_version_prefix: str | None = None, model_template: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + commands: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, source: str | list[str] | None = None, source_prefix: str | None = None, - shop_start_specification: str | list[str] | None = None, - shop_start_specification_prefix: str | None = None, - shop_end_specification: str | list[str] | None = None, - shop_end_specification_prefix: str | None = None, - min_shop_start: datetime.datetime | None = None, - max_shop_start: datetime.datetime | None = None, - min_shop_end: datetime.datetime | None = None, - max_shop_end: datetime.datetime | None = None, - min_bid_date: datetime.date | None = None, - max_bid_date: datetime.date | None = None, - is_ready: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, filter: dm.Filter | None = None, @@ -363,12 +254,6 @@ def _create_scenario_filter( filters.append(dm.filters.In(view_id.as_property_ref("name"), values=name)) if name_prefix is not None: filters.append(dm.filters.Prefix(view_id.as_property_ref("name"), value=name_prefix)) - if isinstance(shop_version, str): - filters.append(dm.filters.Equals(view_id.as_property_ref("shopVersion"), value=shop_version)) - if shop_version and isinstance(shop_version, list): - filters.append(dm.filters.In(view_id.as_property_ref("shopVersion"), values=shop_version)) - if shop_version_prefix is not None: - filters.append(dm.filters.Prefix(view_id.as_property_ref("shopVersion"), value=shop_version_prefix)) if model_template and isinstance(model_template, str): 
filters.append( dm.filters.Equals( @@ -397,58 +282,38 @@ def _create_scenario_filter( values=[{"space": item[0], "externalId": item[1]} for item in model_template], ) ) - if isinstance(source, str): - filters.append(dm.filters.Equals(view_id.as_property_ref("source"), value=source)) - if source and isinstance(source, list): - filters.append(dm.filters.In(view_id.as_property_ref("source"), values=source)) - if source_prefix is not None: - filters.append(dm.filters.Prefix(view_id.as_property_ref("source"), value=source_prefix)) - if isinstance(shop_start_specification, str): - filters.append( - dm.filters.Equals(view_id.as_property_ref("shopStartSpecification"), value=shop_start_specification) - ) - if shop_start_specification and isinstance(shop_start_specification, list): - filters.append( - dm.filters.In(view_id.as_property_ref("shopStartSpecification"), values=shop_start_specification) - ) - if shop_start_specification_prefix is not None: + if commands and isinstance(commands, str): filters.append( - dm.filters.Prefix(view_id.as_property_ref("shopStartSpecification"), value=shop_start_specification_prefix) - ) - if isinstance(shop_end_specification, str): - filters.append(dm.filters.Equals(view_id.as_property_ref("shopEndSpecification"), value=shop_end_specification)) - if shop_end_specification and isinstance(shop_end_specification, list): - filters.append(dm.filters.In(view_id.as_property_ref("shopEndSpecification"), values=shop_end_specification)) - if shop_end_specification_prefix is not None: - filters.append( - dm.filters.Prefix(view_id.as_property_ref("shopEndSpecification"), value=shop_end_specification_prefix) + dm.filters.Equals( + view_id.as_property_ref("commands"), value={"space": DEFAULT_INSTANCE_SPACE, "externalId": commands} + ) ) - if min_shop_start is not None or max_shop_start is not None: + if commands and isinstance(commands, tuple): filters.append( - dm.filters.Range( - view_id.as_property_ref("shopStart"), - 
gte=min_shop_start.isoformat(timespec="milliseconds") if min_shop_start else None, - lte=max_shop_start.isoformat(timespec="milliseconds") if max_shop_start else None, + dm.filters.Equals( + view_id.as_property_ref("commands"), value={"space": commands[0], "externalId": commands[1]} ) ) - if min_shop_end is not None or max_shop_end is not None: + if commands and isinstance(commands, list) and isinstance(commands[0], str): filters.append( - dm.filters.Range( - view_id.as_property_ref("shopEnd"), - gte=min_shop_end.isoformat(timespec="milliseconds") if min_shop_end else None, - lte=max_shop_end.isoformat(timespec="milliseconds") if max_shop_end else None, + dm.filters.In( + view_id.as_property_ref("commands"), + values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in commands], ) ) - if min_bid_date is not None or max_bid_date is not None: + if commands and isinstance(commands, list) and isinstance(commands[0], tuple): filters.append( - dm.filters.Range( - view_id.as_property_ref("bidDate"), - gte=min_bid_date.isoformat() if min_bid_date else None, - lte=max_bid_date.isoformat() if max_bid_date else None, + dm.filters.In( + view_id.as_property_ref("commands"), + values=[{"space": item[0], "externalId": item[1]} for item in commands], ) ) - if isinstance(is_ready, bool): - filters.append(dm.filters.Equals(view_id.as_property_ref("isReady"), value=is_ready)) + if isinstance(source, str): + filters.append(dm.filters.Equals(view_id.as_property_ref("source"), value=source)) + if source and isinstance(source, list): + filters.append(dm.filters.In(view_id.as_property_ref("source"), values=source)) + if source_prefix is not None: + filters.append(dm.filters.Prefix(view_id.as_property_ref("source"), value=source_prefix)) if external_id_prefix is not None: filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) if isinstance(space, str): diff --git 
a/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_input.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_input.py index 2314bc37d..40b29a11e 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_input.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_partial_bid_calculation_input.py @@ -19,10 +19,9 @@ ) if TYPE_CHECKING: - from ._alert import Alert, AlertWrite from ._market_configuration import MarketConfiguration, MarketConfigurationWrite from ._plant_shop import PlantShop, PlantShopWrite - from ._shop_result import SHOPResult, SHOPResultWrite + from ._shop_result_price_prod import SHOPResultPriceProd, SHOPResultPriceProdWrite __all__ = [ @@ -38,13 +37,16 @@ ShopPartialBidCalculationInputTextFields = Literal["process_id", "function_name", "function_call_id"] -ShopPartialBidCalculationInputFields = Literal["process_id", "process_step", "function_name", "function_call_id"] +ShopPartialBidCalculationInputFields = Literal[ + "process_id", "process_step", "function_name", "function_call_id", "step_enabled" +] _SHOPPARTIALBIDCALCULATIONINPUT_PROPERTIES_BY_FIELD = { "process_id": "processId", "process_step": "processStep", "function_name": "functionName", "function_call_id": "functionCallId", + "step_enabled": "stepEnabled", } @@ -61,10 +63,10 @@ class ShopPartialBidCalculationInput(DomainModel): process_step: This is the step in the process. function_name: The name of the function function_call_id: The function call id - plant: The plant to calculate the partial bid for + plant: The plant to calculate the partial bid for. Extract price/prod timeseries from Shop Results market_configuration: The market configuration to be used to generate the partial bid matrix - alerts: An array of calculation level Alerts. - shop_results: An array of shop results. 
+ step_enabled: Whether the step is enabled or not + shop_result_price_prod: An array of shop results with price/prod timeseries pairs for all plants included in the respective shop scenario """ space: str = DEFAULT_INSTANCE_SPACE @@ -79,8 +81,10 @@ class ShopPartialBidCalculationInput(DomainModel): market_configuration: Union[MarketConfiguration, str, dm.NodeId, None] = Field( None, repr=False, alias="marketConfiguration" ) - alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) - shop_results: Union[list[SHOPResult], list[str], None] = Field(default=None, repr=False, alias="shopResults") + step_enabled: Optional[bool] = Field(None, alias="stepEnabled") + shop_result_price_prod: Union[list[SHOPResultPriceProd], list[str], None] = Field( + default=None, repr=False, alias="shopResultPriceProd" + ) def as_write(self) -> ShopPartialBidCalculationInputWrite: """Convert this read version of shop partial bid calculation input to the writing version.""" @@ -98,10 +102,14 @@ def as_write(self) -> ShopPartialBidCalculationInputWrite: if isinstance(self.market_configuration, DomainModel) else self.market_configuration ), - alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], - shop_results=[ - shop_result.as_write() if isinstance(shop_result, DomainModel) else shop_result - for shop_result in self.shop_results or [] + step_enabled=self.step_enabled, + shop_result_price_prod=[ + ( + shop_result_price_prod.as_write() + if isinstance(shop_result_price_prod, DomainModel) + else shop_result_price_prod + ) + for shop_result_price_prod in self.shop_result_price_prod or [] ], ) @@ -128,10 +136,10 @@ class ShopPartialBidCalculationInputWrite(DomainModelWrite): process_step: This is the step in the process. function_name: The name of the function function_call_id: The function call id - plant: The plant to calculate the partial bid for + plant: The plant to calculate the partial bid for.
Extract price/prod timeseries from Shop Results market_configuration: The market configuration to be used to generate the partial bid matrix - alerts: An array of calculation level Alerts. - shop_results: An array of shop results. + step_enabled: Whether the step is enabled or not + shop_result_price_prod: An array of shop results with price/prod timeseries pairs for all plants included in the respective shop scenario """ space: str = DEFAULT_INSTANCE_SPACE @@ -146,8 +154,10 @@ class ShopPartialBidCalculationInputWrite(DomainModelWrite): market_configuration: Union[MarketConfigurationWrite, str, dm.NodeId, None] = Field( None, repr=False, alias="marketConfiguration" ) - alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) - shop_results: Union[list[SHOPResultWrite], list[str], None] = Field(default=None, repr=False, alias="shopResults") + step_enabled: Optional[bool] = Field(None, alias="stepEnabled") + shop_result_price_prod: Union[list[SHOPResultPriceProdWrite], list[str], None] = Field( + default=None, repr=False, alias="shopResultPriceProd" + ) def _to_instances_write( self, @@ -193,6 +203,9 @@ def _to_instances_write( ), } + if self.step_enabled is not None or write_none: + properties["stepEnabled"] = self.step_enabled + if properties: this_node = dm.NodeApply( space=self.space, @@ -209,17 +222,14 @@ def _to_instances_write( resources.nodes.append(this_node) cache.add(self.as_tuple_id()) - edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") - for alert in self.alerts or []: - other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class - ) - resources.extend(other_resources) - - edge_type = dm.DirectRelationReference("sp_powerops_types", "SHOPResult") - for
shop_result_price_prod in self.shop_result_price_prod or []: other_resources = DomainRelationWrite.from_edge_to_resources( - cache, start_node=self, end_node=shop_result, edge_type=edge_type, view_by_read_class=view_by_read_class + cache, + start_node=self, + end_node=shop_result_price_prod, + edge_type=edge_type, + view_by_read_class=view_by_read_class, ) resources.extend(other_resources) @@ -286,6 +296,7 @@ def _create_shop_partial_bid_calculation_input_filter( function_call_id_prefix: str | None = None, plant: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, market_configuration: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + step_enabled: bool | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, filter: dm.Filter | None = None, @@ -364,6 +375,8 @@ def _create_shop_partial_bid_calculation_input_filter( values=[{"space": item[0], "externalId": item[1]} for item in market_configuration], ) ) + if isinstance(step_enabled, bool): + filters.append(dm.filters.Equals(view_id.as_property_ref("stepEnabled"), value=step_enabled)) if external_id_prefix is not None: filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) if isinstance(space, str): diff --git a/cognite/powerops/client/_generated/v1/data_classes/_shop_result.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_result.py index 4f7255a33..19ed35624 100644 --- a/cognite/powerops/client/_generated/v1/data_classes/_shop_result.py +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_result.py @@ -22,8 +22,7 @@ if TYPE_CHECKING: from ._alert import Alert, AlertWrite - from ._price_scenario import PriceScenario, PriceScenarioWrite - from ._scenario import Scenario, ScenarioWrite + from ._case import Case, CaseWrite __all__ = [ @@ -38,13 +37,20 @@ ] -SHOPResultTextFields = Literal["production", "price", "objective_sequence"] -SHOPResultFields = Literal["production", "price", 
"objective_sequence"] +SHOPResultTextFields = Literal[ + "output_timeseries", "objective_sequence", "pre_run", "post_run", "shop_messages", "cplex_logs" +] +SHOPResultFields = Literal[ + "output_timeseries", "objective_sequence", "pre_run", "post_run", "shop_messages", "cplex_logs" +] _SHOPRESULT_PROPERTIES_BY_FIELD = { - "production": "production", - "price": "price", + "output_timeseries": "outputTimeseries", "objective_sequence": "objectiveSequence", + "pre_run": "preRun", + "post_run": "postRun", + "shop_messages": "shopMessages", + "cplex_logs": "cplexLogs", } @@ -57,21 +63,25 @@ class SHOPResult(DomainModel): space: The space where the node is located. external_id: The external id of the shop result. data_record: The data record of the shop result node. - scenario: The Shop scenario that was used to produce this result - price_scenario: The price scenario that was used to produce this result - production: The result production timeseries from a SHOP run - price: The result price timeseries from a SHOP run + case: The case that was used to produce this result + output_timeseries: A general placeholder for all timeseries that stem from a shop run objective_sequence: The sequence of the objective function + pre_run: The pre-run data for the SHOP run + post_run: The post-run data for the SHOP run + shop_messages: The messages from the SHOP run + cplex_logs: The logs from CPLEX alerts: An array of calculation level Alerts. 
""" space: str = DEFAULT_INSTANCE_SPACE node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference("sp_powerops_types", "SHOPResult") - scenario: Union[Scenario, str, dm.NodeId, None] = Field(None, repr=False) - price_scenario: Union[PriceScenario, str, dm.NodeId, None] = Field(None, repr=False) - production: Union[list[TimeSeries], list[str], None] = None - price: Union[list[TimeSeries], list[str], None] = None + case: Union[Case, str, dm.NodeId, None] = Field(None, repr=False) + output_timeseries: Union[list[TimeSeries], list[str], None] = Field(None, alias="outputTimeseries") objective_sequence: Union[str, None] = Field(None, alias="objectiveSequence") + pre_run: Union[str, None] = Field(None, alias="preRun") + post_run: Union[str, None] = Field(None, alias="postRun") + shop_messages: Union[str, None] = Field(None, alias="shopMessages") + cplex_logs: Union[str, None] = Field(None, alias="cplexLogs") alerts: Union[list[Alert], list[str], None] = Field(default=None, repr=False) def as_write(self) -> SHOPResultWrite: @@ -80,13 +90,13 @@ def as_write(self) -> SHOPResultWrite: space=self.space, external_id=self.external_id, data_record=DataRecordWrite(existing_version=self.data_record.version), - scenario=self.scenario.as_write() if isinstance(self.scenario, DomainModel) else self.scenario, - price_scenario=( - self.price_scenario.as_write() if isinstance(self.price_scenario, DomainModel) else self.price_scenario - ), - production=self.production, - price=self.price, + case=self.case.as_write() if isinstance(self.case, DomainModel) else self.case, + output_timeseries=self.output_timeseries, objective_sequence=self.objective_sequence, + pre_run=self.pre_run, + post_run=self.post_run, + shop_messages=self.shop_messages, + cplex_logs=self.cplex_logs, alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], ) @@ -109,21 +119,25 @@ class SHOPResultWrite(DomainModelWrite): space: The space where the node is 
located. external_id: The external id of the shop result. data_record: The data record of the shop result node. - scenario: The Shop scenario that was used to produce this result - price_scenario: The price scenario that was used to produce this result - production: The result production timeseries from a SHOP run - price: The result price timeseries from a SHOP run + case: The case that was used to produce this result + output_timeseries: A general placeholder for all timeseries that stem from a shop run objective_sequence: The sequence of the objective function + pre_run: The pre-run data for the SHOP run + post_run: The post-run data for the SHOP run + shop_messages: The messages from the SHOP run + cplex_logs: The logs from CPLEX alerts: An array of calculation level Alerts. """ space: str = DEFAULT_INSTANCE_SPACE node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference("sp_powerops_types", "SHOPResult") - scenario: Union[ScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) - price_scenario: Union[PriceScenarioWrite, str, dm.NodeId, None] = Field(None, repr=False) - production: Union[list[TimeSeries], list[str], None] = None - price: Union[list[TimeSeries], list[str], None] = None + case: Union[CaseWrite, str, dm.NodeId, None] = Field(None, repr=False) + output_timeseries: Union[list[TimeSeries], list[str], None] = Field(None, alias="outputTimeseries") objective_sequence: Union[str, None] = Field(None, alias="objectiveSequence") + pre_run: Union[str, None] = Field(None, alias="preRun") + post_run: Union[str, None] = Field(None, alias="postRun") + shop_messages: Union[str, None] = Field(None, alias="shopMessages") + cplex_logs: Union[str, None] = Field(None, alias="cplexLogs") alerts: Union[list[AlertWrite], list[str], None] = Field(default=None, repr=False) def _to_instances_write( @@ -140,33 +154,32 @@ def _to_instances_write( properties: dict[str, Any] = {} - if self.scenario is not None: - properties["scenario"] = { - 
"space": self.space if isinstance(self.scenario, str) else self.scenario.space, - "externalId": self.scenario if isinstance(self.scenario, str) else self.scenario.external_id, - } - - if self.price_scenario is not None: - properties["price_scenario"] = { - "space": self.space if isinstance(self.price_scenario, str) else self.price_scenario.space, - "externalId": ( - self.price_scenario if isinstance(self.price_scenario, str) else self.price_scenario.external_id - ), + if self.case is not None: + properties["case"] = { + "space": self.space if isinstance(self.case, str) else self.case.space, + "externalId": self.case if isinstance(self.case, str) else self.case.external_id, } - if self.production is not None or write_none: - properties["production"] = [ - value if isinstance(value, str) else value.external_id for value in self.production or [] - ] or None - - if self.price is not None or write_none: - properties["price"] = [ - value if isinstance(value, str) else value.external_id for value in self.price or [] + if self.output_timeseries is not None or write_none: + properties["outputTimeseries"] = [ + value if isinstance(value, str) else value.external_id for value in self.output_timeseries or [] ] or None if self.objective_sequence is not None or write_none: properties["objectiveSequence"] = self.objective_sequence + if self.pre_run is not None or write_none: + properties["preRun"] = self.pre_run + + if self.post_run is not None or write_none: + properties["postRun"] = self.post_run + + if self.shop_messages is not None or write_none: + properties["shopMessages"] = self.shop_messages + + if self.cplex_logs is not None or write_none: + properties["cplexLogs"] = self.cplex_logs + if properties: this_node = dm.NodeApply( space=self.space, @@ -190,19 +203,12 @@ def _to_instances_write( ) resources.extend(other_resources) - if isinstance(self.scenario, DomainModelWrite): - other_resources = self.scenario._to_instances_write(cache, view_by_read_class) - 
resources.extend(other_resources) - - if isinstance(self.price_scenario, DomainModelWrite): - other_resources = self.price_scenario._to_instances_write(cache, view_by_read_class) + if isinstance(self.case, DomainModelWrite): + other_resources = self.case._to_instances_write(cache, view_by_read_class) resources.extend(other_resources) - if isinstance(self.production, CogniteTimeSeries): - resources.time_series.append(self.production) - - if isinstance(self.price, CogniteTimeSeries): - resources.time_series.append(self.price) + if isinstance(self.output_timeseries, CogniteTimeSeries): + resources.time_series.append(self.output_timeseries) return resources @@ -249,65 +255,33 @@ class SHOPResultApplyList(SHOPResultWriteList): ... def _create_shop_result_filter( view_id: dm.ViewId, - scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, - price_scenario: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, external_id_prefix: str | None = None, space: str | list[str] | None = None, filter: dm.Filter | None = None, ) -> dm.Filter | None: filters = [] - if scenario and isinstance(scenario, str): + if case and isinstance(case, str): filters.append( dm.filters.Equals( - view_id.as_property_ref("scenario"), value={"space": DEFAULT_INSTANCE_SPACE, "externalId": scenario} + view_id.as_property_ref("case"), value={"space": DEFAULT_INSTANCE_SPACE, "externalId": case} ) ) - if scenario and isinstance(scenario, tuple): + if case and isinstance(case, tuple): filters.append( - dm.filters.Equals( - view_id.as_property_ref("scenario"), value={"space": scenario[0], "externalId": scenario[1]} - ) - ) - if scenario and isinstance(scenario, list) and isinstance(scenario[0], str): - filters.append( - dm.filters.In( - view_id.as_property_ref("scenario"), - values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in scenario], - ) - ) - if scenario 
and isinstance(scenario, list) and isinstance(scenario[0], tuple): - filters.append( - dm.filters.In( - view_id.as_property_ref("scenario"), - values=[{"space": item[0], "externalId": item[1]} for item in scenario], - ) - ) - if price_scenario and isinstance(price_scenario, str): - filters.append( - dm.filters.Equals( - view_id.as_property_ref("price_scenario"), - value={"space": DEFAULT_INSTANCE_SPACE, "externalId": price_scenario}, - ) - ) - if price_scenario and isinstance(price_scenario, tuple): - filters.append( - dm.filters.Equals( - view_id.as_property_ref("price_scenario"), - value={"space": price_scenario[0], "externalId": price_scenario[1]}, - ) + dm.filters.Equals(view_id.as_property_ref("case"), value={"space": case[0], "externalId": case[1]}) ) - if price_scenario and isinstance(price_scenario, list) and isinstance(price_scenario[0], str): + if case and isinstance(case, list) and isinstance(case[0], str): filters.append( dm.filters.In( - view_id.as_property_ref("price_scenario"), - values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in price_scenario], + view_id.as_property_ref("case"), + values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in case], ) ) - if price_scenario and isinstance(price_scenario, list) and isinstance(price_scenario[0], tuple): + if case and isinstance(case, list) and isinstance(case[0], tuple): filters.append( dm.filters.In( - view_id.as_property_ref("price_scenario"), - values=[{"space": item[0], "externalId": item[1]} for item in price_scenario], + view_id.as_property_ref("case"), values=[{"space": item[0], "externalId": item[1]} for item in case] ) ) if external_id_prefix is not None: diff --git a/cognite/powerops/client/_generated/v1/data_classes/_shop_result_price_prod.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_result_price_prod.py new file mode 100644 index 000000000..a3b0aafdc --- /dev/null +++ 
b/cognite/powerops/client/_generated/v1/data_classes/_shop_result_price_prod.py @@ -0,0 +1,362 @@ +from __future__ import annotations + +import warnings +from typing import TYPE_CHECKING, Any, Literal, Optional, Union + +from cognite.client import data_modeling as dm +from cognite.client.data_classes import TimeSeries as CogniteTimeSeries +from pydantic import Field + +from ._core import ( + DEFAULT_INSTANCE_SPACE, + DataRecordWrite, + DomainModel, + DomainModelCore, + DomainModelWrite, + DomainModelWriteList, + DomainModelList, + DomainRelationWrite, + ResourcesWrite, + TimeSeries, +) +from ._shop_result import SHOPResult, SHOPResultWrite + +if TYPE_CHECKING: + from ._alert import Alert, AlertWrite + from ._case import Case, CaseWrite + from ._shop_time_series import SHOPTimeSeries, SHOPTimeSeriesWrite + + +__all__ = [ + "SHOPResultPriceProd", + "SHOPResultPriceProdWrite", + "SHOPResultPriceProdApply", + "SHOPResultPriceProdList", + "SHOPResultPriceProdWriteList", + "SHOPResultPriceProdApplyList", + "SHOPResultPriceProdFields", + "SHOPResultPriceProdTextFields", +] + + +SHOPResultPriceProdTextFields = Literal[ + "output_timeseries", "objective_sequence", "pre_run", "post_run", "shop_messages", "cplex_logs" +] +SHOPResultPriceProdFields = Literal[ + "output_timeseries", "objective_sequence", "pre_run", "post_run", "shop_messages", "cplex_logs" +] + +_SHOPRESULTPRICEPROD_PROPERTIES_BY_FIELD = { + "output_timeseries": "outputTimeseries", + "objective_sequence": "objectiveSequence", + "pre_run": "preRun", + "post_run": "postRun", + "shop_messages": "shopMessages", + "cplex_logs": "cplexLogs", +} + + +class SHOPResultPriceProd(SHOPResult): + """This represents the reading version of shop result price prod. + + It is used to when data is retrieved from CDF. + + Args: + space: The space where the node is located. + external_id: The external id of the shop result price prod. + data_record: The data record of the shop result price prod node. 
+ case: The case that was used to produce this result + output_timeseries: A general placeholder for all timeseries that stem from a shop run + objective_sequence: The sequence of the objective function + pre_run: The pre-run data for the SHOP run + post_run: The post-run data for the SHOP run + shop_messages: The messages from the SHOP run + cplex_logs: The logs from CPLEX + alerts: An array of calculation level Alerts. + price_timeseries: The market price timeseries from the Shop run + production_timeseries: The production timeseries wrapped as a ShopTimeSeries object containing properties related to their names and types in the resulting output shop file + """ + + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( + "sp_powerops_types", "SHOPResultPriceProd" + ) + price_timeseries: Union[SHOPTimeSeries, str, dm.NodeId, None] = Field(None, repr=False, alias="priceTimeseries") + production_timeseries: Union[list[SHOPTimeSeries], list[str], None] = Field( + default=None, repr=False, alias="productionTimeseries" + ) + + def as_write(self) -> SHOPResultPriceProdWrite: + """Convert this read version of shop result price prod to the writing version.""" + return SHOPResultPriceProdWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=self.data_record.version), + case=self.case.as_write() if isinstance(self.case, DomainModel) else self.case, + output_timeseries=self.output_timeseries, + objective_sequence=self.objective_sequence, + pre_run=self.pre_run, + post_run=self.post_run, + shop_messages=self.shop_messages, + cplex_logs=self.cplex_logs, + alerts=[alert.as_write() if isinstance(alert, DomainModel) else alert for alert in self.alerts or []], + price_timeseries=( + self.price_timeseries.as_write() + if isinstance(self.price_timeseries, DomainModel) + else self.price_timeseries + ), + production_timeseries=[ + production_timesery.as_write() if isinstance(production_timesery, DomainModel) 
else production_timesery + for production_timesery in self.production_timeseries or [] + ], + ) + + def as_apply(self) -> SHOPResultPriceProdWrite: + """Convert this read version of shop result price prod to the writing version.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0. Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class SHOPResultPriceProdWrite(SHOPResultWrite): + """This represents the writing version of shop result price prod. + + It is used to when data is sent to CDF. + + Args: + space: The space where the node is located. + external_id: The external id of the shop result price prod. + data_record: The data record of the shop result price prod node. + case: The case that was used to produce this result + output_timeseries: A general placeholder for all timeseries that stem from a shop run + objective_sequence: The sequence of the objective function + pre_run: The pre-run data for the SHOP run + post_run: The post-run data for the SHOP run + shop_messages: The messages from the SHOP run + cplex_logs: The logs from CPLEX + alerts: An array of calculation level Alerts. 
+ price_timeseries: The market price timeseries from the Shop run + production_timeseries: The production timeseries wrapped as a ShopTimeSeries object containing properties related to their names and types in the resulting output shop file + """ + + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( + "sp_powerops_types", "SHOPResultPriceProd" + ) + price_timeseries: Union[SHOPTimeSeriesWrite, str, dm.NodeId, None] = Field( + None, repr=False, alias="priceTimeseries" + ) + production_timeseries: Union[list[SHOPTimeSeriesWrite], list[str], None] = Field( + default=None, repr=False, alias="productionTimeseries" + ) + + def _to_instances_write( + self, + cache: set[tuple[str, str]], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, + write_none: bool = False, + ) -> ResourcesWrite: + resources = ResourcesWrite() + if self.as_tuple_id() in cache: + return resources + + write_view = (view_by_read_class or {}).get( + SHOPResultPriceProd, dm.ViewId("sp_powerops_models", "SHOPResultPriceProd", "1") + ) + + properties: dict[str, Any] = {} + + if self.case is not None: + properties["case"] = { + "space": self.space if isinstance(self.case, str) else self.case.space, + "externalId": self.case if isinstance(self.case, str) else self.case.external_id, + } + + if self.output_timeseries is not None or write_none: + properties["outputTimeseries"] = [ + value if isinstance(value, str) else value.external_id for value in self.output_timeseries or [] + ] or None + + if self.objective_sequence is not None or write_none: + properties["objectiveSequence"] = self.objective_sequence + + if self.pre_run is not None or write_none: + properties["preRun"] = self.pre_run + + if self.post_run is not None or write_none: + properties["postRun"] = self.post_run + + if self.shop_messages is not None or write_none: + properties["shopMessages"] = self.shop_messages + + if self.cplex_logs is not None or write_none: + properties["cplexLogs"] = 
self.cplex_logs + + if self.price_timeseries is not None: + properties["priceTimeseries"] = { + "space": self.space if isinstance(self.price_timeseries, str) else self.price_timeseries.space, + "externalId": ( + self.price_timeseries + if isinstance(self.price_timeseries, str) + else self.price_timeseries.external_id + ), + } + + if properties: + this_node = dm.NodeApply( + space=self.space, + external_id=self.external_id, + existing_version=self.data_record.existing_version, + type=self.node_type, + sources=[ + dm.NodeOrEdgeData( + source=write_view, + properties=properties, + ) + ], + ) + resources.nodes.append(this_node) + cache.add(self.as_tuple_id()) + + edge_type = dm.DirectRelationReference("sp_powerops_types", "calculationIssue") + for alert in self.alerts or []: + other_resources = DomainRelationWrite.from_edge_to_resources( + cache, start_node=self, end_node=alert, edge_type=edge_type, view_by_read_class=view_by_read_class + ) + resources.extend(other_resources) + + edge_type = dm.DirectRelationReference("sp_powerops_types", "SHOPResultPriceProd.productionTimeseries") + for production_timesery in self.production_timeseries or []: + other_resources = DomainRelationWrite.from_edge_to_resources( + cache, + start_node=self, + end_node=production_timesery, + edge_type=edge_type, + view_by_read_class=view_by_read_class, + ) + resources.extend(other_resources) + + if isinstance(self.case, DomainModelWrite): + other_resources = self.case._to_instances_write(cache, view_by_read_class) + resources.extend(other_resources) + + if isinstance(self.price_timeseries, DomainModelWrite): + other_resources = self.price_timeseries._to_instances_write(cache, view_by_read_class) + resources.extend(other_resources) + + if isinstance(self.output_timeseries, CogniteTimeSeries): + resources.time_series.append(self.output_timeseries) + + return resources + + +class SHOPResultPriceProdApply(SHOPResultPriceProdWrite): + def __new__(cls, *args, **kwargs) -> SHOPResultPriceProdApply: + 
warnings.warn( + "SHOPResultPriceProdApply is deprecated and will be removed in v1.0. Use SHOPResultPriceProdWrite instead." + "The motivation for this change is that Write is a more descriptive name for the writing version of the" + "SHOPResultPriceProd.", + UserWarning, + stacklevel=2, + ) + return super().__new__(cls) + + +class SHOPResultPriceProdList(DomainModelList[SHOPResultPriceProd]): + """List of shop result price prods in the read version.""" + + _INSTANCE = SHOPResultPriceProd + + def as_write(self) -> SHOPResultPriceProdWriteList: + """Convert these read versions of shop result price prod to the writing versions.""" + return SHOPResultPriceProdWriteList([node.as_write() for node in self.data]) + + def as_apply(self) -> SHOPResultPriceProdWriteList: + """Convert these read versions of primitive nullable to the writing versions.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0. Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class SHOPResultPriceProdWriteList(DomainModelWriteList[SHOPResultPriceProdWrite]): + """List of shop result price prods in the writing version.""" + + _INSTANCE = SHOPResultPriceProdWrite + + +class SHOPResultPriceProdApplyList(SHOPResultPriceProdWriteList): ... 
+ + +def _create_shop_result_price_prod_filter( + view_id: dm.ViewId, + case: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + price_timeseries: str | tuple[str, str] | list[str] | list[tuple[str, str]] | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + filter: dm.Filter | None = None, +) -> dm.Filter | None: + filters = [] + if case and isinstance(case, str): + filters.append( + dm.filters.Equals( + view_id.as_property_ref("case"), value={"space": DEFAULT_INSTANCE_SPACE, "externalId": case} + ) + ) + if case and isinstance(case, tuple): + filters.append( + dm.filters.Equals(view_id.as_property_ref("case"), value={"space": case[0], "externalId": case[1]}) + ) + if case and isinstance(case, list) and isinstance(case[0], str): + filters.append( + dm.filters.In( + view_id.as_property_ref("case"), + values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in case], + ) + ) + if case and isinstance(case, list) and isinstance(case[0], tuple): + filters.append( + dm.filters.In( + view_id.as_property_ref("case"), values=[{"space": item[0], "externalId": item[1]} for item in case] + ) + ) + if price_timeseries and isinstance(price_timeseries, str): + filters.append( + dm.filters.Equals( + view_id.as_property_ref("priceTimeseries"), + value={"space": DEFAULT_INSTANCE_SPACE, "externalId": price_timeseries}, + ) + ) + if price_timeseries and isinstance(price_timeseries, tuple): + filters.append( + dm.filters.Equals( + view_id.as_property_ref("priceTimeseries"), + value={"space": price_timeseries[0], "externalId": price_timeseries[1]}, + ) + ) + if price_timeseries and isinstance(price_timeseries, list) and isinstance(price_timeseries[0], str): + filters.append( + dm.filters.In( + view_id.as_property_ref("priceTimeseries"), + values=[{"space": DEFAULT_INSTANCE_SPACE, "externalId": item} for item in price_timeseries], + ) + ) + if price_timeseries and isinstance(price_timeseries, list) and 
isinstance(price_timeseries[0], tuple): + filters.append( + dm.filters.In( + view_id.as_property_ref("priceTimeseries"), + values=[{"space": item[0], "externalId": item[1]} for item in price_timeseries], + ) + ) + if external_id_prefix is not None: + filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) + if isinstance(space, str): + filters.append(dm.filters.Equals(["node", "space"], value=space)) + if space and isinstance(space, list): + filters.append(dm.filters.In(["node", "space"], values=space)) + if filter: + filters.append(filter) + return dm.filters.And(*filters) if filters else None diff --git a/cognite/powerops/client/_generated/v1/data_classes/_shop_time_series.py b/cognite/powerops/client/_generated/v1/data_classes/_shop_time_series.py new file mode 100644 index 000000000..5053f6630 --- /dev/null +++ b/cognite/powerops/client/_generated/v1/data_classes/_shop_time_series.py @@ -0,0 +1,249 @@ +from __future__ import annotations + +import warnings +from typing import Any, Literal, Optional, Union + +from cognite.client import data_modeling as dm +from cognite.client.data_classes import TimeSeries as CogniteTimeSeries +from pydantic import Field + +from ._core import ( + DEFAULT_INSTANCE_SPACE, + DataRecordWrite, + DomainModel, + DomainModelCore, + DomainModelWrite, + DomainModelWriteList, + DomainModelList, + DomainRelationWrite, + ResourcesWrite, + TimeSeries, +) + + +__all__ = [ + "SHOPTimeSeries", + "SHOPTimeSeriesWrite", + "SHOPTimeSeriesApply", + "SHOPTimeSeriesList", + "SHOPTimeSeriesWriteList", + "SHOPTimeSeriesApplyList", + "SHOPTimeSeriesFields", + "SHOPTimeSeriesTextFields", +] + + +SHOPTimeSeriesTextFields = Literal["object_type", "object_name", "attribute_name", "timeseries"] +SHOPTimeSeriesFields = Literal["object_type", "object_name", "attribute_name", "timeseries"] + +_SHOPTIMESERIES_PROPERTIES_BY_FIELD = { + "object_type": "objectType", + "object_name": "objectName", + "attribute_name": "attributeName", + 
"timeseries": "timeseries", +} + + +class SHOPTimeSeries(DomainModel): + """This represents the reading version of shop time series. + + It is used to when data is retrieved from CDF. + + Args: + space: The space where the node is located. + external_id: The external id of the shop time series. + data_record: The data record of the shop time series node. + object_type: The type of the object + object_name: The name of the object + attribute_name: The name of the attribute + timeseries: Timeseries object from output of SHOP stored as a timeseries in cdf + """ + + space: str = DEFAULT_INSTANCE_SPACE + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( + "sp_powerops_types", "SHOPTimeSeries" + ) + object_type: Optional[str] = Field(None, alias="objectType") + object_name: Optional[str] = Field(None, alias="objectName") + attribute_name: Optional[str] = Field(None, alias="attributeName") + timeseries: Union[TimeSeries, str, None] = None + + def as_write(self) -> SHOPTimeSeriesWrite: + """Convert this read version of shop time series to the writing version.""" + return SHOPTimeSeriesWrite( + space=self.space, + external_id=self.external_id, + data_record=DataRecordWrite(existing_version=self.data_record.version), + object_type=self.object_type, + object_name=self.object_name, + attribute_name=self.attribute_name, + timeseries=self.timeseries, + ) + + def as_apply(self) -> SHOPTimeSeriesWrite: + """Convert this read version of shop time series to the writing version.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0. Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class SHOPTimeSeriesWrite(DomainModelWrite): + """This represents the writing version of shop time series. + + It is used to when data is sent to CDF. + + Args: + space: The space where the node is located. + external_id: The external id of the shop time series. 
+ data_record: The data record of the shop time series node. + object_type: The type of the object + object_name: The name of the object + attribute_name: The name of the attribute + timeseries: Timeseries object from output of SHOP stored as a timeseries in cdf + """ + + space: str = DEFAULT_INSTANCE_SPACE + node_type: Union[dm.DirectRelationReference, None] = dm.DirectRelationReference( + "sp_powerops_types", "SHOPTimeSeries" + ) + object_type: Optional[str] = Field(None, alias="objectType") + object_name: Optional[str] = Field(None, alias="objectName") + attribute_name: Optional[str] = Field(None, alias="attributeName") + timeseries: Union[TimeSeries, str, None] = None + + def _to_instances_write( + self, + cache: set[tuple[str, str]], + view_by_read_class: dict[type[DomainModelCore], dm.ViewId] | None, + write_none: bool = False, + ) -> ResourcesWrite: + resources = ResourcesWrite() + if self.as_tuple_id() in cache: + return resources + + write_view = (view_by_read_class or {}).get( + SHOPTimeSeries, dm.ViewId("sp_powerops_models", "SHOPTimeSeries", "1") + ) + + properties: dict[str, Any] = {} + + if self.object_type is not None or write_none: + properties["objectType"] = self.object_type + + if self.object_name is not None or write_none: + properties["objectName"] = self.object_name + + if self.attribute_name is not None or write_none: + properties["attributeName"] = self.attribute_name + + if self.timeseries is not None or write_none: + if isinstance(self.timeseries, str) or self.timeseries is None: + properties["timeseries"] = self.timeseries + else: + properties["timeseries"] = self.timeseries.external_id + + if properties: + this_node = dm.NodeApply( + space=self.space, + external_id=self.external_id, + existing_version=self.data_record.existing_version, + type=self.node_type, + sources=[ + dm.NodeOrEdgeData( + source=write_view, + properties=properties, + ) + ], + ) + resources.nodes.append(this_node) + cache.add(self.as_tuple_id()) + + if 
isinstance(self.timeseries, CogniteTimeSeries): + resources.time_series.append(self.timeseries) + + return resources + + +class SHOPTimeSeriesApply(SHOPTimeSeriesWrite): + def __new__(cls, *args, **kwargs) -> SHOPTimeSeriesApply: + warnings.warn( + "SHOPTimeSeriesApply is deprecated and will be removed in v1.0. Use SHOPTimeSeriesWrite instead." + "The motivation for this change is that Write is a more descriptive name for the writing version of the" + "SHOPTimeSeries.", + UserWarning, + stacklevel=2, + ) + return super().__new__(cls) + + +class SHOPTimeSeriesList(DomainModelList[SHOPTimeSeries]): + """List of shop time series in the read version.""" + + _INSTANCE = SHOPTimeSeries + + def as_write(self) -> SHOPTimeSeriesWriteList: + """Convert these read versions of shop time series to the writing versions.""" + return SHOPTimeSeriesWriteList([node.as_write() for node in self.data]) + + def as_apply(self) -> SHOPTimeSeriesWriteList: + """Convert these read versions of primitive nullable to the writing versions.""" + warnings.warn( + "as_apply is deprecated and will be removed in v1.0. Use as_write instead.", + UserWarning, + stacklevel=2, + ) + return self.as_write() + + +class SHOPTimeSeriesWriteList(DomainModelWriteList[SHOPTimeSeriesWrite]): + """List of shop time series in the writing version.""" + + _INSTANCE = SHOPTimeSeriesWrite + + +class SHOPTimeSeriesApplyList(SHOPTimeSeriesWriteList): ... 
+ + +def _create_shop_time_series_filter( + view_id: dm.ViewId, + object_type: str | list[str] | None = None, + object_type_prefix: str | None = None, + object_name: str | list[str] | None = None, + object_name_prefix: str | None = None, + attribute_name: str | list[str] | None = None, + attribute_name_prefix: str | None = None, + external_id_prefix: str | None = None, + space: str | list[str] | None = None, + filter: dm.Filter | None = None, +) -> dm.Filter | None: + filters = [] + if isinstance(object_type, str): + filters.append(dm.filters.Equals(view_id.as_property_ref("objectType"), value=object_type)) + if object_type and isinstance(object_type, list): + filters.append(dm.filters.In(view_id.as_property_ref("objectType"), values=object_type)) + if object_type_prefix is not None: + filters.append(dm.filters.Prefix(view_id.as_property_ref("objectType"), value=object_type_prefix)) + if isinstance(object_name, str): + filters.append(dm.filters.Equals(view_id.as_property_ref("objectName"), value=object_name)) + if object_name and isinstance(object_name, list): + filters.append(dm.filters.In(view_id.as_property_ref("objectName"), values=object_name)) + if object_name_prefix is not None: + filters.append(dm.filters.Prefix(view_id.as_property_ref("objectName"), value=object_name_prefix)) + if isinstance(attribute_name, str): + filters.append(dm.filters.Equals(view_id.as_property_ref("attributeName"), value=attribute_name)) + if attribute_name and isinstance(attribute_name, list): + filters.append(dm.filters.In(view_id.as_property_ref("attributeName"), values=attribute_name)) + if attribute_name_prefix is not None: + filters.append(dm.filters.Prefix(view_id.as_property_ref("attributeName"), value=attribute_name_prefix)) + if external_id_prefix is not None: + filters.append(dm.filters.Prefix(["node", "externalId"], value=external_id_prefix)) + if isinstance(space, str): + filters.append(dm.filters.Equals(["node", "space"], value=space)) + if space and isinstance(space, 
list): + filters.append(dm.filters.In(["node", "space"], values=space)) + if filter: + filters.append(filter) + return dm.filters.And(*filters) if filters else None diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/all_PowerOps.datamodel.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/all_PowerOps.datamodel.yaml index 991fa0960..56cb8ca40 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/all_PowerOps.datamodel.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/all_PowerOps.datamodel.yaml @@ -11,10 +11,6 @@ views: space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: PriceScenario - space: '{{powerops_models}}' - type: view - version: '{{version}}' - externalId: MarketConfiguration space: '{{powerops_models}}' type: view @@ -87,14 +83,6 @@ views: space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: ScenarioRaw - space: '{{powerops_models}}' - type: view - version: '{{version}}' -- externalId: ScenarioBase - space: '{{powerops_models}}' - type: view - version: '{{version}}' - externalId: Scenario space: '{{powerops_models}}' type: view @@ -275,3 +263,23 @@ views: space: '{{powerops_models}}' version: '{{version}}' type: view +- externalId: Case + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: Commands + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: PriceProdCase + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: SHOPResultPriceProd + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: SHOPTimeSeries + space: '{{powerops_models}}' + type: view + version: '{{version}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/compute_DayAheadBenchmarking.datamodel.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/compute_DayAheadBenchmarking.datamodel.yaml index 
b9edb4e64..422f198d1 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/compute_DayAheadBenchmarking.datamodel.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/compute_DayAheadBenchmarking.datamodel.yaml @@ -41,7 +41,7 @@ views: version: '{{version}}' type: view - space: '{{powerops_models}}' - externalId: PriceScenario + externalId: Scenario version: '{{version}}' type: view - space: '{{powerops_models}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/compute_SHOPBasedDayAhead.datamodel.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/compute_SHOPBasedDayAhead.datamodel.yaml index 80b8afed5..a2b199f3c 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/compute_SHOPBasedDayAhead.datamodel.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/compute_SHOPBasedDayAhead.datamodel.yaml @@ -52,27 +52,27 @@ views: space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: PriceScenario +- externalId: Scenario space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: ScenarioRaw +- externalId: Mapping space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: Scenario +- externalId: ModelTemplate space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: Mapping +- externalId: SHOPResult space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: ModelTemplate +- externalId: Case space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: SHOPResult +- externalId: SHOPResultPriceProd space: '{{powerops_models}}' type: view version: '{{version}}' @@ -96,3 +96,15 @@ views: space: "{{powerops_models}}" type: view version: "{{version}}" +- externalId: PriceProdCase + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: SHOPTimeSeries + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: 
Commands + space: '{{powerops_models}}' + type: view + version: '{{version}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/compute_TotalBidCalculation.datamodel.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/compute_TotalBidCalculation.datamodel.yaml index b3cb69469..fa114cd88 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/compute_TotalBidCalculation.datamodel.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/compute_TotalBidCalculation.datamodel.yaml @@ -52,15 +52,15 @@ views: space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: PriceScenario +- externalId: Alert space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: Alert +- externalId: SHOPResult space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: SHOPResult +- externalId: SHOPResultPriceProd space: '{{powerops_models}}' type: view version: '{{version}}' @@ -84,3 +84,19 @@ views: space: '{{powerops_models}}' type: view version: '{{version}}' +- externalId: PriceProdCase + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: Case + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: SHOPTimeSeries + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: Commands + space: '{{powerops_models}}' + type: view + version: '{{version}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/compute_WaterValueBasedDayAhead.datamodel.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/compute_WaterValueBasedDayAhead.datamodel.yaml index 5507d99c8..cb0033395 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/compute_WaterValueBasedDayAhead.datamodel.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/compute_WaterValueBasedDayAhead.datamodel.yaml @@ -48,10 +48,6 @@ views: space: '{{powerops_models}}' type: 
view version: '{{version}}' -- externalId: PriceScenario - space: '{{powerops_models}}' - type: view - version: '{{version}}' - externalId: PriceArea space: '{{powerops_models}}' type: view diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/config_DayAheadConfiguration.datamodel.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/config_DayAheadConfiguration.datamodel.yaml index 45eb0637c..973c4c70f 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/config_DayAheadConfiguration.datamodel.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/config_DayAheadConfiguration.datamodel.yaml @@ -20,7 +20,15 @@ views: space: "{{powerops_models}}" type: view version: "{{version}}" - - externalId: PriceScenario + - externalId: Scenario + space: "{{powerops_models}}" + type: view + version: "{{version}}" + - externalId: Mapping + space: "{{powerops_models}}" + type: view + version: "{{version}}" + - externalId: ModelTemplate space: "{{powerops_models}}" type: view version: "{{version}}" @@ -76,7 +84,7 @@ views: space: "{{powerops_models}}" type: view version: "{{version}}" - - externalId: Mapping - space: "{{powerops_models}}" + - externalId: Commands + space: '{{powerops_models}}' type: view - version: "{{version}}" + version: '{{version}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/BidConfiguration.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/BidConfiguration.container.yaml index 8f00b3af4..8d376e64e 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/BidConfiguration.container.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/BidConfiguration.container.yaml @@ -3,6 +3,14 @@ externalId: BidConfiguration name: BidConfiguration usedFor: node properties: + name: + type: + list: false + collation: ucs_basic + type: text + nullable: true + autoIncrement: false + name: 
name method: type: type: direct # points to BidMethod container diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/BidMethod.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/BidMethod.container.yaml index e1d36a22c..98aa0302a 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/BidMethod.container.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/BidMethod.container.yaml @@ -19,6 +19,34 @@ properties: autoIncrement: false name: mainScenario description: The main scenario to use when running the bid method + # Shop related as optional properties + shopStartSpecification: + type: + list: false + collation: ucs_basic + type: text + nullable: true + autoIncrement: false + name: name + description: The dynamic shop start specification + shopEndSpecification: + type: + list: false + collation: ucs_basic + type: text + nullable: true + autoIncrement: false + name: name + description: The dynamic shop end specification + shopBidDateSpecification: + type: + list: false + collation: ucs_basic + type: text + nullable: true + autoIncrement: false + name: name + description: The dynamic bid date specification indexes: name: properties: diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/Case.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/Case.container.yaml new file mode 100644 index 000000000..2d711e2f0 --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/Case.container.yaml @@ -0,0 +1,68 @@ +space: '{{powerops_models}}' +externalId: Case +name: Case +usedFor: node +properties: + scenario: + type: + type: direct + nullable: true + autoIncrement: false + name: scenario + description: The scenario to use when running this case + caseFile: + type: + list: false + type: file + nullable: true + autoIncrement: false + name: caseFile + description: 
YAML file including time series values for the relevant period, to send to CogShop. (Generated by preprocessor based on Scenario and startTime+endTime) + reservoirMapping: + type: + list: true + type: file + nullable: true + autoIncrement: false + name: reservoirMapping + description: Cut file reservoir mapping + cutOrderFiles: + type: + list: true # may be multiple per case + type: file + nullable: true + autoIncrement: false + name: cutOrderFiles + description: Cut order files (Module series in PRODRISK) + extraFiles: + type: + list: true + type: file + nullable: true + autoIncrement: false + name: extraFiles + description: Extra files to be used in the shop run + cogShopFilesConfig: + type: + list: true + type: json + nullable: true + autoIncrement: false + name: cogShopFilesConfig + description: Configuration for in what order to load the various files into pyshop + startTime: + type: + list: false + type: date + nullable: true + autoIncrement: false + name: startTime + description: The start of the optimisation period (for SHOP) + endTime: + type: + list: false + type: date + nullable: true + autoIncrement: false + name: endTime + description: The end of the optimisation period (for SHOP) diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/CommandsConfig.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/CommandsConfig.container.yaml new file mode 100644 index 000000000..8b0f2a84d --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/CommandsConfig.container.yaml @@ -0,0 +1,13 @@ +space: '{{powerops_models}}' +externalId: CommandsConfig +name: CommandsConfig +usedFor: node +properties: + commands: + type: + list: true + type: text + nullable: false + autoIncrement: false + name: commands + description: The commands to use with the shop run diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/FunctionData.container.yaml 
b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/FunctionData.container.yaml index 309222358..ea4639f63 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/FunctionData.container.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/FunctionData.container.yaml @@ -115,3 +115,9 @@ properties: nullable: true autoIncrement: false name: sequence + flag: + type: + type: boolean + nullable: true + autoIncrement: false + name: flag diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/MarketConfiguration.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/MarketConfiguration.container.yaml index 00e503b70..43f99454e 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/MarketConfiguration.container.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/MarketConfiguration.container.yaml @@ -3,6 +3,14 @@ externalId: MarketConfiguration name: MarketConfiguration usedFor: node properties: + name: + type: + list: false + collation: ucs_basic + type: text + nullable: true + autoIncrement: false + name: name marketType: type: list: false diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/ModelTemplate.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/ModelTemplate.container.yaml index 26ec6a0a4..2f52f0648 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/ModelTemplate.container.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/ModelTemplate.container.yaml @@ -3,15 +3,15 @@ externalId: ModelTemplate name: ModelTemplate usedFor: node properties: - cogShopVersion: + version: type: list: false collation: ucs_basic type: text - nullable: false + nullable: true autoIncrement: false - name: cogShopVersion - description: The tag of the cogshop image to run + 
name: version + description: The version of the model file (in case of multiple versions of the same model file) shopVersion: type: list: false @@ -37,15 +37,22 @@ properties: autoIncrement: false name: model description: The shop model file to use as template before mapping - source: + cogShopFilesConfig: type: - list: false - collation: ucs_basic - type: text + list: true + type: json + nullable: true + autoIncrement: false + name: cogShopFilesConfig + description: Configuration for in what order to load the various files into pyshop + extraFiles: + type: + list: true + type: file nullable: true autoIncrement: false - name: source - description: The source of the model + name: extraFiles + description: Extra files to include in the model indexes: watercourseIndex: properties: diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/MultiScenarioMatrix.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/MultiScenarioMatrix.container.yaml deleted file mode 100644 index 6ad32a4f5..000000000 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/MultiScenarioMatrix.container.yaml +++ /dev/null @@ -1,26 +0,0 @@ -space: '{{powerops_models}}' -externalId: MultiScenarioMatrix -name: MultiScenarioMatrix -usedFor: node -properties: - production: - type: - list: true - type: timeseries - nullable: true - autoIncrement: false - name: production - price: - type: - list: true - type: timeseries - nullable: true - autoIncrement: false - name: price -constraints: - requiredBidMatrix: - require: - space: '{{powerops_models}}' - externalId: BidMatrix - type: container - constraintType: requires diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/PriceScenario.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/PriceScenario.container.yaml deleted file mode 100644 index 63461c0f0..000000000 --- 
a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/PriceScenario.container.yaml +++ /dev/null @@ -1,28 +0,0 @@ -space: '{{powerops_models}}' -externalId: PriceScenario -name: PriceScenario -usedFor: node -properties: - name: - type: - list: false - collation: ucs_basic - type: text - nullable: false - autoIncrement: false - name: name - description: The name of the scenario - timeseries: - type: - list: false - type: timeseries - nullable: true - autoIncrement: false - name: timeseries - description: The time series to apply mapping to -indexes: - nameIndex: - properties: - - name - indexType: btree - cursorable: false diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/SHOPResult.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/SHOPResult.container.yaml new file mode 100644 index 000000000..8045d4234 --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/SHOPResult.container.yaml @@ -0,0 +1,108 @@ +space: '{{powerops_models}}' +externalId: SHOPResult +name: SHOPResult +usedFor: node +properties: + shopTimeseries1: + type: + list: false + type: direct + nullable: true + autoIncrement: false + name: shopTimeseries1 + description: Use this direct relation type to reference a SHOPTimeSeries object + shopTimeseries2: + type: + list: false + type: direct + nullable: true + autoIncrement: false + name: shopTimeseries2 + description: Use this direct relation type to reference a SHOPTimeSeries object + ts1: + type: + list: false + type: timeseries + nullable: true + autoIncrement: false + name: ts1 + description: A timeseries from the shop result (used in more specific extensions of a shop result) + ts2: + type: + list: false + type: timeseries + nullable: true + autoIncrement: false + name: ts2 + description: A timeseries from the shop result (used in more specific extensions of a shop result) + ts3: + type: + list: false + type: timeseries + 
nullable: true + autoIncrement: false + name: ts3 + description: A timeseries from the shop result (used in more specific extensions of a shop result) + tsList1: + type: + list: true + type: timeseries + nullable: true + autoIncrement: false + name: tsList1 + description: A timeseries from the shop result (used in more specific extensions of a shop result) + tsList2: + type: + list: true + type: timeseries + nullable: true + autoIncrement: false + name: tsList2 + description: A timeseries from the shop result (used in more specific extensions of a shop result) + preRun: + type: + list: false + type: file + nullable: true + autoIncrement: false + name: preRun + description: The prerun file from the shop run + shopMessages: + type: + list: false + type: file + nullable: true + autoIncrement: false + name: shopMessages + description: The prerun file from the shop run + cplexLogs: + type: + list: false + type: file + nullable: true + autoIncrement: false + name: cplexLogs + description: The cplex logs from the shop run + postRun: + type: + list: false + type: file + nullable: true + autoIncrement: false + name: postRun + description: The post run file from the shop run + case: + type: + list: false + type: direct + nullable: true + autoIncrement: false + name: case + description: The scenario that was used to create the shop result + objectiveSequence: + type: + list: false + type: sequence + nullable: true + autoIncrement: false + name: objectiveSequence diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/SHOPTimeSeries.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/SHOPTimeSeries.container.yaml new file mode 100644 index 000000000..a67afe41c --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/SHOPTimeSeries.container.yaml @@ -0,0 +1,51 @@ +space: '{{powerops_models}}' +externalId: SHOPTimeSeries +name: SHOPTimeSeries +usedFor: node +properties: + objectType: + 
type: + list: false + collation: ucs_basic + type: text + nullable: true + autoIncrement: false + name: objectType + description: The type of the object + objectName: + type: + list: false + collation: ucs_basic + type: text + nullable: true + autoIncrement: false + name: objectName + description: The name of the object + attributeName: + type: + list: false + collation: ucs_basic + type: text + nullable: true + autoIncrement: false + name: attributeName + description: The name of the attribute + timeseries: + type: + list: false + type: timeseries + nullable: true + autoIncrement: false + name: timeseries + description: Timeseries object from output of SHOP stored as a timeseries in cdf +indexes: + objectNameIndex: + properties: + - objectName + indexType: btree + cursorable: false + objectTypeIndex: + properties: + - objectType + indexType: btree + cursorable: false diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/Scenario.container.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/Scenario.container.yaml index 74708f4ed..fc24d51bf 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/containers/Scenario.container.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/containers/Scenario.container.yaml @@ -22,29 +22,12 @@ properties: description: The model template commands: type: - list: true - collation: ucs_basic - type: text + list: false + type: direct nullable: true autoIncrement: false name: commands - description: The commands to run - extraFiles: - type: - list: true - type: file - nullable: true - autoIncrement: false - name: extraFiles - description: Extra files to include in the model - modelFile: - type: - list: false - type: file - nullable: false - autoIncrement: false - name: modelFile - description: The model file + description: The commands to run shop with source: type: list: false @@ -53,82 +36,7 @@ properties: nullable: true autoIncrement: false 
name: source - description: The source of the model - shopStartSpecification: - type: - list: false - collation: ucs_basic - type: text - nullable: true - autoIncrement: false - name: shopStartSpecification - description: The specification for when to start the shop - shopEndSpecification: - type: - list: false - collation: ucs_basic - type: text - nullable: true - autoIncrement: false - name: shopEndSpecification - description: The specification for when to end the shop - shopStart: - type: - list: false - type: date - nullable: true - autoIncrement: false - name: shopStart - description: The time to start the shop - shopEnd: - type: - list: false - type: date - nullable: true - autoIncrement: false - name: shopEnd - description: The time to end the shop - shopStartTimestamp: - type: - list: false - type: timestamp - nullable: true - autoIncrement: false - name: shopStart - description: The time to start the shop - shopEndTimestamp: - type: - list: false - type: timestamp - nullable: true - autoIncrement: false - name: shopEnd - description: The time to end the shop - bidDate: - type: - list: false - type: date - nullable: true - autoIncrement: false - name: bidDate - description: The date of the bid - shopVersion: - type: - list: false - collation: ucs_basic - type: text - nullable: true - autoIncrement: false - name: shopVersion - description: The version of SHOP to run - isReady: - type: - list: false - type: boolean - nullable: true - autoIncrement: false - name: isReady - description: Is the scenario ready to run + description: The source of the scenario indexes: nameIndex: properties: diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/frontend_Asset.datamodel.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/frontend_Asset.datamodel.yaml index 2c583d67d..9d4e7f534 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/frontend_Asset.datamodel.yaml +++ 
b/cognite/powerops/custom_modules/power_model_v1/data_models/frontend_Asset.datamodel.yaml @@ -36,7 +36,3 @@ views: space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: PriceScenario - space: '{{powerops_models}}' - type: view - version: '{{version}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/frontend_DayAheadBid.datamodel.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/frontend_DayAheadBid.datamodel.yaml index fedb89014..5015ded37 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/frontend_DayAheadBid.datamodel.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/frontend_DayAheadBid.datamodel.yaml @@ -44,31 +44,35 @@ views: space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: PriceScenario +- externalId: Alert space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: Alert +- externalId: Scenario space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: SHOPResult +- externalId: ModelTemplate space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: Scenario +- externalId: Mapping space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: ModelTemplate +- externalId: WatercourseShop space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: Mapping +- externalId: PriceProdCase space: '{{powerops_models}}' type: view version: '{{version}}' -- externalId: WatercourseShop +- externalId: Commands + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: Case space: '{{powerops_models}}' type: view version: '{{version}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/node_types/power-ops-types.powerops_nodes.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/node_types/power-ops-types.powerops_nodes.yaml index 2ff050e37..b6e6a5580 100644 --- 
a/cognite/powerops/custom_modules/power_model_v1/data_models/node_types/power-ops-types.powerops_nodes.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/node_types/power-ops-types.powerops_nodes.yaml @@ -53,6 +53,32 @@ - space: '{{powerops_type_space}}' externalId: PowerAsset +## CogShop types +- space: '{{powerops_type_space}}' + externalId: Case +- space: {{powerops_type_space}} + externalId: ModelTemplate +- space: {{powerops_type_space}} + externalId: Mapping +- space: {{powerops_type_space}} + externalId: Scenario +- space: {{powerops_type_space}} + externalId: PriceScenario +- space: '{{powerops_type_space}}' + externalId: SHOPResult +- space: '{{powerops_type_space}}' + externalId: SHOPResultPriceProd +- space: '{{powerops_type_space}}' + externalId: PriceProdCase +- space: '{{powerops_type_space}}' + externalId: Commands +- space: '{{powerops_type_space}}' + externalId: SHOPTimeSeries +- space: '{{powerops_type_space}}' + externalId: SHOPResultPriceProd.productionTimeseries +- space: '{{powerops_type_space}}' + externalId: MultiScenarioMatrix.scenarioResults + ## DayAhead Types - space: {{powerops_type_space}} externalId: DayAheadBasicBidMatrix @@ -76,20 +102,12 @@ externalId: PreprocessorInput - space: {{powerops_type_space}} externalId: PreprocessorOutput -- space: {{powerops_type_space}} - externalId: SHOPResult -- space: {{powerops_type_space}} - externalId: ModelTemplate -- space: {{powerops_type_space}} - externalId: Mapping - space: {{powerops_type_space}} externalId: TotalBidMatrixCalculationInput.partialBidMatrices - space: {{powerops_type_space}} externalId: TotalBidMatrixCalculationInput - space: {{powerops_type_space}} externalId: TotalBidMatrixCalculationOutput -- space: {{powerops_type_space}} - externalId: SHOPResult - space: {{powerops_type_space}} externalId: BidConfiguration.plants - space: {{powerops_type_space}} @@ -122,12 +140,6 @@ externalId: SHOPTriggerOutput - space: {{powerops_type_space}} externalId: 
WaterPartialBidCalculationOutput -- space: {{powerops_type_space}} - externalId: Scenario -- space: {{powerops_type_space}} - externalId: ScenarioRaw -- space: {{powerops_type_space}} - externalId: PriceScenario - space: '{{powerops_type_space}}' externalId: BidCalculationTask - space: '{{powerops_type_space}}' @@ -151,7 +163,7 @@ - space: '{{powerops_type_space}}' externalId: BidMethodWaterValue - space: '{{powerops_type_space}}' - externalId: BidMethodDayahead.priceScenarios + externalId: BidMethodDayahead.scenarios - space: '{{powerops_type_space}}' externalId: MultiScenarioMatrix.shopResults - space: '{{powerops_type_space}}' @@ -174,8 +186,6 @@ externalId: BidConfigurationWater - space: {{powerops_type_space}} externalId: BidDocument -- space: {{powerops_type_space}} - externalId: ScenarioBase # Benchmarking Types diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/product_CogShop.datamodel.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/product_CogShop.datamodel.yaml new file mode 100644 index 000000000..57012a3e8 --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/product_CogShop.datamodel.yaml @@ -0,0 +1,30 @@ +externalId: product_CogShop +space: '{{powerops_models}}' +version: '1' +name: CogShop +description: CogShop data model +views: +- externalId: Case + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: Scenario + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: Commands + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: Mapping + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: ModelTemplate + space: '{{powerops_models}}' + type: view + version: '{{version}}' +- externalId: WatercourseShop + space: '{{powerops_models}}' + type: view + version: '{{version}}' diff --git 
a/cognite/powerops/custom_modules/power_model_v1/data_models/views/MarketConfiguration.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/MarketConfiguration.view.yaml index ab1e0bc70..fabe5b96c 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/MarketConfiguration.view.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/MarketConfiguration.view.yaml @@ -23,6 +23,14 @@ filter: implements: [] version: '{{version}}' properties: + name: + container: + space: '{{powerops_models}}' + externalId: MarketConfiguration + type: container + containerPropertyIdentifier: name + name: name + description: The name of the market marketType: container: space: '{{powerops_models}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/PriceScenario.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/PriceScenario.view.yaml deleted file mode 100644 index 1cebd025b..000000000 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/PriceScenario.view.yaml +++ /dev/null @@ -1,41 +0,0 @@ -space: '{{powerops_models}}' -externalId: PriceScenario -name: PriceScenario -description: Price scenarios used in the frontend data contract -filter: - and: - - hasData: - - type: container - space: '{{powerops_models}}' - externalId: PriceScenario - - equals: - property: - - node - - space - value: '{{powerops_instance_space}}' - - equals: - property: - - node - - type - value: - externalId: PriceScenario - space: '{{powerops_type_space}}' -implements: [] -version: '{{version}}' -properties: - name: - container: - space: '{{powerops_models}}' - externalId: PriceScenario - type: container - containerPropertyIdentifier: name - name: name - description: The name of the scenario - timeseries: - container: - space: '{{powerops_models}}' - externalId: PriceScenario - type: container - containerPropertyIdentifier: timeseries - name: timeseries - description: The price 
timeseries in this price scenario diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/SHOPResult.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/SHOPResult.view.yaml deleted file mode 100644 index 33ecaccb3..000000000 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/SHOPResult.view.yaml +++ /dev/null @@ -1,88 +0,0 @@ -space: '{{powerops_models}}' -externalId: SHOPResult -name: SHOPResult -description: The result of a shop run with reference to the Scenario instance that was used to produce it -filter: - and: - - hasData: - - type: container - space: '{{powerops_models}}' - externalId: FunctionData - - equals: - property: - - node - - space - value: '{{powerops_instance_space}}' - - equals: - property: - - node - - type - value: - externalId: SHOPResult - space: '{{powerops_type_space}}' -implements: [] -version: '{{version}}' -properties: - scenario: - container: - space: '{{powerops_models}}' - externalId: FunctionData - type: container - containerPropertyIdentifier: direct1 - name: scenario - description: The Shop scenario that was used to produce this result - source: - space: '{{powerops_models}}' - externalId: Scenario - version: '{{version}}' - type: view - price_scenario: - container: - space: '{{powerops_models}}' - externalId: FunctionData - type: container - containerPropertyIdentifier: direct2 - name: price_scenario - description: The price scenario that was used to produce this result - source: - space: '{{powerops_models}}' - externalId: PriceScenario - version: '{{version}}' - type: view - production: - container: - space: '{{powerops_models}}' - externalId: FunctionData - type: container - containerPropertyIdentifier: timeseries1 - name: production - description: The result production timeseries from a SHOP run - price: - container: - space: '{{powerops_models}}' - externalId: FunctionData - type: container - containerPropertyIdentifier: timeseries2 - name: price - 
description: The result price timeseries from a SHOP run - objectiveSequence: - container: - space: '{{powerops_models}}' - externalId: FunctionData - type: container - containerPropertyIdentifier: sequence - name: objectiveSequence - description: The sequence of the objective function - alerts: - type: - space: '{{powerops_type_space}}' - externalId: calculationIssue - source: - space: '{{powerops_models}}' - externalId: Alert - version: '{{version}}' - type: view - direction: outwards - name: alerts - description: An array of calculation level Alerts. - connectionType: multi_edge_connection diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_configuration/BidConfigurationSHOP.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_configuration/BidConfigurationSHOP.view.yaml index 8b4a9f375..ab0f25354 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_configuration/BidConfigurationSHOP.view.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_configuration/BidConfigurationSHOP.view.yaml @@ -27,6 +27,14 @@ implements: type: view version: '{{version}}' properties: + name: + container: + space: '{{powerops_models}}' + externalId: BidConfiguration + type: container + containerPropertyIdentifier: name + name: name + description: The name of the bid configuration method: container: space: '{{powerops_models}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_matrix/MultiScenarioMatrix.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_matrix/MultiScenarioMatrix.view.yaml index 42665e8d6..73ccfaf3e 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_matrix/MultiScenarioMatrix.view.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_matrix/MultiScenarioMatrix.view.yaml @@ -44,16 +44,16 @@ properties: externalId: BidMethodSHOPMultiScenario 
version: '{{version}}' type: view - shopResults: + scenarioResults: type: space: '{{powerops_type_space}}' - externalId: MultiScenarioMatrix.shopResults + externalId: MultiScenarioMatrix.scenarioResults source: space: '{{powerops_models}}' - externalId: SHOPResult + externalId: PriceProdCase version: '{{version}}' type: view direction: outwards - name: shopResults - description: An array of results, one for each scenario. - connectionType: multi_edge_connection + name: scenarioResults + description: An array of price/prod pairs, one for each scenario/case - this is needed for the frontend + connectionType: multiEdgeConnection diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_method/1.interface.BidMethodDayAhead.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_method/1.interface.BidMethodDayAhead.view.yaml index 73e96b90a..9ba5b9090 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_method/1.interface.BidMethodDayAhead.view.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_method/1.interface.BidMethodDayAhead.view.yaml @@ -19,17 +19,4 @@ implements: version: '{{version}}' type: view version: '{{version}}' -properties: - mainScenario: - container: - space: '{{powerops_models}}' - externalId: BidMethod - type: container - containerPropertyIdentifier: mainScenario - name: mainScenario - description: The main scenario to use when running the bid method - source: - space: '{{powerops_models}}' - externalId: Mapping - version: '{{version}}' - type: view +properties: {} #TODO: consider removing? 
leaving this one like this because it does not make sense to point to a type of Scenario for WVBBG diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_method/BidMethodSHOPMultiScenario.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_method/BidMethodSHOPMultiScenario.view.yaml index 6e6bd1256..ad3783ffe 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_method/BidMethodSHOPMultiScenario.view.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/bid_method/BidMethodSHOPMultiScenario.view.yaml @@ -26,16 +26,40 @@ implements: type: view version: '{{version}}' properties: - priceScenarios: + scenarios: type: space: '{{powerops_type_space}}' - externalId: BidMethodDayahead.priceScenarios + externalId: BidMethodDayahead.scenarios source: space: '{{powerops_models}}' - externalId: Mapping + externalId: Scenario version: '{{version}}' type: view direction: outwards - name: priceScenarios - description: The price scenarios to use in the shop run + name: scenarios + description: The scenarios to run this bid method with (includes incremental mappings and base mappings) connectionType: multi_edge_connection + shopStartSpecification: + container: + space: '{{powerops_models}}' + externalId: BidMethod + type: container + containerPropertyIdentifier: shopStartSpecification + name: shopStartSpecification + description: The shop start specification + shopEndSpecification: + container: + space: '{{powerops_models}}' + externalId: BidMethod + type: container + containerPropertyIdentifier: shopEndSpecification + name: shopEndSpecification + description: The shop end specification + shopBidDateSpecification: + container: + space: '{{powerops_models}}' + externalId: BidMethod + type: container + containerPropertyIdentifier: shopBidDateSpecification + name: shopBidDateSpecification + description: The shop bid date specification diff --git 
a/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Case.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Case.view.yaml new file mode 100644 index 000000000..887061dbb --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Case.view.yaml @@ -0,0 +1,93 @@ +space: '{{powerops_models}}' +externalId: Case +name: Case +description: A case that links a Scenario and shop dates to run shop with +filter: + and: + - hasData: + - type: container + space: '{{powerops_models}}' + externalId: Case + - equals: + property: + - node + - space + value: '{{powerops_instance_space}}' + - equals: + property: + - node + - type + value: + externalId: Case + space: '{{powerops_type_space}}' +implements: [] +version: '{{version}}' +properties: + scenario: + container: + space: '{{powerops_models}}' + externalId: Case + type: container + containerPropertyIdentifier: scenario + name: scenario + description: The Shop scenario that was used to produce this result + source: + space: '{{powerops_models}}' + externalId: Scenario + version: '{{version}}' + type: view + caseFile: + container: + space: '{{powerops_models}}' + externalId: Case + type: container + containerPropertyIdentifier: caseFile + name: caseFile + description: The case file used + reservoirMapping: + container: + space: '{{powerops_models}}' + externalId: Case + type: container + containerPropertyIdentifier: reservoirMapping + name: reservoirMapping + description: The cut file reservoir mapping + cutOrderFiles: + container: + space: '{{powerops_models}}' + externalId: Case + type: container + containerPropertyIdentifier: cutOrderFiles + name: cutOrderFiles + description: Cut order files (Module series in PRODRISK) + extraFiles: + container: + space: '{{powerops_models}}' + externalId: Case + type: container + containerPropertyIdentifier: extraFiles + name: extraFiles + cogShopFilesConfig: + container: + space: '{{powerops_models}}' 
+ externalId: Case + type: container + containerPropertyIdentifier: cogShopFilesConfig + name: cogShopFilesConfig + description: Configuration for in what order to load the various files into pyshop + startTime: + container: + space: '{{powerops_models}}' + externalId: Case + type: container + containerPropertyIdentifier: startTime + name: startTime + description: The start time of the case + endTime: + container: + space: '{{powerops_models}}' + externalId: Case + type: container + containerPropertyIdentifier: endTime + name: endTime + description: The end time of the case diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Commands.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Commands.view.yaml new file mode 100644 index 000000000..55e5fc5a7 --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Commands.view.yaml @@ -0,0 +1,33 @@ +space: '{{powerops_models}}' +externalId: Commands +name: Commands +description: The commands to use in the shop model file +filter: + and: + - hasData: + - type: container + space: '{{powerops_models}}' + externalId: CommandsConfig + - equals: + property: + - node + - space + value: '{{powerops_instance_space}}' + - equals: + property: + - node + - type + value: + externalId: Commands + space: '{{powerops_type_space}}' +implements: [] +version: '{{version}}' +properties: + commands: + container: + space: '{{powerops_models}}' + externalId: CommandsConfig + type: container + containerPropertyIdentifier: commands + name: commands + description: The commands used in the shop model file diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/Mapping.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Mapping.view.yaml similarity index 100% rename from cognite/powerops/custom_modules/power_model_v1/data_models/views/Mapping.view.yaml rename to 
cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Mapping.view.yaml diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/ModelTemplate.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/ModelTemplate.view.yaml similarity index 78% rename from cognite/powerops/custom_modules/power_model_v1/data_models/views/ModelTemplate.view.yaml rename to cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/ModelTemplate.view.yaml index 522108dc1..f2d156bcd 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/ModelTemplate.view.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/ModelTemplate.view.yaml @@ -23,14 +23,14 @@ filter: implements: [] version: '{{version}}' properties: - cogShopVersion: + version: container: space: '{{powerops_models}}' externalId: ModelTemplate type: container - containerPropertyIdentifier: cogShopVersion - name: cogShopVersion - description: The tag of the cogshop image to run + containerPropertyIdentifier: version + name: version + description: The version of the model shopVersion: container: space: '{{powerops_models}}' @@ -60,14 +60,22 @@ properties: containerPropertyIdentifier: model name: model description: The shop model file to use as template before applying base mapping - source: + cogShopFilesConfig: container: space: '{{powerops_models}}' externalId: ModelTemplate type: container - containerPropertyIdentifier: source - name: source - description: The source of the model, for example, 'resync' + containerPropertyIdentifier: cogShopFilesConfig + name: cogShopFilesConfig + description: Configuration for in what order to load the various files into pyshop + extraFiles: + container: + space: '{{powerops_models}}' + externalId: ModelTemplate + type: container + containerPropertyIdentifier: extraFiles + name: extraFiles + description: Extra files related to a model template baseMappings: 
type: space: '{{powerops_type_space}}' diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Scenario.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Scenario.view.yaml new file mode 100644 index 000000000..f07b8e13e --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/cogshop/Scenario.view.yaml @@ -0,0 +1,73 @@ +space: '{{powerops_models}}' +externalId: Scenario +name: Scenario +description: The Scenario to run shop with +filter: + and: + - hasData: + - type: container + space: '{{powerops_models}}' + externalId: Scenario + - equals: + property: + - node + - space + value: '{{powerops_instance_space}}' +implements: [] +version: '{{version}}' +properties: + name: + container: + space: '{{powerops_models}}' + externalId: Scenario + type: container + containerPropertyIdentifier: name + name: name + description: The name of the scenario to run + modelTemplate: + container: + space: '{{powerops_models}}' + externalId: Scenario + type: container + containerPropertyIdentifier: modelTemplate + name: modelTemplate + description: The model template to use when running the scenario + source: + space: '{{powerops_models}}' + externalId: ModelTemplate + version: '{{version}}' + type: view + commands: + container: + space: '{{powerops_models}}' + externalId: Scenario + type: container + containerPropertyIdentifier: commands + name: commands + description: The commands to run + source: + space: '{{powerops_models}}' + externalId: Commands + version: '{{version}}' + type: view + source: + container: + space: '{{powerops_models}}' + externalId: Scenario + type: container + containerPropertyIdentifier: source + name: source + description: The source of the scenario + mappingsOverride: + type: + space: '{{powerops_type_space}}' + externalId: Mapping + source: + space: '{{powerops_models}}' + externalId: Mapping + version: '{{version}}' + type: view + direction: outwards + name: 
mappingsOverride + description: An array of base mappings to override in shop model file + connectionType: multi_edge_connection diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_inputs/PreprocessorInput.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_inputs/PreprocessorInput.view.yaml index 110c2394d..ebce29c55 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_inputs/PreprocessorInput.view.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_inputs/PreprocessorInput.view.yaml @@ -27,17 +27,32 @@ implements: type: view version: '{{version}}' properties: - scenarioRaw: + scenario: container: space: '{{powerops_models}}' externalId: FunctionData type: container containerPropertyIdentifier: direct1 - name: scenarioRaw - description: The scenario that needs preprocessing before being sent to shop (has - isReady flag set to false) + name: scenario + description: The scenario to run shop with source: space: '{{powerops_models}}' - externalId: ScenarioRaw + externalId: Scenario version: '{{version}}' type: view + shopStart: + container: + space: '{{powerops_models}}' + externalId: FunctionData + type: container + containerPropertyIdentifier: date1 + name: shopStart + description: Start date of bid period + shopEnd: + container: + space: '{{powerops_models}}' + externalId: FunctionData + type: container + containerPropertyIdentifier: date2 + name: shopEnd + description: End date of bid period diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_inputs/ShopPartialBidCalculationInput.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_inputs/ShopPartialBidCalculationInput.view.yaml index 77d669f0b..297ff1a0c 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_inputs/ShopPartialBidCalculationInput.view.yaml +++ 
b/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_inputs/ShopPartialBidCalculationInput.view.yaml @@ -34,37 +34,24 @@ properties: type: container containerPropertyIdentifier: direct1 name: plant - description: The plant to calculate the partial bid for + description: The plant to calculate the partial bid for. Extract price/prod timeseries from Shop Results source: space: '{{powerops_models}}' externalId: PlantShop version: '{{version}}' type: view - shopResults: + shopResultPriceProd: type: space: '{{powerops_type_space}}' - externalId: SHOPResult + externalId: SHOPResultPriceProd source: space: '{{powerops_models}}' - externalId: SHOPResult + externalId: SHOPResultPriceProd version: '{{version}}' type: view direction: outwards name: shopResults - description: An array of shop results. - connectionType: multi_edge_connection - alerts: - type: - space: '{{powerops_type_space}}' - externalId: calculationIssue - source: - space: '{{powerops_models}}' - externalId: Alert - version: '{{version}}' - type: view - direction: outwards - name: alerts - description: An array of calculation level Alerts. 
+ description: An array of shop results with price/prod timeseries pairs for all plants included in the respective shop scenario connectionType: multi_edge_connection marketConfiguration: container: @@ -79,3 +66,11 @@ properties: externalId: MarketConfiguration version: '{{version}}' type: view + stepEnabled: + container: + space: '{{powerops_models}}' + externalId: FunctionData + type: container + containerPropertyIdentifier: flag + name: stepEnabled + description: Whether the step is enabled or not diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_outputs/PreprocessorOutput.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_outputs/PreprocessorOutput.view.yaml index b20cfcf86..85a2e5bde 100644 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_outputs/PreprocessorOutput.view.yaml +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/function_outputs/PreprocessorOutput.view.yaml @@ -27,17 +27,17 @@ implements: type: view version: '{{version}}' properties: - scenario: + case: container: space: '{{powerops_models}}' externalId: FunctionData type: container containerPropertyIdentifier: direct1 - name: scenario - description: The prepped and processed scenario to send to shop trigger + name: case + description: The Case to trigger shop with source: space: '{{powerops_models}}' - externalId: Scenario + externalId: Case version: '{{version}}' type: view input: diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/1-interface.ScenarioBase.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/1-interface.ScenarioBase.view.yaml deleted file mode 100644 index 14859600b..000000000 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/1-interface.ScenarioBase.view.yaml +++ /dev/null @@ -1,141 +0,0 @@ -space: '{{powerops_models}}' -externalId: ScenarioBase -name: 
ScenarioBase -description: SHOP scenario that holds all data needed for a shop run. May be unprocessed - or processed -filter: - and: - - hasData: - - type: container - space: '{{powerops_models}}' - externalId: Scenario - - equals: - property: - - node - - space - value: '{{powerops_instance_space}}' -implements: [] -version: '{{version}}' -properties: - name: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: name - name: name - description: The name of the scenario to run - shopVersion: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: shopVersion - name: shopVersion - description: The shop version to use when running the scenario - modelTemplate: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: modelTemplate - name: modelTemplate - description: The model template to use when running the scenario - source: - space: '{{powerops_models}}' - externalId: ModelTemplate - version: '{{version}}' - type: view - modelFile: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: modelFile - name: modelFile - description: The final model file to use when running the scenario (after modelTemplate is processed) - commands: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: commands - name: commands - description: The commands to run when running the scenario - extraFiles: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: extraFiles - name: extraFiles - description: Extra files to include when running the scenario - source: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: source - name: source - description: The source of the 
scenario - shopStartSpecification: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: shopStartSpecification - name: shopStartSpecification - description: The shop start specification - shopEndSpecification: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: shopEndSpecification - name: shopEndSpecification - description: The shop end specification - shopStart: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: shopStartTimestamp - name: shopStart - description: The shop start time - shopEnd: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: shopEndTimestamp - name: shopEnd - description: The shop end time - bidDate: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: bidDate - name: bidDate - description: The bid date of the scenario - isReady: - container: - space: '{{powerops_models}}' - externalId: Scenario - type: container - containerPropertyIdentifier: isReady - name: isReady - description: Whether the scenario is ready to be run - mappingsOverride: - type: - space: '{{powerops_type_space}}' - externalId: Mapping - source: - space: '{{powerops_models}}' - externalId: Mapping - version: '{{version}}' - type: view - direction: outwards - name: mappingsOverride - description: An array of base mappings to override in shop model file - connectionType: multi_edge_connection diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/Scenario.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/Scenario.view.yaml deleted file mode 100644 index 722d213aa..000000000 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/Scenario.view.yaml +++ /dev/null @@ -1,29 
+0,0 @@ -space: '{{powerops_models}}' -externalId: Scenario -name: Scenario -description: SHOP scenario that holds all data needed for a shop run. May be unprocessed - or processed -filter: - and: - - hasData: - - type: container - space: '{{powerops_models}}' - externalId: Scenario - - equals: - property: - - node - - space - value: '{{powerops_instance_space}}' - - equals: - property: - - '{{powerops_models}}' - - Scenario - - isReady - value: 'true' -implements: -- space: '{{powerops_models}}' - externalId: ScenarioBase - version: '{{version}}' - type: view -version: '{{version}}' -properties: {} diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/ScenarioRaw.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/ScenarioRaw.view.yaml deleted file mode 100644 index 1f1ec3303..000000000 --- a/cognite/powerops/custom_modules/power_model_v1/data_models/views/scenario/ScenarioRaw.view.yaml +++ /dev/null @@ -1,29 +0,0 @@ -space: '{{powerops_models}}' -externalId: ScenarioRaw -name: ScenarioRaw -description: SHOP scenario that holds all data needed for a shop run. 
May be unprocessed - or processed -filter: - and: - - hasData: - - type: container - space: '{{powerops_models}}' - externalId: Scenario - - equals: - property: - - node - - space - value: '{{powerops_instance_space}}' - - equals: - property: - - '{{powerops_models}}' - - Scenario - - isReady - value: 'false' -implements: -- space: '{{powerops_models}}' - externalId: ScenarioBase - version: '{{version}}' - type: view -version: '{{version}}' -properties: {} diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/1-interface.SHOPResult.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/1-interface.SHOPResult.view.yaml new file mode 100644 index 000000000..fcb2e8f99 --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/1-interface.SHOPResult.view.yaml @@ -0,0 +1,93 @@ +space: '{{powerops_models}}' +externalId: SHOPResult +name: SHOPResult +description: A generic shop result type that collects all time series outputs from SHOP. 
This type replaces the POWEROPS_SHOP_RUN event in cdf today +#TODO: Consider adding a filter for SHOPResult and SHOPResultPriceProd to exclude PriceProdCase instances from this view that also uses the SHOPResult container +filter: + and: + - hasData: + - type: container + space: '{{powerops_models}}' + externalId: SHOPResult + - equals: + property: + - node + - space + value: '{{powerops_instance_space}}' +implements: [] +version: '{{version}}' +properties: + case: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + containerPropertyIdentifier: case + name: case + description: The case that was used to produce this result + source: + space: '{{powerops_models}}' + externalId: Case + version: '{{version}}' + type: view + outputTimeseries: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + containerPropertyIdentifier: tsList1 + name: outputTimeseries + description: A general placeholder for all timeseries that stem from a shop run + objectiveSequence: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + containerPropertyIdentifier: objectiveSequence + name: objectiveSequence + description: The sequence of the objective function + preRun: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + containerPropertyIdentifier: preRun + name: preRun + description: The pre-run data for the SHOP run + postRun: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + containerPropertyIdentifier: postRun + name: postRun + description: The post-run data for the SHOP run + shopMessages: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + containerPropertyIdentifier: shopMessages + name: shopMessages + description: The messages from the SHOP run + cplexLogs: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + 
containerPropertyIdentifier: cplexLogs + name: cplexLogs + description: The logs from CPLEX + alerts: + type: + space: '{{powerops_type_space}}' + externalId: calculationIssue + source: + space: '{{powerops_models}}' + externalId: Alert + version: '{{version}}' + type: view + direction: outwards + name: alerts + description: An array of calculation level Alerts. + connectionType: multi_edge_connection diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/PriceProdCase.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/PriceProdCase.view.yaml new file mode 100644 index 000000000..2c7ad82f4 --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/PriceProdCase.view.yaml @@ -0,0 +1,46 @@ +#NB! This view is used in the frontend to pair a price/prod pair with a scenario and link it to the multiscenariobidmatrix +space: '{{powerops_models}}' +externalId: PriceProdCase +name: PriceProdCase +version: '{{version}}' +description: The output price/prod pair of a shop run linked to the scenario that was used to produce it. 
This is used in the frontend to pair a price/prod pair with a scenario and link it to the multiscenariobidmatrix +filter: + and: + - hasData: + - type: container + space: '{{powerops_models}}' + externalId: SHOPResult + - equals: + property: + - node + - type + value: + space: '{{powerops_type_space}}' + externalId: PriceProdCase +properties: + price: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + containerPropertyIdentifier: ts1 + name: price + production: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + containerPropertyIdentifier: ts2 + name: production + case: + container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + source: + space: '{{powerops_models}}' + externalId: Case + version: '{{version}}' + type: view + containerPropertyIdentifier: case + name: case diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/SHOPResultPriceProd.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/SHOPResultPriceProd.view.yaml new file mode 100644 index 000000000..9ce59c4b4 --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/SHOPResultPriceProd.view.yaml @@ -0,0 +1,55 @@ +space: '{{powerops_models}}' +externalId: SHOPResultPriceProd +name: SHOPResultPriceProd +description: A shop result type that extends the generic shop result with a direct link to the price and production time series +filter: + and: + - hasData: + - type: container + space: '{{powerops_models}}' + externalId: SHOPResult + - equals: + property: + - node + - space + value: '{{powerops_instance_space}}' + - equals: + property: + - node + - type + value: + externalId: SHOPResultPriceProd + space: '{{powerops_type_space}}' +implements: +- space: '{{powerops_models}}' + externalId: SHOPResult + version: '{{version}}' + type: view +version: '{{version}}' +properties: + priceTimeseries: + 
container: + space: '{{powerops_models}}' + externalId: SHOPResult + type: container + containerPropertyIdentifier: shopTimeseries1 + name: priceTimeseries + description: The market price timeseries from the Shop run + source: + space: '{{powerops_models}}' + externalId: SHOPTimeSeries + version: '{{version}}' + type: view + productionTimeseries: + type: + space: '{{powerops_type_space}}' + externalId: SHOPResultPriceProd.productionTimeseries + source: + space: '{{powerops_models}}' + externalId: SHOPTimeSeries + version: '{{version}}' + type: view + direction: outwards + name: productionTimeseries + description: The production timeseries wrapped as a ShopTimeSeries object containing properties related to their names and types in the resulting output shop file + connectionType: multi_edge_connection diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/SHOPTimeSeries.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/SHOPTimeSeries.view.yaml new file mode 100644 index 000000000..bb6a6dcfb --- /dev/null +++ b/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/SHOPTimeSeries.view.yaml @@ -0,0 +1,57 @@ +space: '{{powerops_models}}' +externalId: SHOPTimeSeries +name: SHOPTimeSeries +description: A wrapper around a timeseries object from the output of a successful SHOP run +filter: + and: + - hasData: + - type: container + space: '{{powerops_models}}' + externalId: SHOPTimeSeries + - equals: + property: + - node + - space + value: '{{powerops_instance_space}}' + - equals: + property: + - node + - type + value: + externalId: SHOPTimeSeries + space: '{{powerops_type_space}}' +implements: [] +version: '{{version}}' +properties: + objectType: + container: + space: '{{powerops_models}}' + externalId: SHOPTimeSeries + type: container + containerPropertyIdentifier: objectType + name: objectType + description: The type of the object + objectName: + container: + space: 
'{{powerops_models}}' + externalId: SHOPTimeSeries + type: container + containerPropertyIdentifier: objectName + name: objectName + description: The name of the object + attributeName: + container: + space: '{{powerops_models}}' + externalId: SHOPTimeSeries + type: container + containerPropertyIdentifier: attributeName + name: attributeName + description: The name of the attribute + timeseries: + container: + space: '{{powerops_models}}' + externalId: SHOPTimeSeries + type: container + containerPropertyIdentifier: timeseries + name: timeseries + description: Timeseries object from output of SHOP stored as a timeseries in cdf diff --git a/cognite/powerops/custom_modules/power_model_v1/data_models/views/ShopObjectiveValue.view.yaml b/cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/ShopObjectiveValue.view.yaml similarity index 100% rename from cognite/powerops/custom_modules/power_model_v1/data_models/views/ShopObjectiveValue.view.yaml rename to cognite/powerops/custom_modules/power_model_v1/data_models/views/shop_result/ShopObjectiveValue.view.yaml diff --git a/pyproject.toml b/pyproject.toml index 2872de312..24547fecd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "cognite-power-ops" -version = "0.88.9" +version = "0.90.0" description = "SDK for power markets operations on Cognite Data Fusion" readme = "README.md" authors = ["Cognite "] diff --git a/scripts/write_read_instances.py b/scripts/write_read_instances.py index 0c32a7140..308ecce33 100644 --- a/scripts/write_read_instances.py +++ b/scripts/write_read_instances.py @@ -40,13 +40,6 @@ def main(): # Custom fix of the Scenario/BidMatrix data as it filters on a property which the PygenMockGenerator does not set. 
for view_data in mock_data: - if view_data.view_id in { - dm.ViewId("sp_powerops_models", "Scenario", "1"), - dm.ViewId("sp_powerops_models", "ScenarioRaw", "1"), - }: - is_ready = True if view_data.view_id.external_id == "Scenario" else False - for node in view_data.node: - node.sources[0].properties["isReady"] = is_ready if view_data.view_id in { dm.ViewId("sp_powerops_models", "BidMatrixRaw", "1"), dm.ViewId("sp_powerops_models", "MultiScenarioMatrix", "1"),