Skip to content

Commit

Permalink
[POWEROPS-2219] Shop model to nodes (#321)
Browse files Browse the repository at this point in the history
* refactor Setup shell for asset converter

* tests: added failing tests

* refactor; Generator conversion

* refactor: Wrote plant transformation

* refactor: Added price area, watercourse and reservoir

* refactor; added missing connections

* refactor: setup CLI

* refactor; review feedback

* refactor: review feedback

* refactor: avoid breaking unrelated code

* refactor: fix line endings

* refactor: renaming

* refactor: review feedback

* refactor: review feedback

* refactor; typo

* refactor: moved apply2
  • Loading branch information
doctrino authored Mar 1, 2024
1 parent edd4aaa commit c00bf91
Show file tree
Hide file tree
Showing 8 changed files with 455 additions and 3 deletions.
7 changes: 7 additions & 0 deletions cognite/powerops/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from rich.logging import Console, RichHandler

import cognite.powerops.resync.core.echo
import cognite.powerops.resync.v2.main
from cognite import powerops
from cognite.powerops import resync
from cognite.powerops.client import PowerOpsClient
Expand Down Expand Up @@ -161,6 +162,12 @@ def apply(
echo(changed.as_github_markdown())


@app.command("apply2", help="Apply the changes from the configuration files to the data model in CDF")
def apply2(path: Annotated[Path, typer.Argument(help="Path to configuration files")]):
    """CLI entry point for the v2 resync apply: delegates to resync.v2.main.apply2."""
    message = f"Running apply on configuration files located in {path}"
    logging.info(message)
    run_apply = cognite.powerops.resync.v2.main.apply2
    run_apply(path)


@app.command("destroy", help="Destroy all the data models created by resync and remove all the data.")
def destroy(
models: list[str] = typer.Option(
Expand Down
1 change: 1 addition & 0 deletions cognite/powerops/resync/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from .core import DATAMODEL_ID_TO_RESYNC_NAME, MODELS_BY_NAME, apply, destroy, init, plan, validate
from .core.echo import Echo
from .v2.main import apply2

__all__ = ["apply", "plan", "init", "destroy", "validate", "MODELS_BY_NAME", "Echo", "DATAMODEL_ID_TO_RESYNC_NAME"]
Empty file.
20 changes: 20 additions & 0 deletions cognite/powerops/resync/v2/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
from __future__ import annotations

from pathlib import Path

from rich import print

from cognite.powerops import PowerOpsClient
from cognite.powerops.resync.v2.shop_to_assets import PowerAssetImporter


def apply2(config_dir: Path, client: PowerOpsClient | None = None) -> None:
    """Convert the shop production configuration under *config_dir* to power
    assets and upsert them to CDF.

    Args:
        config_dir: Directory containing the configuration files; the importer
            reads the ``production`` subdirectory.
        client: Optional pre-configured client; falls back to settings-based
            construction when not provided.
    """
    power_client = client or PowerOpsClient.from_settings()

    # Build all asset write objects from the production configuration in one pass.
    assets = PowerAssetImporter.from_directory(config_dir / "production").to_power_assets()

    power_client.v1.upsert(assets)

    print(f"Upserted {len(assets)} assets")
375 changes: 375 additions & 0 deletions cognite/powerops/resync/v2/shop_to_assets.py

Large diffs are not rendered by default.

36 changes: 33 additions & 3 deletions cognite/powerops/utils/serialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import warnings
from collections.abc import Iterator
from pathlib import Path
from typing import Annotated, Any, ForwardRef, Union, get_args, get_origin
from typing import Annotated, Any, ForwardRef, Literal, Union, get_args, get_origin, overload

import tomli_w
from cognite.client.data_classes import TimeSeries
Expand Down Expand Up @@ -156,12 +156,37 @@ def _validate(yaml_path: Path):
raise ValueError(f"File {yaml_path.name} not a valid yaml {yaml_path.suffix}")


def load_yaml(yaml_path: Path, encoding="utf-8", clean_data: bool = False) -> dict:
@overload
def load_yaml(
    yaml_path: Path, expected_return_type: Literal["dict"] = "dict", encoding="utf-8", clean_data: bool = False
) -> dict: ...  # default overload: caller expects a mapping at the document root


@overload
def load_yaml(
    yaml_path: Path, expected_return_type: Literal["list"], encoding="utf-8", clean_data: bool = False
) -> list: ...  # overload: caller expects a sequence at the document root


@overload
def load_yaml(
    yaml_path: Path, expected_return_type: Literal["any"], encoding="utf-8", clean_data: bool = False
) -> list | dict: ...  # overload: no root-type check; either container may be returned


def load_yaml(
yaml_path: Path,
expected_return_type: Literal["dict", "list", "any"] = "any",
encoding="utf-8",
clean_data: bool = False,
) -> dict | list:
"""
Fast loading of a yaml file.
Args:
yaml_path: The path to the yaml file.
expected_return_type: The expected return type. The function will raise an error
if the file does not return the expected type. Defaults to any.
encoding: The encoding of the yaml file. Defaults to utf-8.
clean_data: Whether to clean the data from invalid characters. Defaults to False.
Expand All @@ -180,7 +205,12 @@ def load_yaml(yaml_path: Path, encoding="utf-8", clean_data: bool = False) -> di
f"File {yaml_path.parent}/{yaml_path.name} contains invalid characters: {', '.join(invalid_characters)}",
stacklevel=2,
)
return CSafeLoader(data).get_data()
output = CSafeLoader(data).get_data()
if expected_return_type == "dict" and not isinstance(output, dict):
raise ValueError(f"Expected a dictionary, got {type(output)}")
if expected_return_type == "list" and not isinstance(output, list):
raise ValueError(f"Expected a list, got {type(output)}")
return output


def dump_yaml(yaml_path: Path, data: dict, encoding="utf-8") -> None:
Expand Down
Empty file.
19 changes: 19 additions & 0 deletions tests/test_unit/test_resync/test_v2/test_shop_to_assets.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from collections import Counter

from cognite.powerops.resync.v2.shop_to_assets import PowerAssetImporter
from tests.constants import ReSync


class TestShopToAssets:
    def test_demo_data_to_assets(self) -> None:
        """The demo production config yields the expected count of each asset write class."""
        assets = PowerAssetImporter.from_directory(ReSync.production).to_power_assets()

        counts = Counter(type(asset).__name__ for asset in assets)

        expected_counts = {
            "GeneratorWrite": 12,
            "PlantWrite": 9,
            "ReservoirWrite": 16,
            "WatercourseWrite": 1,
            "PriceAreaWrite": 1,
        }
        for class_name, expected in expected_counts.items():
            assert counts[class_name] == expected

0 comments on commit c00bf91

Please sign in to comment.