diff --git a/Makefile b/Makefile
index b6e259519..f8ffd6225 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,5 @@
 .PHONY: run-explorer run-tests run-linters build-ui build-python build-docker run-docker compose-up
-version="0.96.4"
+version="0.96.6"
 run-explorer:
 	@echo "Running explorer API server..."
 	# open "http://localhost:8000/static/index.html" || true
diff --git a/cognite/neat/_constants.py b/cognite/neat/_constants.py
index 928f0ca12..bf1e3450e 100644
--- a/cognite/neat/_constants.py
+++ b/cognite/neat/_constants.py
@@ -36,6 +36,7 @@ def _is_in_browser() -> bool:
     DataModelId("cdf_cdm", "CogniteCore", "v1"),
     DataModelId("cdf_idm", "CogniteProcessIndustries", "v1"),
 )
+COGNITE_SPACES = frozenset(model.space for model in COGNITE_MODELS)
 DMS_LISTABLE_PROPERTY_LIMIT = 1000
 
 EXAMPLE_RULES = PACKAGE_DIRECTORY / "_rules" / "examples"
diff --git a/cognite/neat/_issues/_base.py b/cognite/neat/_issues/_base.py
index 3ac0cd61c..da45a83cf 100644
--- a/cognite/neat/_issues/_base.py
+++ b/cognite/neat/_issues/_base.py
@@ -109,7 +109,11 @@ def _get_variables(self) -> tuple[dict[str, str], bool]:
     def dump(self) -> dict[str, Any]:
         """Return a dictionary representation of the issue."""
         variables = vars(self)
-        output = {to_camel(key): self._dump_value(value) for key, value in variables.items() if value is not None}
+        output = {
+            to_camel(key): self._dump_value(value)
+            for key, value in variables.items()
+            if not (value is None or key.startswith("_"))
+        }
         output["NeatIssue"] = type(self).__name__
         return output
diff --git a/cognite/neat/_rules/exporters/_base.py b/cognite/neat/_rules/exporters/_base.py
index bf4ca7a78..e57ef108c 100644
--- a/cognite/neat/_rules/exporters/_base.py
+++ b/cognite/neat/_rules/exporters/_base.py
@@ -32,9 +32,11 @@ def _repr_html_(cls) -> str:
 class CDFExporter(BaseExporter[T_VerifiedRules, T_Export]):
     @abstractmethod
     def export_to_cdf_iterable(
-        self, rules: T_VerifiedRules, client: CogniteClient, dry_run: bool = False
+        self, rules: T_VerifiedRules, client: CogniteClient, dry_run: bool = False, fallback_one_by_one: bool = False
     ) -> Iterable[UploadResult]:
         raise NotImplementedError
 
-    def export_to_cdf(self, rules: T_VerifiedRules, client: CogniteClient, dry_run: bool = False) -> UploadResultList:
-        return UploadResultList(self.export_to_cdf_iterable(rules, client, dry_run))
+    def export_to_cdf(
+        self, rules: T_VerifiedRules, client: CogniteClient, dry_run: bool = False, fallback_one_by_one: bool = False
+    ) -> UploadResultList:
+        return UploadResultList(self.export_to_cdf_iterable(rules, client, dry_run, fallback_one_by_one))
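A note on the `dump()` change above: `vars()` returns every instance attribute, including private ones, and those are what the new `key.startswith("_")` guard filters out. A minimal, self-contained sketch with a hypothetical `Issue` class (not the real `NeatIssue` hierarchy, which additionally camel-cases keys via `to_camel`):

```python
class Issue:
    """Toy stand-in for a NeatIssue subclass; names here are illustrative."""

    def __init__(self) -> None:
        self.message = "container missing"
        self._cache = object()  # private helper state; must not leak into the dump

    def dump(self) -> dict:
        # vars() yields *all* instance attributes, so both None values and
        # underscore-prefixed (private) keys are skipped before serializing.
        return {
            key: value
            for key, value in vars(self).items()
            if not (value is None or key.startswith("_"))
        }


assert Issue().dump() == {"message": "container missing"}
```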
diff --git a/cognite/neat/_rules/exporters/_rules2dms.py b/cognite/neat/_rules/exporters/_rules2dms.py
index f53799157..14e91904e 100644
--- a/cognite/neat/_rules/exporters/_rules2dms.py
+++ b/cognite/neat/_rules/exporters/_rules2dms.py
@@ -118,12 +118,16 @@ def _create_exclude_set(self):
     def export(self, rules: DMSRules) -> DMSSchema:
         return rules.as_schema(include_pipeline=self.export_pipeline, instance_space=self.instance_space)
 
-    def delete_from_cdf(self, rules: DMSRules, client: CogniteClient, dry_run: bool = False) -> Iterable[UploadResult]:
+    def delete_from_cdf(
+        self, rules: DMSRules, client: CogniteClient, dry_run: bool = False, skip_space: bool = False
+    ) -> Iterable[UploadResult]:
         to_export = self._prepare_exporters(rules, client)
 
         # we need to reverse order in which we are picking up the items to delete
         # as they are sorted in the order of creation and we need to delete them in reverse order
         for items, loader in reversed(to_export):
+            if skip_space and isinstance(loader, SpaceLoader):
+                continue
             item_ids = loader.get_ids(items)
             existing_items = loader.retrieve(item_ids)
             existing_ids = loader.get_ids(existing_items)
@@ -162,10 +166,15 @@ def delete_from_cdf(self, rules: DMSRules, client: CogniteClient, dry_run: bool
         )
 
     def export_to_cdf_iterable(
-        self, rules: DMSRules, client: CogniteClient, dry_run: bool = False
+        self, rules: DMSRules, client: CogniteClient, dry_run: bool = False, fallback_one_by_one: bool = False
     ) -> Iterable[UploadResult]:
         to_export = self._prepare_exporters(rules, client)
 
+        result_by_name = {}
+        if self.existing_handling == "force":
+            for delete_result in self.delete_from_cdf(rules, client, dry_run, skip_space=True):
+                result_by_name[delete_result.name] = delete_result
+
         redeploy_data_model = False
         for items, loader in to_export:
             # The conversion from DMS to GraphQL does not seem to be triggered even if the views
@@ -183,8 +192,10 @@ def export_to_cdf_iterable(
             created: set[Hashable] = set()
             skipped: set[Hashable] = set()
             changed: set[Hashable] = set()
+            deleted: set[Hashable] = set()
             failed_created: set[Hashable] = set()
             failed_changed: set[Hashable] = set()
+            failed_deleted: set[Hashable] = set()
             error_messages: list[str] = []
             if dry_run:
                 if self.existing_handling in ["update", "force"]:
@@ -200,7 +211,20 @@ def export_to_cdf_iterable(
                 try:
                     loader.delete(to_delete)
                 except CogniteAPIError as e:
-                    error_messages.append(f"Failed delete: {e.message}")
+                    if fallback_one_by_one:
+                        for item in to_delete:
+                            try:
+                                loader.delete([item])
+                            except CogniteAPIError as item_e:
+                                failed_deleted.add(loader.get_id(item))
+                                error_messages.append(f"Failed delete: {item_e!s}")
+                            else:
+                                deleted.add(loader.get_id(item))
+                    else:
+                        error_messages.append(f"Failed delete: {e!s}")
+                        failed_deleted.update(loader.get_id(item) for item in e.failed + e.unknown)
+                else:
+                    deleted.update(loader.get_id(item) for item in to_delete)
 
             if isinstance(loader, DataModelingLoader):
                 to_create = loader.sort_by_dependencies(to_create)
@@ -208,9 +232,19 @@ def export_to_cdf_iterable(
             try:
                 loader.create(to_create)
             except CogniteAPIError as e:
-                failed_created.update(loader.get_id(item) for item in e.failed + e.unknown)
-                created.update(loader.get_id(item) for item in e.successful)
-                error_messages.append(e.message)
+                if fallback_one_by_one:
+                    for item in to_create:
+                        try:
+                            loader.create([item])
+                        except CogniteAPIError as item_e:
+                            failed_created.add(loader.get_id(item))
+                            error_messages.append(f"Failed create: {item_e!s}")
+                        else:
+                            created.add(loader.get_id(item))
+                else:
+                    failed_created.update(loader.get_id(item) for item in e.failed + e.unknown)
+                    created.update(loader.get_id(item) for item in e.successful)
+                    error_messages.append(f"Failed create: {e!s}")
             else:
                 created.update(loader.get_id(item) for item in to_create)
 
@@ -218,9 +252,19 @@ def export_to_cdf_iterable(
                 try:
                     loader.update(to_update)
                 except CogniteAPIError as e:
-                    failed_changed.update(loader.get_id(item) for item in e.failed + e.unknown)
-                    changed.update(loader.get_id(item) for item in e.successful)
-                    error_messages.append(e.message)
+                    if fallback_one_by_one:
+                        for item in to_update:
+                            try:
+                                loader.update([item])
+                            except CogniteAPIError as e_item:
+                                failed_changed.add(loader.get_id(item))
+                                error_messages.append(f"Failed update: {e_item!s}")
+                            else:
+                                changed.add(loader.get_id(item))
+                    else:
+                        failed_changed.update(loader.get_id(item) for item in e.failed + e.unknown)
+                        changed.update(loader.get_id(item) for item in e.successful)
+                        error_messages.append(f"Failed update: {e!s}")
                 else:
                     changed.update(loader.get_id(item) for item in to_update)
             elif self.existing_handling == "skip":
                 skipped.update(loader.get_id(item) for item in to_update)
             elif self.existing_handling == "fail":
                 failed_changed.update(loader.get_id(item) for item in to_update)
 
@@ -228,14 +272,22 @@ def export_to_cdf_iterable(
+            if loader.resource_name in result_by_name:
+                delete_result = result_by_name[loader.resource_name]
+                deleted.update(delete_result.deleted)
+                failed_deleted.update(delete_result.failed_deleted)
+                error_messages.extend(delete_result.error_messages)
+
             yield UploadResult(
                 name=loader.resource_name,
                 created=created,
                 changed=changed,
+                deleted=deleted,
                 unchanged={loader.get_id(item) for item in unchanged},
                 skipped=skipped,
                 failed_created=failed_created,
                 failed_changed=failed_changed,
+                failed_deleted=failed_deleted,
                 error_messages=error_messages,
                 issues=issue_list,
             )
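All three `fallback_one_by_one` branches above follow the same shape: attempt the batch call, and only on `CogniteAPIError` retry the items individually so that one bad item no longer sinks the whole batch. A condensed sketch of that shape as a hypothetical free function (the real exporter inlines it once per operation):

```python
from collections.abc import Hashable, Sequence

from cognite.client.exceptions import CogniteAPIError


def create_with_fallback(
    loader, to_create: Sequence, fallback_one_by_one: bool
) -> tuple[set[Hashable], set[Hashable], list[str]]:
    """Try a batch create; optionally fall back to item-by-item on failure."""
    created: set[Hashable] = set()
    failed: set[Hashable] = set()
    errors: list[str] = []
    try:
        loader.create(to_create)
    except CogniteAPIError as e:
        if fallback_one_by_one:
            # Retry each item on its own; only the truly bad items end up in `failed`.
            for item in to_create:
                try:
                    loader.create([item])
                except CogniteAPIError as item_e:
                    failed.add(loader.get_id(item))
                    errors.append(f"Failed create: {item_e!s}")
                else:
                    created.add(loader.get_id(item))
        else:
            # Without the fallback, trust the SDK's partitioning of the batch.
            failed.update(loader.get_id(item) for item in e.failed + e.unknown)
            created.update(loader.get_id(item) for item in e.successful)
            errors.append(f"Failed create: {e!s}")
    else:
        created.update(loader.get_id(item) for item in to_create)
    return created, failed, errors
```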
{e!s}") else: changed.update(loader.get_id(item) for item in to_update) elif self.existing_handling == "skip": @@ -228,14 +272,22 @@ def export_to_cdf_iterable( elif self.existing_handling == "fail": failed_changed.update(loader.get_id(item) for item in to_update) + if loader.resource_name in result_by_name: + delete_result = result_by_name[loader.resource_name] + deleted.update(delete_result.deleted) + failed_deleted.update(delete_result.failed_deleted) + error_messages.extend(delete_result.error_messages) + yield UploadResult( name=loader.resource_name, created=created, changed=changed, + deleted=deleted, unchanged={loader.get_id(item) for item in unchanged}, skipped=skipped, failed_created=failed_created, failed_changed=failed_changed, + failed_deleted=failed_deleted, error_messages=error_messages, issues=issue_list, ) diff --git a/cognite/neat/_rules/importers/_spreadsheet2rules.py b/cognite/neat/_rules/importers/_spreadsheet2rules.py index ee3822fc3..14d4abf85 100644 --- a/cognite/neat/_rules/importers/_spreadsheet2rules.py +++ b/cognite/neat/_rules/importers/_spreadsheet2rules.py @@ -47,6 +47,8 @@ ("Classes", "Classes", "Class"), ("Containers", "Containers", "Container"), ("Views", "Views", "View"), + ("Enum", "Enum", "Collection"), + ("Nodes", "Nodes", "Node"), ] MANDATORY_SHEETS_BY_ROLE: dict[RoleTypes, set[str]] = { @@ -112,7 +114,7 @@ def __init__( issue_list: IssueList, required: bool = True, metadata: MetadataRaw | None = None, - sheet_prefix: Literal["", "Last", "Ref"] = "", + sheet_prefix: Literal["", "Last", "Ref", "CDMRef"] = "", ): self.issue_list = issue_list self.required = required @@ -267,6 +269,8 @@ def to_rules(self) -> ReadRules[T_InputRules]: reference_read: ReadResult | None = None if any(sheet_name.startswith("Ref") for sheet_name in user_reader.seen_sheets): reference_read = SpreadsheetReader(issue_list, sheet_prefix="Ref").read(excel_file, self.filepath) + elif any(sheet_name.startswith("CDMRef") for sheet_name in user_reader.seen_sheets): + reference_read = SpreadsheetReader(issue_list, sheet_prefix="CDMRef").read(excel_file, self.filepath) if issue_list.has_errors: return ReadRules(None, issue_list, {}) diff --git a/cognite/neat/_rules/models/dms/_exporter.py b/cognite/neat/_rules/models/dms/_exporter.py index 397b99570..c6c33dc52 100644 --- a/cognite/neat/_rules/models/dms/_exporter.py +++ b/cognite/neat/_rules/models/dms/_exporter.py @@ -402,17 +402,30 @@ def _gather_properties( return container_properties_by_id, view_properties_by_id - @staticmethod def _gather_properties_with_ancestors( + self, view_properties_by_id: dict[dm.ViewId, list[DMSProperty]], views: Sequence[DMSView], ) -> dict[dm.ViewId, list[DMSProperty]]: + all_view_properties_by_id = view_properties_by_id.copy() + if self.rules.reference: + # We need to include t + ref_view_properties_by_id = self._gather_properties(self.rules.reference.properties)[1] + for view_id, properties in ref_view_properties_by_id.items(): + if view_id not in all_view_properties_by_id: + all_view_properties_by_id[view_id] = properties + else: + existing_properties = {prop._identifier() for prop in all_view_properties_by_id[view_id]} + for prop in properties: + if prop._identifier() not in existing_properties: + all_view_properties_by_id[view_id].append(prop) + view_properties_with_parents_by_id: dict[dm.ViewId, list[DMSProperty]] = defaultdict(list) view_by_view_id = {view.view.as_id(): view for view in views} for view in views: view_id = view.view.as_id() seen: set[Hashable] = set() - if view_properties := 
diff --git a/cognite/neat/_rules/models/dms/_exporter.py b/cognite/neat/_rules/models/dms/_exporter.py
index 397b99570..c6c33dc52 100644
--- a/cognite/neat/_rules/models/dms/_exporter.py
+++ b/cognite/neat/_rules/models/dms/_exporter.py
@@ -402,17 +402,30 @@ def _gather_properties(
 
         return container_properties_by_id, view_properties_by_id
 
-    @staticmethod
     def _gather_properties_with_ancestors(
+        self,
         view_properties_by_id: dict[dm.ViewId, list[DMSProperty]],
         views: Sequence[DMSView],
     ) -> dict[dm.ViewId, list[DMSProperty]]:
+        all_view_properties_by_id = view_properties_by_id.copy()
+        if self.rules.reference:
+            # We need to include the properties from the reference rules as well
+            ref_view_properties_by_id = self._gather_properties(self.rules.reference.properties)[1]
+            for view_id, properties in ref_view_properties_by_id.items():
+                if view_id not in all_view_properties_by_id:
+                    all_view_properties_by_id[view_id] = properties
+                else:
+                    existing_properties = {prop._identifier() for prop in all_view_properties_by_id[view_id]}
+                    for prop in properties:
+                        if prop._identifier() not in existing_properties:
+                            all_view_properties_by_id[view_id].append(prop)
+
         view_properties_with_parents_by_id: dict[dm.ViewId, list[DMSProperty]] = defaultdict(list)
         view_by_view_id = {view.view.as_id(): view for view in views}
         for view in views:
             view_id = view.view.as_id()
             seen: set[Hashable] = set()
-            if view_properties := view_properties_by_id.get(view_id):
+            if view_properties := all_view_properties_by_id.get(view_id):
                 view_properties_with_parents_by_id[view_id].extend(view_properties)
                 seen.update(prop._identifier() for prop in view_properties)
             if not view.implements:
@@ -428,7 +441,7 @@ def _gather_properties_with_ancestors(
                     parents.append(grandparent)
                     seen_parents.add(grandparent)
 
-            if not (parent_view_properties := view_properties_by_id.get(parent_view_id)):
+            if not (parent_view_properties := all_view_properties_by_id.get(parent_view_id)):
                 continue
 
             for prop in parent_view_properties:
                 new_prop = prop.model_copy(update={"view": view.view})
diff --git a/cognite/neat/_rules/models/dms/_rules.py b/cognite/neat/_rules/models/dms/_rules.py
index a0273e380..b7063ca11 100644
--- a/cognite/neat/_rules/models/dms/_rules.py
+++ b/cognite/neat/_rules/models/dms/_rules.py
@@ -1,9 +1,8 @@
 import math
-import sys
 import warnings
 from collections.abc import Hashable
 from datetime import datetime
-from typing import TYPE_CHECKING, Any, ClassVar, Literal
+from typing import Any, ClassVar, Literal
 
 import pandas as pd
 from cognite.client import data_modeling as dm
@@ -11,7 +10,7 @@
 from pydantic_core.core_schema import SerializationInfo, ValidationInfo
 from rdflib import URIRef
 
-from cognite.neat._constants import DEFAULT_NAMESPACE
+from cognite.neat._constants import COGNITE_SPACES, DEFAULT_NAMESPACE
 from cognite.neat._issues import MultiValueError
 from cognite.neat._issues.warnings import (
     PrincipleMatchingSpaceAndVersionWarning,
@@ -58,14 +57,6 @@
 from ._schema import DMSSchema
 
-if TYPE_CHECKING:
-    pass
-
-if sys.version_info >= (3, 11):
-    pass
-else:
-    pass
-
 _DEFAULT_VERSION = "1"
 
@@ -199,6 +190,24 @@ def connections_value_type(
             raise ValueError(f"Reverse connection must have a value type that points to a view, got {value}")
         return value
 
+    @field_validator("container", "container_property", mode="after")
+    def container_set_correctly(cls, value: Any, info: ValidationInfo) -> Any:
+        if (connection := info.data.get("connection")) is None:
+            return value
+        if connection == "direct" and value is None:
+            raise ValueError(
+                "You must provide a container and container property for where to store direct connections"
+            )
+        elif isinstance(connection, EdgeEntity) and value is not None:
+            raise ValueError(
+                "Edge connections are not stored in a container, please remove the container and container property"
+            )
+        elif isinstance(connection, ReverseConnectionEntity) and value is not None:
+            raise ValueError(
+                "Reverse connections are not stored in a container, please remove the container and container property"
+            )
+        return value
+
     @field_serializer("reference", when_used="always")
     def set_reference(self, value: Any, info: SerializationInfo) -> str | None:
         if isinstance(info.context, dict) and info.context.get("as_reference") is True:
@@ -423,7 +432,11 @@ def matching_version_and_space(cls, value: SheetList[DMSView], info: ValidationI
         if not (metadata := info.data.get("metadata")):
             return value
         model_version = metadata.version
-        if different_version := [view.view.as_id() for view in value if view.view.version != model_version]:
+        if different_version := [
+            view.view.as_id()
+            for view in value
+            if view.view.version != model_version and view.view.space not in COGNITE_SPACES
+        ]:
             for view_id in different_version:
                 warnings.warn(
                     PrincipleMatchingSpaceAndVersionWarning(
@@ -431,7 +444,11 @@ def matching_version_and_space(cls, value: SheetList[DMSView], info: ValidationI
                     ),
                     stacklevel=2,
                 )
-        if different_space := [view.view.as_id() for view in value if view.view.space != metadata.space]:
+        if different_space := [
+            view.view.as_id()
+            for view in value
+            if view.view.space != metadata.space and view.view.space not in COGNITE_SPACES
+        ]:
             for view_id in different_space:
                 warnings.warn(
                     PrincipleMatchingSpaceAndVersionWarning(
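The new `container_set_correctly` validator relies on a pydantic v2 detail: with `mode="after"`, `info.data` only contains fields declared *before* the validated field, so `connection` must precede `container` on the model for the lookup to work. A toy reproduction of the pattern (simplified model; string connections stand in for the real `EdgeEntity`/`ReverseConnectionEntity` types):

```python
from pydantic import BaseModel, ValidationError, ValidationInfo, field_validator


class ToyProperty(BaseModel):
    connection: str | None = None  # declared first, so it is visible in info.data below
    container: str | None = None

    @field_validator("container", mode="after")
    @classmethod
    def container_set_correctly(cls, value: str | None, info: ValidationInfo) -> str | None:
        connection = info.data.get("connection")
        if connection == "direct" and value is None:
            raise ValueError("direct connections must state a container")
        if connection == "edge" and value is not None:
            raise ValueError("edge connections are not stored in a container")
        return value


ToyProperty(connection="direct", container="Pump")  # ok
try:
    ToyProperty(connection="edge", container="Pump")
except ValidationError as err:
    print(err.errors()[0]["msg"])  # "Value error, edge connections are not stored in a container"
```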
diff --git a/cognite/neat/_session/_to.py b/cognite/neat/_session/_to.py
index f28bcb949..62a9839df 100644
--- a/cognite/neat/_session/_to.py
+++ b/cognite/neat/_session/_to.py
@@ -62,12 +62,18 @@ def instances(self, space: str | None = None):
 
         return loader.load_into_cdf(self._client)
 
-    def data_model(self, existing_handling: Literal["fail", "skip", "update", "force"] = "skip", dry_run: bool = False):
+    def data_model(
+        self,
+        existing_handling: Literal["fail", "skip", "update", "force"] = "skip",
+        dry_run: bool = False,
+        fallback_one_by_one: bool = False,
+    ):
         """Export the verified DMS data model to CDF.
 
         Args:
             existing_handling: How to handle if component of data model exists. Defaults to "skip".
             dry_run: If True, no changes will be made to CDF. Defaults to False.
+            fallback_one_by_one: If True, will fall back to one-by-one upload if batch upload fails. Defaults to False.
 
         ... note::
 
@@ -85,7 +91,9 @@ def data_model(self, existing_handling: Literal["fail", "skip", "update", "force
 
         conversion_issues = IssueList(action="to.cdf.data_model")
         with catch_warnings(conversion_issues):
-            result = exporter.export_to_cdf(self._state.data_model.last_verified_dms_rules[1], self._client, dry_run)
+            result = exporter.export_to_cdf(
+                self._state.data_model.last_verified_dms_rules[1], self._client, dry_run, fallback_one_by_one
+            )
         result.insert(0, UploadResultCore(name="schema", issues=conversion_issues))
         self._state.data_model.outcome.append(result)
         print("You can inspect the details with the .inspect.outcome(...) method.")
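Taken together, the flag now flows `NeatSession.to.cdf.data_model()` → `CDFExporter.export_to_cdf()` → `export_to_cdf_iterable()`. A hedged usage sketch, assuming a `NeatSession` named `neat` that is wired to a `CogniteClient` and already holds a verified DMS model:

```python
# Not runnable standalone: requires a configured NeatSession with a verified model.
neat.to.cdf.data_model(
    existing_handling="force",   # delete and recreate components that already exist
    dry_run=False,
    fallback_one_by_one=True,    # retry item by item when a batch call is rejected
)
neat.inspect.outcome()  # the inspection call suggested by the print() above
```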
method.") diff --git a/cognite/neat/_utils/cdf/loaders/_base.py b/cognite/neat/_utils/cdf/loaders/_base.py index 54fc501df..fe4c014b9 100644 --- a/cognite/neat/_utils/cdf/loaders/_base.py +++ b/cognite/neat/_utils/cdf/loaders/_base.py @@ -27,7 +27,7 @@ def __init__(self, client: CogniteClient) -> None: @classmethod @abstractmethod - def get_id(cls, item: T_WriteClass | T_WritableCogniteResource) -> T_ID: + def get_id(cls, item: T_WriteClass | T_WritableCogniteResource | dict | T_ID) -> T_ID: raise NotImplementedError @classmethod diff --git a/cognite/neat/_utils/cdf/loaders/_data_modeling.py b/cognite/neat/_utils/cdf/loaders/_data_modeling.py index bb682e9f1..5a36d3168 100644 --- a/cognite/neat/_utils/cdf/loaders/_data_modeling.py +++ b/cognite/neat/_utils/cdf/loaders/_data_modeling.py @@ -80,8 +80,12 @@ class SpaceLoader(DataModelingLoader[str, SpaceApply, Space, SpaceApplyList, Spa resource_name = "spaces" @classmethod - def get_id(cls, item: Space | SpaceApply) -> str: - return item.space + def get_id(cls, item: Space | SpaceApply | str | dict) -> str: + if isinstance(item, Space | SpaceApply): + return item.space + if isinstance(item, dict): + return item["space"] + return item def create(self, items: Sequence[SpaceApply]) -> SpaceList: return self.client.data_modeling.spaces.apply(items) @@ -149,8 +153,12 @@ def __init__(self, client: CogniteClient, existing_handling: Literal["fail", "sk self._tried_force_deploy: set[ViewId] = set() @classmethod - def get_id(cls, item: View | ViewApply) -> ViewId: - return item.as_id() + def get_id(cls, item: View | ViewApply | ViewId | dict) -> ViewId: + if isinstance(item, View | ViewApply): + return item.as_id() + if isinstance(item, dict): + return ViewId.load(item) + return item def create(self, items: Sequence[ViewApply]) -> ViewList: if self.existing_handling == "force": @@ -247,8 +255,12 @@ def __init__(self, client: CogniteClient, existing_handling: Literal["fail", "sk self._tried_force_deploy: set[ContainerId] = set() @classmethod - def get_id(cls, item: Container | ContainerApply) -> ContainerId: - return item.as_id() + def get_id(cls, item: Container | ContainerApply | ContainerId | dict) -> ContainerId: + if isinstance(item, Container | ContainerApply): + return item.as_id() + if isinstance(item, dict): + return ContainerId.load(item) + return item def sort_by_dependencies(self, items: Sequence[ContainerApply]) -> list[ContainerApply]: container_by_id = {container.as_id(): container for container in items} @@ -292,8 +304,12 @@ class DataModelLoader(DataModelingLoader[DataModelId, DataModelApply, DataModel, resource_name = "data_models" @classmethod - def get_id(cls, item: DataModel | DataModelApply) -> DataModelId: - return item.as_id() + def get_id(cls, item: DataModel | DataModelApply | DataModelId | dict) -> DataModelId: + if isinstance(item, DataModel | DataModelApply): + return item.as_id() + if isinstance(item, dict): + return DataModelId.load(item) + return item def create(self, items: Sequence[DataModelApply]) -> DataModelList: return self.client.data_modeling.data_models.apply(items) diff --git a/cognite/neat/_utils/cdf/loaders/_ingestion.py b/cognite/neat/_utils/cdf/loaders/_ingestion.py index 298941ebd..ec49d77c0 100644 --- a/cognite/neat/_utils/cdf/loaders/_ingestion.py +++ b/cognite/neat/_utils/cdf/loaders/_ingestion.py @@ -26,10 +26,16 @@ class TransformationLoader( resource_name = "transformations" @classmethod - def get_id(cls, item: Transformation | TransformationWrite) -> str: - if item.external_id is None: - raise 
ValueError(f"Transformation {item} does not have an external_id") - return item.external_id + def get_id(cls, item: Transformation | TransformationWrite | str | dict) -> str: + if isinstance(item, Transformation | TransformationWrite): + if item.external_id is None: + raise ValueError(f"Transformation {item} does not have an external_id") + return item.external_id + if isinstance(item, dict): + if item.get("externalId") is None: + raise ValueError(f"Transformation {item} does not have an external_id") + return item["externalId"] + return item def create(self, items: Sequence[TransformationWrite]) -> TransformationList: return self.client.transformations.create(items) @@ -50,10 +56,16 @@ class RawDatabaseLoader(ResourceLoader[str, DatabaseWrite, Database, DatabaseWri resource_name = "databases" @classmethod - def get_id(cls, item: Database | DatabaseWrite) -> str: - if item.name is None: - raise ValueError(f"Database {item} does not have a name") - return item.name + def get_id(cls, item: Database | DatabaseWrite | str | dict) -> str: + if isinstance(item, Database | DatabaseWrite): + if item.name is None: + raise ValueError(f"Database {item} does not have a name") + return item.name + if isinstance(item, dict): + if item.get("name") is None: + raise ValueError(f"Database {item} does not have a name") + return item["name"] + return item def create(self, items: Sequence[DatabaseWrite]) -> DatabaseList: return self.client.raw.databases.create([item.name for item in items if item.name is not None]) @@ -78,8 +90,12 @@ class RawTableLoader(ResourceLoader[RawTableID, RawTableWrite, RawTable, RawTabl resource_name = "tables" @classmethod - def get_id(cls, item: RawTable | RawTableWrite) -> RawTableID: - return item.as_id() + def get_id(cls, item: RawTable | RawTableWrite | RawTableID | dict) -> RawTableID: + if isinstance(item, RawTable | RawTableWrite): + return item.as_id() + if isinstance(item, dict): + return RawTableID(database=item["database"], table=item["name"]) + return item @overload def _groupby_database(self, items: Sequence[RawTableWrite]) -> Iterable[tuple[str, Iterable[RawTableWrite]]]: ... diff --git a/cognite/neat/_version.py b/cognite/neat/_version.py index 693b5782b..0f52e8515 100644 --- a/cognite/neat/_version.py +++ b/cognite/neat/_version.py @@ -1 +1 @@ -__version__ = "0.96.4" +__version__ = "0.96.6" diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 9c21e1a80..8867b3e1f 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -19,6 +19,31 @@ Changes are grouped as follows: ### Added - Added provenance on rules in NeatSession +## [0.96.6] - 08-11-**2024** +### Fixed +- `neat.verify()` no longer gives a `PrincipleMatchingSpaceAndVersionWarning` when you include views from + the `CogniteCore` or `CogniteProcessIndustry` data models. +- In the `DMSSheet` you will now get a `RowError` if you try to set `container` or `container property` for + an edge or reverse direct relation as these are not stored in containers. +- `neat.read.excel(...)` now correctly reads the `Enum` and `Nodes` sheets. +- In the `DMSSheet`, `reverse` relations no longer give a `RowError` if the reverse property is referencing + a property in the reference sheets. + +## [0.96.5] - 07-11-**2024** +### Fixed +- Serializing `ResourceNotDefinedError` class no longer raises a `ValueError`. This happens when a `ResourceNotDefinedError` + is found, for example, when calling `neat.verify()`. 
+## [0.96.5] - 07-11-**2024**
+### Fixed
+- Serializing a `ResourceNotDefinedError` no longer raises a `ValueError`. This happens when a `ResourceNotDefinedError`
+  is found, for example, when calling `neat.verify()`.
+- Setting `neat.to.cdf.data_model(existing_handling='force')` will now correctly delete and recreate views and containers
+  if they already exist in CDF.
+
+### Improved
+- When running `neat.to.cdf.data_model()`, the entire response from CDF is now stored as an error message, not just the
+  message text.
+
+### Added
+- `neat.to.cdf.data_model()` now has a `fallback_one_by_one` parameter. If set to `True`, the views/containers will
+  be created one by one if the batch creation fails.
+
 ## [0.96.4] - 05-11-**2024**
 ### Fixed
 - `neat.to.excel` or `neat.to.yaml` now correctly writes `ViewTypes` and `Edge` that do not have the default
diff --git a/pyproject.toml b/pyproject.toml
index d15a3bc24..14a197ab4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "cognite-neat"
-version = "0.96.4"
+version = "0.96.6"
 readme = "README.md"
 description = "Knowledge graph transformation"
 authors = [
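The changelog's "entire response" note corresponds to the `e.message` → `f"{e!s}"` switch in the exporter: `str()` on a `CogniteAPIError` renders the full error, including the status code (and, when present, the failed/successful item breakdown), while `.message` is only the bare text. A small sketch, assuming the SDK's standard formatting:

```python
from cognite.client.exceptions import CogniteAPIError

try:
    # A locally raised error stands in for a rejected API call.
    raise CogniteAPIError("Invalid container reference", code=400)
except CogniteAPIError as e:
    assert e.message == "Invalid container reference"
    print(f"Failed create: {e!s}")  # the full rendering, not just the bare text
```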
diff --git a/tests/tests_integration/test_rules/test_exporters/test_dms_exporters.py b/tests/tests_integration/test_rules/test_exporters/test_dms_exporters.py
index 2bdcc2a3d..639a442af 100644
--- a/tests/tests_integration/test_rules/test_exporters/test_dms_exporters.py
+++ b/tests/tests_integration/test_rules/test_exporters/test_dms_exporters.py
@@ -170,18 +170,22 @@ def test_export_alice_to_cdf(self, cognite_client: CogniteClient, alice_rules: D
         uploaded = exporter.export_to_cdf_iterable(rules, cognite_client, dry_run=False)
 
         uploaded_by_name = {entity.name: entity for entity in uploaded}
-        assert uploaded_by_name["containers"].success == len(rules.containers)
+        assert uploaded_by_name["containers"].success == len(rules.containers) * 2  # 2 x due to delete and create
         assert uploaded_by_name["containers"].failed == 0
 
-        assert uploaded_by_name["views"].success == len(rules.views)
+        assert uploaded_by_name["views"].success == len(rules.views) * 2  # 2 x due to delete and create
         assert uploaded_by_name["views"].failed == 0
 
-        assert uploaded_by_name["data_models"].success == 1
+        assert uploaded_by_name["data_models"].success == 1 * 2  # 2 x due to delete and create
         assert uploaded_by_name["data_models"].failed == 0
 
-        assert uploaded_by_name["spaces"].success == 1
+        assert uploaded_by_name["spaces"].success == 1  # Space is not deleted
         assert uploaded_by_name["spaces"].failed == 0
 
+    @pytest.mark.skip(
+        "We are not exposing the functionality any more. "
+        "It is up for discussion if we should keep it. Thus, the test is not maintained."
+    )
     def test_export_pipeline_populate_and_retrieve_data(
         self, cognite_client: CogniteClient, table_example: InformationRules, table_example_data: dict[str, list[str]]
     ) -> None:
@@ -259,13 +263,14 @@ def test_export_olav_dms_to_cdf(self, cognite_client: CogniteClient, olav_dms_ru
         uploaded = exporter.export_to_cdf_iterable(rules, cognite_client, dry_run=False)
 
         uploaded_by_name = {entity.name: entity for entity in uploaded}
-        assert uploaded_by_name["containers"].success == len(rules.containers)
+        # We have to double the amount of entities due to the delete and create
+        assert uploaded_by_name["containers"].success == len(rules.containers) * 2
         assert uploaded_by_name["containers"].failed == 0
 
-        assert uploaded_by_name["views"].success == len(rules.views)
+        assert uploaded_by_name["views"].success == len(rules.views) * 2
         assert uploaded_by_name["views"].failed == 0
 
-        assert uploaded_by_name["data_models"].success == 1
+        assert uploaded_by_name["data_models"].success == 1 * 2
         assert uploaded_by_name["data_models"].failed == 0
 
         assert uploaded_by_name["spaces"].success == 1
@@ -292,13 +297,14 @@ def test_export_svein_harald_dms_to_cdf(
         uploaded = exporter.export_to_cdf(rules, cognite_client, dry_run=False)
 
         uploaded_by_name = {entity.name: entity for entity in uploaded}
-        assert uploaded_by_name["containers"].success == len(rules.containers)
+        # We have to double the amount of entities due to the delete and create
+        assert uploaded_by_name["containers"].success == len(rules.containers) * 2
         assert uploaded_by_name["containers"].failed == 0
 
-        assert uploaded_by_name["views"].success == len(schema.views)
+        assert uploaded_by_name["views"].success == len(schema.views) * 2
         assert uploaded_by_name["views"].failed == 0
 
-        assert uploaded_by_name["data_models"].success == 1
+        assert uploaded_by_name["data_models"].success == 1 * 2
         assert uploaded_by_name["data_models"].failed == 0
 
         assert uploaded_by_name["spaces"].success == 1
@@ -355,13 +361,14 @@ def test_export_olav_updated_dms_to_cdf(
         uploaded = exporter.export_to_cdf_iterable(rules, cognite_client, dry_run=False)
 
         uploaded_by_name = {entity.name: entity for entity in uploaded}
-        assert uploaded_by_name["containers"].success == len(schema.containers)
+        # We have to double the amount of entities due to the delete and create
+        assert uploaded_by_name["containers"].success == len(schema.containers) * 2
        assert uploaded_by_name["containers"].failed == 0
 
-        assert uploaded_by_name["views"].success == len(schema.views)
+        assert uploaded_by_name["views"].success == len(schema.views) * 2
         assert uploaded_by_name["views"].failed == 0
 
-        assert uploaded_by_name["data_models"].success == 1
+        assert uploaded_by_name["data_models"].success == 1 * 2
         assert uploaded_by_name["data_models"].failed == 0
 
         assert uploaded_by_name["spaces"].success == 1
diff --git a/tests/tests_unit/test_issues/test_issue_behavior.py b/tests/tests_unit/test_issues/test_issue_behavior.py
index 579168be1..05adb97a6 100644
--- a/tests/tests_unit/test_issues/test_issue_behavior.py
+++ b/tests/tests_unit/test_issues/test_issue_behavior.py
@@ -1,7 +1,9 @@
 import warnings
 
+from cognite.client import data_modeling as dm
+
 from cognite.neat._issues import IssueList
-from cognite.neat._issues.errors import ResourceCreationError
+from cognite.neat._issues.errors import ResourceCreationError, ResourceNotDefinedError
 from cognite.neat._issues.warnings import NeatValueWarning
 from cognite.neat._rules.transformers._verification import _catch_issues
 
@@ -26,3 +28,15 @@ def test_warning_in_contextmanager(self) -> None:
             warnings.warn(my_warning, stacklevel=2)
 
         assert warning_list == IssueList([my_warning])
+
+    def test_dump_generic_specified(self) -> None:
+        my_issue = ResourceNotDefinedError[dm.ViewId](
+            identifier=dm.ViewId("neat", "SKUKpi", "v1"),
+            location="View Sheet",
+            row_number=66,
+            sheet_name="Properties",
+            resource_type="view",
+        )
+        dumped = my_issue.dump()
+
+        assert isinstance(dumped, dict)