Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Release 0.96.5 #718

Merged
merged 4 commits into from
Nov 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
.PHONY: run-explorer run-tests run-linters build-ui build-python build-docker run-docker compose-up
version="0.96.4"
version="0.96.5"
run-explorer:
@echo "Running explorer API server..."
# open "http://localhost:8000/static/index.html" || true
Expand Down
6 changes: 5 additions & 1 deletion cognite/neat/_issues/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,11 @@ def _get_variables(self) -> tuple[dict[str, str], bool]:
def dump(self) -> dict[str, Any]:
"""Return a dictionary representation of the issue."""
variables = vars(self)
output = {to_camel(key): self._dump_value(value) for key, value in variables.items() if value is not None}
output = {
to_camel(key): self._dump_value(value)
for key, value in variables.items()
if not (value is None or key.startswith("_"))
}
output["NeatIssue"] = type(self).__name__
return output

Expand Down
8 changes: 5 additions & 3 deletions cognite/neat/_rules/exporters/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,11 @@ def _repr_html_(cls) -> str:
class CDFExporter(BaseExporter[T_VerifiedRules, T_Export]):
@abstractmethod
def export_to_cdf_iterable(
self, rules: T_VerifiedRules, client: CogniteClient, dry_run: bool = False
self, rules: T_VerifiedRules, client: CogniteClient, dry_run: bool = False, fallback_one_by_one: bool = False
) -> Iterable[UploadResult]:
raise NotImplementedError

def export_to_cdf(self, rules: T_VerifiedRules, client: CogniteClient, dry_run: bool = False) -> UploadResultList:
return UploadResultList(self.export_to_cdf_iterable(rules, client, dry_run))
def export_to_cdf(
self, rules: T_VerifiedRules, client: CogniteClient, dry_run: bool = False, fallback_one_by_one: bool = False
) -> UploadResultList:
return UploadResultList(self.export_to_cdf_iterable(rules, client, dry_run, fallback_one_by_one))
70 changes: 61 additions & 9 deletions cognite/neat/_rules/exporters/_rules2dms.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,12 +118,16 @@ def _create_exclude_set(self):
def export(self, rules: DMSRules) -> DMSSchema:
return rules.as_schema(include_pipeline=self.export_pipeline, instance_space=self.instance_space)

def delete_from_cdf(self, rules: DMSRules, client: CogniteClient, dry_run: bool = False) -> Iterable[UploadResult]:
def delete_from_cdf(
self, rules: DMSRules, client: CogniteClient, dry_run: bool = False, skip_space: bool = False
) -> Iterable[UploadResult]:
to_export = self._prepare_exporters(rules, client)

# we need to reverse order in which we are picking up the items to delete
# as they are sorted in the order of creation and we need to delete them in reverse order
for items, loader in reversed(to_export):
if skip_space and isinstance(loader, SpaceLoader):
continue
item_ids = loader.get_ids(items)
existing_items = loader.retrieve(item_ids)
existing_ids = loader.get_ids(existing_items)
Expand Down Expand Up @@ -162,10 +166,15 @@ def delete_from_cdf(self, rules: DMSRules, client: CogniteClient, dry_run: bool
)

def export_to_cdf_iterable(
self, rules: DMSRules, client: CogniteClient, dry_run: bool = False
self, rules: DMSRules, client: CogniteClient, dry_run: bool = False, fallback_one_by_one: bool = False
) -> Iterable[UploadResult]:
to_export = self._prepare_exporters(rules, client)

result_by_name = {}
if self.existing_handling == "force":
for delete_result in self.delete_from_cdf(rules, client, dry_run, skip_space=True):
result_by_name[delete_result.name] = delete_result

redeploy_data_model = False
for items, loader in to_export:
# The conversion from DMS to GraphQL does not seem to be triggered even if the views
Expand All @@ -183,8 +192,10 @@ def export_to_cdf_iterable(
created: set[Hashable] = set()
skipped: set[Hashable] = set()
changed: set[Hashable] = set()
deleted: set[Hashable] = set()
failed_created: set[Hashable] = set()
failed_changed: set[Hashable] = set()
failed_deleted: set[Hashable] = set()
error_messages: list[str] = []
if dry_run:
if self.existing_handling in ["update", "force"]:
Expand All @@ -200,42 +211,83 @@ def export_to_cdf_iterable(
try:
loader.delete(to_delete)
except CogniteAPIError as e:
error_messages.append(f"Failed delete: {e.message}")
if fallback_one_by_one:
for item in to_delete:
try:
loader.delete([item])
except CogniteAPIError as item_e:
failed_deleted.add(loader.get_id(item))
error_messages.append(f"Failed delete: {item_e!s}")
else:
deleted.add(loader.get_id(item))
else:
error_messages.append(f"Failed delete: {e!s}")
failed_deleted.update(loader.get_id(item) for item in e.failed + e.unknown)
else:
deleted.update(loader.get_id(item) for item in to_delete)

if isinstance(loader, DataModelingLoader):
to_create = loader.sort_by_dependencies(to_create)

try:
loader.create(to_create)
except CogniteAPIError as e:
failed_created.update(loader.get_id(item) for item in e.failed + e.unknown)
created.update(loader.get_id(item) for item in e.successful)
error_messages.append(e.message)
if fallback_one_by_one:
for item in to_create:
try:
loader.create([item])
except CogniteAPIError as item_e:
failed_created.add(loader.get_id(item))
error_messages.append(f"Failed create: {item_e!s}")
else:
created.add(loader.get_id(item))
else:
failed_created.update(loader.get_id(item) for item in e.failed + e.unknown)
created.update(loader.get_id(item) for item in e.successful)
error_messages.append(f"Failed create: {e!s}")
else:
created.update(loader.get_id(item) for item in to_create)

if self.existing_handling in ["update", "force"]:
try:
loader.update(to_update)
except CogniteAPIError as e:
failed_changed.update(loader.get_id(item) for item in e.failed + e.unknown)
changed.update(loader.get_id(item) for item in e.successful)
error_messages.append(e.message)
if fallback_one_by_one:
for item in to_update:
try:
loader.update([item])
except CogniteAPIError as e_item:
failed_changed.add(loader.get_id(item))
error_messages.append(f"Failed update: {e_item!s}")
else:
changed.add(loader.get_id(item))
else:
failed_changed.update(loader.get_id(item) for item in e.failed + e.unknown)
changed.update(loader.get_id(item) for item in e.successful)
error_messages.append(f"Failed update: {e!s}")
else:
changed.update(loader.get_id(item) for item in to_update)
elif self.existing_handling == "skip":
skipped.update(loader.get_id(item) for item in to_update)
elif self.existing_handling == "fail":
failed_changed.update(loader.get_id(item) for item in to_update)

if loader.resource_name in result_by_name:
delete_result = result_by_name[loader.resource_name]
deleted.update(delete_result.deleted)
failed_deleted.update(delete_result.failed_deleted)
error_messages.extend(delete_result.error_messages)

yield UploadResult(
name=loader.resource_name,
created=created,
changed=changed,
deleted=deleted,
unchanged={loader.get_id(item) for item in unchanged},
skipped=skipped,
failed_created=failed_created,
failed_changed=failed_changed,
failed_deleted=failed_deleted,
error_messages=error_messages,
issues=issue_list,
)
Expand Down
12 changes: 10 additions & 2 deletions cognite/neat/_session/_to.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,12 +63,18 @@ def instances(self, space: str | None = None):

return loader.load_into_cdf(self._client)

def data_model(self, existing_handling: Literal["fail", "skip", "update", "force"] = "skip", dry_run: bool = False):
def data_model(
self,
existing_handling: Literal["fail", "skip", "update", "force"] = "skip",
dry_run: bool = False,
fallback_one_by_one: bool = False,
):
"""Export the verified DMS data model to CDF.

Args:
existing_handling: How to handle if component of data model exists. Defaults to "skip".
dry_run: If True, no changes will be made to CDF. Defaults to False.
fallback_one_by_one: If True, will fall back to one-by-one upload if batch upload fails. Defaults to False.

... note::

Expand All @@ -88,7 +94,9 @@ def data_model(self, existing_handling: Literal["fail", "skip", "update", "force

conversion_issues = IssueList(action="to.cdf.data_model")
with catch_warnings(conversion_issues):
result = exporter.export_to_cdf(self._state.last_verified_dms_rules, self._client, dry_run)
result = exporter.export_to_cdf(
self._state.last_verified_dms_rules, self._client, dry_run, fallback_one_by_one
)
result.insert(0, UploadResultCore(name="schema", issues=conversion_issues))
self._state.outcome.append(result)
print("You can inspect the details with the .inspect.outcome(...) method.")
Expand Down
2 changes: 1 addition & 1 deletion cognite/neat/_utils/cdf/loaders/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def __init__(self, client: CogniteClient) -> None:

@classmethod
@abstractmethod
def get_id(cls, item: T_WriteClass | T_WritableCogniteResource) -> T_ID:
def get_id(cls, item: T_WriteClass | T_WritableCogniteResource | dict | T_ID) -> T_ID:
raise NotImplementedError

@classmethod
Expand Down
32 changes: 24 additions & 8 deletions cognite/neat/_utils/cdf/loaders/_data_modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,12 @@ class SpaceLoader(DataModelingLoader[str, SpaceApply, Space, SpaceApplyList, Spa
resource_name = "spaces"

@classmethod
def get_id(cls, item: Space | SpaceApply) -> str:
return item.space
def get_id(cls, item: Space | SpaceApply | str | dict) -> str:
if isinstance(item, Space | SpaceApply):
return item.space
if isinstance(item, dict):
return item["space"]
return item

def create(self, items: Sequence[SpaceApply]) -> SpaceList:
return self.client.data_modeling.spaces.apply(items)
Expand Down Expand Up @@ -149,8 +153,12 @@ def __init__(self, client: CogniteClient, existing_handling: Literal["fail", "sk
self._tried_force_deploy: set[ViewId] = set()

@classmethod
def get_id(cls, item: View | ViewApply) -> ViewId:
return item.as_id()
def get_id(cls, item: View | ViewApply | ViewId | dict) -> ViewId:
if isinstance(item, View | ViewApply):
return item.as_id()
if isinstance(item, dict):
return ViewId.load(item)
return item

def create(self, items: Sequence[ViewApply]) -> ViewList:
if self.existing_handling == "force":
Expand Down Expand Up @@ -247,8 +255,12 @@ def __init__(self, client: CogniteClient, existing_handling: Literal["fail", "sk
self._tried_force_deploy: set[ContainerId] = set()

@classmethod
def get_id(cls, item: Container | ContainerApply) -> ContainerId:
return item.as_id()
def get_id(cls, item: Container | ContainerApply | ContainerId | dict) -> ContainerId:
if isinstance(item, Container | ContainerApply):
return item.as_id()
if isinstance(item, dict):
return ContainerId.load(item)
return item

def sort_by_dependencies(self, items: Sequence[ContainerApply]) -> list[ContainerApply]:
container_by_id = {container.as_id(): container for container in items}
Expand Down Expand Up @@ -292,8 +304,12 @@ class DataModelLoader(DataModelingLoader[DataModelId, DataModelApply, DataModel,
resource_name = "data_models"

@classmethod
def get_id(cls, item: DataModel | DataModelApply) -> DataModelId:
return item.as_id()
def get_id(cls, item: DataModel | DataModelApply | DataModelId | dict) -> DataModelId:
if isinstance(item, DataModel | DataModelApply):
return item.as_id()
if isinstance(item, dict):
return DataModelId.load(item)
return item

def create(self, items: Sequence[DataModelApply]) -> DataModelList:
return self.client.data_modeling.data_models.apply(items)
Expand Down
36 changes: 26 additions & 10 deletions cognite/neat/_utils/cdf/loaders/_ingestion.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,16 @@ class TransformationLoader(
resource_name = "transformations"

@classmethod
def get_id(cls, item: Transformation | TransformationWrite) -> str:
if item.external_id is None:
raise ValueError(f"Transformation {item} does not have an external_id")
return item.external_id
def get_id(cls, item: Transformation | TransformationWrite | str | dict) -> str:
if isinstance(item, Transformation | TransformationWrite):
if item.external_id is None:
raise ValueError(f"Transformation {item} does not have an external_id")
return item.external_id
if isinstance(item, dict):
if item.get("externalId") is None:
raise ValueError(f"Transformation {item} does not have an external_id")
return item["externalId"]
return item

def create(self, items: Sequence[TransformationWrite]) -> TransformationList:
return self.client.transformations.create(items)
Expand All @@ -50,10 +56,16 @@ class RawDatabaseLoader(ResourceLoader[str, DatabaseWrite, Database, DatabaseWri
resource_name = "databases"

@classmethod
def get_id(cls, item: Database | DatabaseWrite) -> str:
if item.name is None:
raise ValueError(f"Database {item} does not have a name")
return item.name
def get_id(cls, item: Database | DatabaseWrite | str | dict) -> str:
if isinstance(item, Database | DatabaseWrite):
if item.name is None:
raise ValueError(f"Database {item} does not have a name")
return item.name
if isinstance(item, dict):
if item.get("name") is None:
raise ValueError(f"Database {item} does not have a name")
return item["name"]
return item

def create(self, items: Sequence[DatabaseWrite]) -> DatabaseList:
return self.client.raw.databases.create([item.name for item in items if item.name is not None])
Expand All @@ -78,8 +90,12 @@ class RawTableLoader(ResourceLoader[RawTableID, RawTableWrite, RawTable, RawTabl
resource_name = "tables"

@classmethod
def get_id(cls, item: RawTable | RawTableWrite) -> RawTableID:
return item.as_id()
def get_id(cls, item: RawTable | RawTableWrite | RawTableID | dict) -> RawTableID:
if isinstance(item, RawTable | RawTableWrite):
return item.as_id()
if isinstance(item, dict):
return RawTableID(database=item["database"], table=item["name"])
return item

@overload
def _groupby_database(self, items: Sequence[RawTableWrite]) -> Iterable[tuple[str, Iterable[RawTableWrite]]]: ...
Expand Down
2 changes: 1 addition & 1 deletion cognite/neat/_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "0.96.4"
__version__ = "0.96.5"
15 changes: 15 additions & 0 deletions docs/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,21 @@ Changes are grouped as follows:
- `Fixed` for any bug fixes.
- `Security` in case of vulnerabilities.

## [0.96.5] - 07-11-**2024**
### Fixed
- Serializing `ResourceNotDefinedError` class no longer raises a `ValueError`. This happens when a `ResourceNotDefinedError`
is found, for example, when calling `neat.verify()`.
- Setting `neat.to.cdf.data_model(existing_handling='force')` will now correctly delete and recreate views and containers
  if they already exist in CDF.

### Improved
- When running `neat.to.cdf.data_model()` the entire response from CDF is now stored as an error message, not just the
text.

### Added
- `neat.to.cdf.data_model()` now has a `fallback_one_by_one` parameter. If set to `True`, the views/containers will
be created one by one, if the batch creation fails.

## [0.96.4] - 05-11-**2024**
### Fixed
- `neat.to.excel` or `neat.to.yaml` now correctly writes `ViewTypes` and `Edge` that do not have the default
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "cognite-neat"
version = "0.96.4"
version = "0.96.5"
readme = "README.md"
description = "Knowledge graph transformation"
authors = [
Expand Down
Loading
Loading