diff --git a/cognite/neat/_rules/exporters/_rules2dms.py b/cognite/neat/_rules/exporters/_rules2dms.py
index fc8402bdc..14e91904e 100644
--- a/cognite/neat/_rules/exporters/_rules2dms.py
+++ b/cognite/neat/_rules/exporters/_rules2dms.py
@@ -118,12 +118,16 @@ def _create_exclude_set(self):
     def export(self, rules: DMSRules) -> DMSSchema:
         return rules.as_schema(include_pipeline=self.export_pipeline, instance_space=self.instance_space)
 
-    def delete_from_cdf(self, rules: DMSRules, client: CogniteClient, dry_run: bool = False) -> Iterable[UploadResult]:
+    def delete_from_cdf(
+        self, rules: DMSRules, client: CogniteClient, dry_run: bool = False, skip_space: bool = False
+    ) -> Iterable[UploadResult]:
         to_export = self._prepare_exporters(rules, client)
 
         # we need to reverse order in which we are picking up the items to delete
         # as they are sorted in the order of creation and we need to delete them in reverse order
         for items, loader in reversed(to_export):
+            if skip_space and isinstance(loader, SpaceLoader):
+                continue
             item_ids = loader.get_ids(items)
             existing_items = loader.retrieve(item_ids)
             existing_ids = loader.get_ids(existing_items)
@@ -168,7 +172,7 @@ def export_to_cdf_iterable(
 
         result_by_name = {}
         if self.existing_handling == "force":
-            for delete_result in self.delete_from_cdf(rules, client, dry_run):
+            for delete_result in self.delete_from_cdf(rules, client, dry_run, skip_space=True):
                 result_by_name[delete_result.name] = delete_result
 
         redeploy_data_model = False
diff --git a/tests/tests_integration/test_rules/test_exporters/test_dms_exporters.py b/tests/tests_integration/test_rules/test_exporters/test_dms_exporters.py
index 2bdcc2a3d..639a442af 100644
--- a/tests/tests_integration/test_rules/test_exporters/test_dms_exporters.py
+++ b/tests/tests_integration/test_rules/test_exporters/test_dms_exporters.py
@@ -170,18 +170,22 @@ def test_export_alice_to_cdf(self, cognite_client: CogniteClient, alice_rules: D
         uploaded = exporter.export_to_cdf_iterable(rules, cognite_client, dry_run=False)
 
         uploaded_by_name = {entity.name: entity for entity in uploaded}
-        assert uploaded_by_name["containers"].success == len(rules.containers)
+        assert uploaded_by_name["containers"].success == len(rules.containers) * 2  # 2x due to delete and create
         assert uploaded_by_name["containers"].failed == 0
 
-        assert uploaded_by_name["views"].success == len(rules.views)
+        assert uploaded_by_name["views"].success == len(rules.views) * 2  # 2x due to delete and create
         assert uploaded_by_name["views"].failed == 0
 
-        assert uploaded_by_name["data_models"].success == 1
+        assert uploaded_by_name["data_models"].success == 1 * 2  # 2x due to delete and create
         assert uploaded_by_name["data_models"].failed == 0
 
-        assert uploaded_by_name["spaces"].success == 1
+        assert uploaded_by_name["spaces"].success == 1  # Space is not deleted
         assert uploaded_by_name["spaces"].failed == 0
 
+    @pytest.mark.skip(
+        "We are no longer exposing this functionality. "
+        "It is up for discussion whether we should keep it, so the test is not maintained."
+    )
     def test_export_pipeline_populate_and_retrieve_data(
         self, cognite_client: CogniteClient, table_example: InformationRules, table_example_data: dict[str, list[str]]
     ) -> None:
@@ -259,13 +263,14 @@ def test_export_olav_dms_to_cdf(self, cognite_client: CogniteClient, olav_dms_ru
         uploaded = exporter.export_to_cdf_iterable(rules, cognite_client, dry_run=False)
 
         uploaded_by_name = {entity.name: entity for entity in uploaded}
-        assert uploaded_by_name["containers"].success == len(rules.containers)
+        # We have to double the number of entities due to the delete and create
+        assert uploaded_by_name["containers"].success == len(rules.containers) * 2
         assert uploaded_by_name["containers"].failed == 0
 
-        assert uploaded_by_name["views"].success == len(rules.views)
+        assert uploaded_by_name["views"].success == len(rules.views) * 2
         assert uploaded_by_name["views"].failed == 0
 
-        assert uploaded_by_name["data_models"].success == 1
+        assert uploaded_by_name["data_models"].success == 1 * 2
         assert uploaded_by_name["data_models"].failed == 0
 
         assert uploaded_by_name["spaces"].success == 1
@@ -292,13 +297,14 @@ def test_export_svein_harald_dms_to_cdf(
         uploaded = exporter.export_to_cdf(rules, cognite_client, dry_run=False)
 
         uploaded_by_name = {entity.name: entity for entity in uploaded}
-        assert uploaded_by_name["containers"].success == len(rules.containers)
+        # We have to double the number of entities due to the delete and create
+        assert uploaded_by_name["containers"].success == len(rules.containers) * 2
         assert uploaded_by_name["containers"].failed == 0
 
-        assert uploaded_by_name["views"].success == len(schema.views)
+        assert uploaded_by_name["views"].success == len(schema.views) * 2
         assert uploaded_by_name["views"].failed == 0
 
-        assert uploaded_by_name["data_models"].success == 1
+        assert uploaded_by_name["data_models"].success == 1 * 2
         assert uploaded_by_name["data_models"].failed == 0
 
         assert uploaded_by_name["spaces"].success == 1
@@ -355,13 +361,14 @@ def test_export_olav_updated_dms_to_cdf(
         uploaded = exporter.export_to_cdf_iterable(rules, cognite_client, dry_run=False)
 
         uploaded_by_name = {entity.name: entity for entity in uploaded}
-        assert uploaded_by_name["containers"].success == len(schema.containers)
+        # We have to double the number of entities due to the delete and create
+        assert uploaded_by_name["containers"].success == len(schema.containers) * 2
         assert uploaded_by_name["containers"].failed == 0
 
-        assert uploaded_by_name["views"].success == len(schema.views)
+        assert uploaded_by_name["views"].success == len(schema.views) * 2
         assert uploaded_by_name["views"].failed == 0
 
-        assert uploaded_by_name["data_models"].success == 1
+        assert uploaded_by_name["data_models"].success == 1 * 2
         assert uploaded_by_name["data_models"].failed == 0
 
         assert uploaded_by_name["spaces"].success == 1
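
For context (not part of the diff): a minimal sketch of driving the new skip_space flag directly. The import paths, the DMSExporter class name, and the helper function below are assumptions for illustration; only the delete_from_cdf signature, the UploadResult fields, and the skip-space behaviour are taken from the change above.

    from cognite.client import CogniteClient

    # Assumed import paths; the diff only shows _rules2dms.py and does not
    # name the exporter class or the module where DMSRules lives.
    from cognite.neat._rules.exporters._rules2dms import DMSExporter
    from cognite.neat._rules.models import DMSRules


    def delete_model_keep_space(exporter: DMSExporter, rules: DMSRules, client: CogniteClient) -> None:
        """Delete containers, views, and data models, but keep the space."""
        # skip_space=True makes delete_from_cdf skip the SpaceLoader in its
        # reversed delete loop, so the space (and any instances stored in it)
        # survives while everything built on top of it is removed.
        for result in exporter.delete_from_cdf(rules, client, dry_run=False, skip_space=True):
            print(f"{result.name}: deleted {result.success}, failed {result.failed}")

This mirrors what export_to_cdf_iterable now does internally when existing_handling == "force": it deletes with skip_space=True before recreating, which is why the updated tests expect every resource count doubled except for spaces.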