diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 8b973249..0c5cea78 100755
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -57,7 +57,4 @@ jobs:
- name: Build
run: python3 setup.py install --user --fmil-home=/usr
- name: Test
- run: |
- rm src/pyfmi/__init__.py
- cp -rv src/pyfmi/tests/files tests
- pytest --verbose tests/
+ run: pytest
diff --git a/CHANGELOG b/CHANGELOG
index 42c73bc0..8fdff3f6 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -6,6 +6,10 @@
* Added option to limit the size of the result ("result_max_size"), default set to 2GB.
* Added method ResultDymolaBinary.get_variables_data. Included some minor refactorization.
The new method allows for retrieving partial trajectories, and multiple trajectories at once.
+ * Changed testing framework from `nose` to `pytest`.
+ * Removed tests from the PyFMI installation.
+ * Moved test files from src/pyfmi/tests/... to tests/files/...
+ * Moved test_util.* from src/pyfmi/tests to src/pyfmi
--- PyFMI-2.14.0 ---
* Updated the error message displayed when loading FMUs with needsExecutionTool set to True.
diff --git a/tests/pytest.ini b/pytest.ini
similarity index 67%
rename from tests/pytest.ini
rename to pytest.ini
index 9cce0b8d..b58567ef 100644
--- a/tests/pytest.ini
+++ b/pytest.ini
@@ -1,3 +1,7 @@
[pytest]
+testpaths =
+ tests
filterwarnings =
ignore:.*does not support directional derivatives.*:UserWarning
+markers =
+    assimulo: tests that require Assimulo to be installed
\ No newline at end of file
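
Note on the `assimulo` marker registered above: it replaces the `if assimulo_installed:` guards removed from tests/test_fmi.py further down in this diff. The diff itself does not show how marked tests get skipped when Assimulo is missing; a minimal sketch of one way to do it, assuming a repository-level conftest.py (hypothetical, not part of this change), is:

```python
# conftest.py (hypothetical sketch, not included in this diff):
# skip tests marked `assimulo` when the Assimulo package cannot be imported,
# mirroring the removed `if assimulo_installed:` guards.
import pytest

try:
    import assimulo  # noqa: F401
    ASSIMULO_INSTALLED = True
except ImportError:
    ASSIMULO_INSTALLED = False

def pytest_collection_modifyitems(config, items):
    # When Assimulo is missing, attach a skip marker to every collected
    # test item that carries the `assimulo` mark.
    if ASSIMULO_INSTALLED:
        return
    skip_assimulo = pytest.mark.skip(reason="assimulo is not installed")
    for item in items:
        if "assimulo" in item.keywords:
            item.add_marker(skip_assimulo)
```

Marked tests can also be deselected explicitly on the command line, e.g. `pytest -m "not assimulo"`.
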
diff --git a/setup.py b/setup.py
index 1e7c91b1..0361e316 100644
--- a/setup.py
+++ b/setup.py
@@ -248,7 +248,7 @@ def check_extensions():
compiler_directives={'language_level' : "3str"})
# Test utilities
- ext_list += cythonize([os.path.join("src", "pyfmi", "tests", "test_util.pyx")],
+ ext_list += cythonize([os.path.join("src", "pyfmi", "test_util.pyx")],
include_path = incl_path,
compiler_directives={'language_level' : "3str"})
@@ -324,15 +324,14 @@ def check_extensions():
classifiers=CLASSIFIERS,
ext_modules = ext_list,
package_dir = {'pyfmi': os.path.join('src', 'pyfmi'),
- 'pyfmi.common': os.path.join('src', 'common'),
- 'pyfmi.tests': 'tests'},
+ 'pyfmi.common': os.path.join('src', 'common')
+ },
packages=[
'pyfmi',
'pyfmi.simulation',
'pyfmi.examples',
'pyfmi.common',
'pyfmi.common.plotting',
- 'pyfmi.tests',
'pyfmi.common.log'
],
package_data = {'pyfmi': [
@@ -340,17 +339,11 @@ def check_extensions():
'examples/files/FMUs/CS1.0/*',
'examples/files/FMUs/ME2.0/*',
'examples/files/FMUs/CS2.0/*',
- 'tests/files/FMUs/XML/ME1.0/*',
- 'tests/files/FMUs/XML/CS1.0/*',
- 'tests/files/FMUs/XML/ME2.0/*',
- 'tests/files/FMUs/XML/CS2.0/*',
- 'tests/files/Results/*',
- 'tests/files/Logs/*',
'version.txt',
'LICENSE',
'CHANGELOG',
- 'util/*'] + extra_package_data,
- 'pyfmi.tests': ['pytest.ini']},
+ 'util/*'] + extra_package_data
+ },
script_args=copy_args
)
diff --git a/src/pyfmi/tests/test_util.pxd b/src/pyfmi/test_util.pxd
similarity index 100%
rename from src/pyfmi/tests/test_util.pxd
rename to src/pyfmi/test_util.pxd
diff --git a/src/pyfmi/tests/test_util.pyx b/src/pyfmi/test_util.pyx
similarity index 99%
rename from src/pyfmi/tests/test_util.pyx
rename to src/pyfmi/test_util.pyx
index e825bba8..10988975 100644
--- a/src/pyfmi/tests/test_util.pyx
+++ b/src/pyfmi/test_util.pyx
@@ -24,7 +24,7 @@ cimport pyfmi.fmil_import as FMIL
from pyfmi.fmi import FMUException, FMUModelME1, FMUModelCS1, FMUModelCS2, FMUModelME2
def get_examples_folder():
- return os.path.join(os.path.dirname(__file__), '..', 'examples')
+ return os.path.join(os.path.dirname(__file__), 'examples')
cdef class _ForTestingFMUModelME1(FMUModelME1):
cdef int _get_nominal_continuous_states_fmil(self, FMIL.fmi1_real_t* xnominal, size_t nx):
diff --git a/src/pyfmi/tests/__init__.py b/src/pyfmi/tests/__init__.py
deleted file mode 100644
index 8962836d..00000000
--- a/src/pyfmi/tests/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright (C) 2024 Modelon AB
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, version 3 of the License.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e7c78986..00000000
--- a/tests/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright (C) 2018-2024 Modelon AB
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, version 3 of the License.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS1.0/CoupledClutches.fmu b/tests/files/FMUs/XML/CS1.0/CoupledClutches.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS1.0/CoupledClutches.fmu
rename to tests/files/FMUs/XML/CS1.0/CoupledClutches.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS1.0/NegatedAlias.fmu b/tests/files/FMUs/XML/CS1.0/NegatedAlias.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS1.0/NegatedAlias.fmu
rename to tests/files/FMUs/XML/CS1.0/NegatedAlias.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS1.0/bouncingBall.fmu b/tests/files/FMUs/XML/CS1.0/bouncingBall.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS1.0/bouncingBall.fmu
rename to tests/files/FMUs/XML/CS1.0/bouncingBall.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/CoupledClutches.fmu b/tests/files/FMUs/XML/CS2.0/CoupledClutches.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/CoupledClutches.fmu
rename to tests/files/FMUs/XML/CS2.0/CoupledClutches.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/GainTestInteger.fmu b/tests/files/FMUs/XML/CS2.0/GainTestInteger.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/GainTestInteger.fmu
rename to tests/files/FMUs/XML/CS2.0/GainTestInteger.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/GainTestReal.fmu b/tests/files/FMUs/XML/CS2.0/GainTestReal.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/GainTestReal.fmu
rename to tests/files/FMUs/XML/CS2.0/GainTestReal.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/IntegerStep.fmu b/tests/files/FMUs/XML/CS2.0/IntegerStep.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/IntegerStep.fmu
rename to tests/files/FMUs/XML/CS2.0/IntegerStep.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem1.fmu b/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem1.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem1.fmu
rename to tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem1.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem2.fmu b/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem2.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem2.fmu
rename to tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem2.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem1.fmu b/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem1.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem1.fmu
rename to tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem1.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem2.fmu b/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem2.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem2.fmu
rename to tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem2.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed1.fmu b/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed1.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed1.fmu
rename to tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed1.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed2.fmu b/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed2.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed2.fmu
rename to tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed2.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/NegatedAlias.fmu b/tests/files/FMUs/XML/CS2.0/NegatedAlias.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/NegatedAlias.fmu
rename to tests/files/FMUs/XML/CS2.0/NegatedAlias.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/bouncingBall.fmu b/tests/files/FMUs/XML/CS2.0/bouncingBall.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/bouncingBall.fmu
rename to tests/files/FMUs/XML/CS2.0/bouncingBall.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/Alias1.fmu b/tests/files/FMUs/XML/ME1.0/Alias1.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/Alias1.fmu
rename to tests/files/FMUs/XML/ME1.0/Alias1.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/CoupledClutches.fmu b/tests/files/FMUs/XML/ME1.0/CoupledClutches.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/CoupledClutches.fmu
rename to tests/files/FMUs/XML/ME1.0/CoupledClutches.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/Description.fmu b/tests/files/FMUs/XML/ME1.0/Description.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/Description.fmu
rename to tests/files/FMUs/XML/ME1.0/Description.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/NegatedAlias.fmu b/tests/files/FMUs/XML/ME1.0/NegatedAlias.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/NegatedAlias.fmu
rename to tests/files/FMUs/XML/ME1.0/NegatedAlias.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/NoState.Example1.fmu b/tests/files/FMUs/XML/ME1.0/NoState.Example1.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/NoState.Example1.fmu
rename to tests/files/FMUs/XML/ME1.0/NoState.Example1.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/NominalTest4.fmu b/tests/files/FMUs/XML/ME1.0/NominalTest4.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/NominalTest4.fmu
rename to tests/files/FMUs/XML/ME1.0/NominalTest4.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/RLC_Circuit.fmu b/tests/files/FMUs/XML/ME1.0/RLC_Circuit.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/RLC_Circuit.fmu
rename to tests/files/FMUs/XML/ME1.0/RLC_Circuit.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/bouncingBall.fmu b/tests/files/FMUs/XML/ME1.0/bouncingBall.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/bouncingBall.fmu
rename to tests/files/FMUs/XML/ME1.0/bouncingBall.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/dq.fmu b/tests/files/FMUs/XML/ME1.0/dq.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/dq.fmu
rename to tests/files/FMUs/XML/ME1.0/dq.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Alias.fmu b/tests/files/FMUs/XML/ME2.0/Alias.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Alias.fmu
rename to tests/files/FMUs/XML/ME2.0/Alias.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/BasicSens1.fmu b/tests/files/FMUs/XML/ME2.0/BasicSens1.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/BasicSens1.fmu
rename to tests/files/FMUs/XML/ME2.0/BasicSens1.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/BasicSens2.fmu b/tests/files/FMUs/XML/ME2.0/BasicSens2.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/BasicSens2.fmu
rename to tests/files/FMUs/XML/ME2.0/BasicSens2.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Bouncing_Ball.fmu b/tests/files/FMUs/XML/ME2.0/Bouncing_Ball.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Bouncing_Ball.fmu
rename to tests/files/FMUs/XML/ME2.0/Bouncing_Ball.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/CoupledClutches.fmu b/tests/files/FMUs/XML/ME2.0/CoupledClutches.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/CoupledClutches.fmu
rename to tests/files/FMUs/XML/ME2.0/CoupledClutches.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/CoupledClutchesModified.fmu b/tests/files/FMUs/XML/ME2.0/CoupledClutchesModified.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/CoupledClutchesModified.fmu
rename to tests/files/FMUs/XML/ME2.0/CoupledClutchesModified.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Description.fmu b/tests/files/FMUs/XML/ME2.0/Description.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Description.fmu
rename to tests/files/FMUs/XML/ME2.0/Description.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Enumerations.Enumeration3.fmu b/tests/files/FMUs/XML/ME2.0/Enumerations.Enumeration3.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Enumerations.Enumeration3.fmu
rename to tests/files/FMUs/XML/ME2.0/Enumerations.Enumeration3.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Friction2.fmu b/tests/files/FMUs/XML/ME2.0/Friction2.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Friction2.fmu
rename to tests/files/FMUs/XML/ME2.0/Friction2.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Large.fmu b/tests/files/FMUs/XML/ME2.0/Large.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Large.fmu
rename to tests/files/FMUs/XML/ME2.0/Large.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.FullSystem.fmu b/tests/files/FMUs/XML/ME2.0/LinearStability.FullSystem.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.FullSystem.fmu
rename to tests/files/FMUs/XML/ME2.0/LinearStability.FullSystem.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem1.fmu b/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem1.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem1.fmu
rename to tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem1.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem2.fmu b/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem2.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem2.fmu
rename to tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem2.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStateSpace.fmu b/tests/files/FMUs/XML/ME2.0/LinearStateSpace.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStateSpace.fmu
rename to tests/files/FMUs/XML/ME2.0/LinearStateSpace.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/MalFormed.fmu b/tests/files/FMUs/XML/ME2.0/MalFormed.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/MalFormed.fmu
rename to tests/files/FMUs/XML/ME2.0/MalFormed.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/NegatedAlias.fmu b/tests/files/FMUs/XML/ME2.0/NegatedAlias.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/NegatedAlias.fmu
rename to tests/files/FMUs/XML/ME2.0/NegatedAlias.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/NoState.Example1.fmu b/tests/files/FMUs/XML/ME2.0/NoState.Example1.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/NoState.Example1.fmu
rename to tests/files/FMUs/XML/ME2.0/NoState.Example1.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/NominalTests.NominalTest4.fmu b/tests/files/FMUs/XML/ME2.0/NominalTests.NominalTest4.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/NominalTests.NominalTest4.fmu
rename to tests/files/FMUs/XML/ME2.0/NominalTests.NominalTest4.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/OutputTest2.fmu b/tests/files/FMUs/XML/ME2.0/OutputTest2.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/OutputTest2.fmu
rename to tests/files/FMUs/XML/ME2.0/OutputTest2.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/ParameterAlias.fmu b/tests/files/FMUs/XML/ME2.0/ParameterAlias.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/ParameterAlias.fmu
rename to tests/files/FMUs/XML/ME2.0/ParameterAlias.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu b/tests/files/FMUs/XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu
rename to tests/files/FMUs/XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/bouncingBall.fmu b/tests/files/FMUs/XML/ME2.0/bouncingBall.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/bouncingBall.fmu
rename to tests/files/FMUs/XML/ME2.0/bouncingBall.fmu
diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/test_type_definitions.fmu b/tests/files/FMUs/XML/ME2.0/test_type_definitions.fmu
similarity index 100%
rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/test_type_definitions.fmu
rename to tests/files/FMUs/XML/ME2.0/test_type_definitions.fmu
diff --git a/src/pyfmi/tests/files/Logs/CoupledClutches_CS_log.txt b/tests/files/Logs/CoupledClutches_CS_log.txt
similarity index 100%
rename from src/pyfmi/tests/files/Logs/CoupledClutches_CS_log.txt
rename to tests/files/Logs/CoupledClutches_CS_log.txt
diff --git a/src/pyfmi/tests/files/Logs/CoupledClutches_log.txt b/tests/files/Logs/CoupledClutches_log.txt
similarity index 100%
rename from src/pyfmi/tests/files/Logs/CoupledClutches_log.txt
rename to tests/files/Logs/CoupledClutches_log.txt
diff --git a/src/pyfmi/tests/files/Logs/CoupledClutches_log.xml b/tests/files/Logs/CoupledClutches_log.xml
similarity index 100%
rename from src/pyfmi/tests/files/Logs/CoupledClutches_log.xml
rename to tests/files/Logs/CoupledClutches_log.xml
diff --git a/src/pyfmi/tests/files/Logs/boolean_log.xml b/tests/files/Logs/boolean_log.xml
similarity index 100%
rename from src/pyfmi/tests/files/Logs/boolean_log.xml
rename to tests/files/Logs/boolean_log.xml
diff --git a/src/pyfmi/tests/files/Results/DoublePendulum.mat b/tests/files/Results/DoublePendulum.mat
similarity index 100%
rename from src/pyfmi/tests/files/Results/DoublePendulum.mat
rename to tests/files/Results/DoublePendulum.mat
diff --git a/src/pyfmi/tests/files/Results/TestCSV.csv b/tests/files/Results/TestCSV.csv
similarity index 100%
rename from src/pyfmi/tests/files/Results/TestCSV.csv
rename to tests/files/Results/TestCSV.csv
diff --git a/src/pyfmi/tests/files/Results/qt_par_est_data.mat b/tests/files/Results/qt_par_est_data.mat
similarity index 100%
rename from src/pyfmi/tests/files/Results/qt_par_est_data.mat
rename to tests/files/Results/qt_par_est_data.mat
diff --git a/tests/test_fmi.py b/tests/test_fmi.py
index 91054d14..6760808a 100644
--- a/tests/test_fmi.py
+++ b/tests/test_fmi.py
@@ -28,7 +28,7 @@
import pyfmi.fmi as fmi
from pyfmi.fmi_algorithm_drivers import AssimuloFMIAlg, AssimuloFMIAlgOptions, \
PYFMI_JACOBIAN_LIMIT, PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT
-from pyfmi.tests.test_util import Dummy_FMUModelCS1, Dummy_FMUModelME1, Dummy_FMUModelME2, Dummy_FMUModelCS2, get_examples_folder
+from pyfmi.test_util import Dummy_FMUModelCS1, Dummy_FMUModelME1, Dummy_FMUModelME2, Dummy_FMUModelCS2, get_examples_folder
from pyfmi.common.io import ResultHandler
from pyfmi.common.algorithm_drivers import UnrecognizedOptionError
from pyfmi.common.core import create_temp_dir
@@ -44,11 +44,10 @@ def solve(self):
pass
-assimulo_installed = True
try:
import assimulo
except ImportError:
- assimulo_installed = False
+ pass
file_path = os.path.dirname(os.path.abspath(__file__))
@@ -72,107 +71,107 @@ def _helper_unzipped_fmu_exception_invalid_dir(fmu_loader):
with pytest.raises(FMUException, match = err_msg):
fmu = fmu_loader(temp_dir, allow_unzipped_fmu = True)
-if assimulo_installed:
- class Test_FMUModelME1_Simulation:
- def test_simulate_with_debug_option_no_state(self):
- """ Verify that an instance of CVodeDebugInformation is created """
- model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NoState.Example1.fmu"), _connect_dll=False)
+@pytest.mark.assimulo
+class Test_FMUModelME1_Simulation:
+ def test_simulate_with_debug_option_no_state(self):
+ """ Verify that an instance of CVodeDebugInformation is created """
+ model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NoState.Example1.fmu"), _connect_dll=False)
- opts=model.simulate_options()
- opts["logging"] = True
- opts["result_handling"] = "csv" # set to anything except 'binary'
+ opts=model.simulate_options()
+ opts["logging"] = True
+ opts["result_handling"] = "csv" # set to anything except 'binary'
- #Verify that a simulation is successful
- res=model.simulate(options=opts)
+ #Verify that a simulation is successful
+ res=model.simulate(options=opts)
- from pyfmi.debug import CVodeDebugInformation
- debug = CVodeDebugInformation("NoState_Example1_debug.txt")
+ from pyfmi.debug import CVodeDebugInformation
+ debug = CVodeDebugInformation("NoState_Example1_debug.txt")
- def test_no_result(self):
- model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+ def test_no_result(self):
+ model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = None
- res = model.simulate(options=opts)
+ opts = model.simulate_options()
+ opts["result_handling"] = None
+ res = model.simulate(options=opts)
- with pytest.raises(Exception):
- res._get_result_data()
+ with pytest.raises(Exception):
+ res._get_result_data()
- model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+ model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["return_result"] = False
- res = model.simulate(options=opts)
+ opts = model.simulate_options()
+ opts["return_result"] = False
+ res = model.simulate(options=opts)
+
+ with pytest.raises(Exception):
+ res._get_result_data()
- with pytest.raises(Exception):
- res._get_result_data()
+ def test_custom_result_handler(self):
+ model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
- def test_custom_result_handler(self):
- model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+ class A:
+ pass
+ class B(ResultHandler):
+ def get_result(self):
+ return None
- class A:
- pass
- class B(ResultHandler):
- def get_result(self):
- return None
+ opts = model.simulate_options()
+ opts["result_handling"] = "hejhej"
+ with pytest.raises(Exception):
+ model.simulate(options=opts)
+ opts["result_handling"] = "custom"
+ with pytest.raises(Exception):
+ model.simulate(options=opts)
+ opts["result_handler"] = A()
+ with pytest.raises(Exception):
+ model.simulate(options=opts)
+ opts["result_handler"] = B()
+ res = model.simulate(options=opts)
- opts = model.simulate_options()
- opts["result_handling"] = "hejhej"
- with pytest.raises(Exception):
- model.simulate(options=opts)
- opts["result_handling"] = "custom"
- with pytest.raises(Exception):
- model.simulate(options=opts)
- opts["result_handler"] = A()
- with pytest.raises(Exception):
- model.simulate(options=opts)
- opts["result_handler"] = B()
- res = model.simulate(options=opts)
+ def setup_atol_auto_update_test_base(self):
+ model = Dummy_FMUModelME1([], FMU_PATHS.ME1.nominal_test4, _connect_dll=False)
+ model.override_nominal_continuous_states = False
+ opts = model.simulate_options()
+ opts["return_result"] = False
+ opts["solver"] = "CVode"
+ return model, opts
- def setup_atol_auto_update_test_base(self):
- model = Dummy_FMUModelME1([], FMU_PATHS.ME1.nominal_test4, _connect_dll=False)
- model.override_nominal_continuous_states = False
- opts = model.simulate_options()
- opts["return_result"] = False
- opts["solver"] = "CVode"
- return model, opts
-
- def test_atol_auto_update1(self):
- """
- Tests that atol automatically gets updated when "atol = factor * pre_init_nominals".
- """
- model, opts = self.setup_atol_auto_update_test_base()
-
- opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
- model.simulate(options=opts, algorithm=NoSolveAlg)
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
-
- def test_atol_auto_update2(self):
- """
- Tests that atol doesn't get auto-updated when heuristic fails.
- """
- model, opts = self.setup_atol_auto_update_test_base()
-
- opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01]
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02])
- model.simulate(options=opts, algorithm=NoSolveAlg)
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02])
-
- def test_atol_auto_update3(self):
- """
- Tests that atol doesn't get auto-updated when nominals are never retrieved.
- """
- model, opts = self.setup_atol_auto_update_test_base()
-
- opts["CVode_options"]["atol"] = [0.02, 0.01]
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
- model.simulate(options=opts, algorithm=NoSolveAlg)
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
-
- # NOTE:
- # There are more tests for ME2 for auto update of atol, but it should be enough to test
- # one FMI version for that, because they mainly test algorithm drivers functionality.
+ def test_atol_auto_update1(self):
+ """
+ Tests that atol automatically gets updated when "atol = factor * pre_init_nominals".
+ """
+ model, opts = self.setup_atol_auto_update_test_base()
+
+ opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
+ model.simulate(options=opts, algorithm=NoSolveAlg)
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
+
+ def test_atol_auto_update2(self):
+ """
+ Tests that atol doesn't get auto-updated when heuristic fails.
+ """
+ model, opts = self.setup_atol_auto_update_test_base()
+
+ opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01]
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02])
+ model.simulate(options=opts, algorithm=NoSolveAlg)
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02])
+
+ def test_atol_auto_update3(self):
+ """
+ Tests that atol doesn't get auto-updated when nominals are never retrieved.
+ """
+ model, opts = self.setup_atol_auto_update_test_base()
+
+ opts["CVode_options"]["atol"] = [0.02, 0.01]
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
+ model.simulate(options=opts, algorithm=NoSolveAlg)
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
+
+ # NOTE:
+ # There are more tests for ME2 for auto update of atol, but it should be enough to test
+ # one FMI version for that, because they mainly test algorithm drivers functionality.
class Test_FMUModelME1:
@@ -703,419 +702,419 @@ def test_error_check_invalid_value(self):
assert expected_substr in str(e), f"Error was {str(e)}, expected substring {expected_substr}"
assert error_raised
-if assimulo_installed:
- class Test_FMUModelME2_Simulation:
- def test_basicsens1(self):
- #Noncompliant FMI test as 'd' is parameter is not supposed to be able to be set during simulation
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False)
+@pytest.mark.assimulo
+class Test_FMUModelME2_Simulation:
+ def test_basicsens1(self):
+ #Noncompliant FMI test as 'd' is parameter is not supposed to be able to be set during simulation
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False)
- def f(*args, **kwargs):
- d = model.values[model.variables["d"].value_reference]
- x = model.continuous_states[0]
- model.values[model.variables["der(x)"].value_reference] = d*x
- return np.array([d*x])
+ def f(*args, **kwargs):
+ d = model.values[model.variables["d"].value_reference]
+ x = model.continuous_states[0]
+ model.values[model.variables["der(x)"].value_reference] = d*x
+ return np.array([d*x])
- model.get_derivatives = f
+ model.get_derivatives = f
- opts = model.simulate_options()
- opts["sensitivities"] = ["d"]
+ opts = model.simulate_options()
+ opts["sensitivities"] = ["d"]
- res = model.simulate(options=opts)
- assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3)
+ res = model.simulate(options=opts)
+ assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3)
- assert res.solver.statistics["nsensfcnfcns"] > 0
+ assert res.solver.statistics["nsensfcnfcns"] > 0
- def test_basicsens1dir(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False)
+ def test_basicsens1dir(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False)
- caps = model.get_capability_flags()
- caps["providesDirectionalDerivatives"] = True
- model.get_capability_flags = lambda : caps
+ caps = model.get_capability_flags()
+ caps["providesDirectionalDerivatives"] = True
+ model.get_capability_flags = lambda : caps
- def f(*args, **kwargs):
- d = model.values[model.variables["d"].value_reference]
- x = model.continuous_states[0]
- model.values[model.variables["der(x)"].value_reference] = d*x
- return np.array([d*x])
+ def f(*args, **kwargs):
+ d = model.values[model.variables["d"].value_reference]
+ x = model.continuous_states[0]
+ model.values[model.variables["der(x)"].value_reference] = d*x
+ return np.array([d*x])
+
+ def d(*args, **kwargs):
+ if args[0][0] == 40:
+ return np.array([-1.0])
+ else:
+ return model.continuous_states
+
+ model.get_directional_derivative = d
+ model.get_derivatives = f
+ model._provides_directional_derivatives = lambda : True
- def d(*args, **kwargs):
- if args[0][0] == 40:
- return np.array([-1.0])
- else:
- return model.continuous_states
+ opts = model.simulate_options()
+ opts["sensitivities"] = ["d"]
- model.get_directional_derivative = d
- model.get_derivatives = f
- model._provides_directional_derivatives = lambda : True
+ res = model.simulate(options=opts)
+ assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3)
- opts = model.simulate_options()
- opts["sensitivities"] = ["d"]
+ assert res.solver.statistics["nsensfcnfcns"] > 0
+ assert res.solver.statistics["nfcnjacs"] == 0
- res = model.simulate(options=opts)
- assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3)
+ def test_basicsens2(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens2.fmu"), _connect_dll=False)
- assert res.solver.statistics["nsensfcnfcns"] > 0
- assert res.solver.statistics["nfcnjacs"] == 0
+ caps = model.get_capability_flags()
+ caps["providesDirectionalDerivatives"] = True
+ model.get_capability_flags = lambda : caps
- def test_basicsens2(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens2.fmu"), _connect_dll=False)
+ def f(*args, **kwargs):
+ d = model.values[model.variables["d"].value_reference]
+ x = model.continuous_states[0]
+ model.values[model.variables["der(x)"].value_reference] = d*x
+ return np.array([d*x])
+
+ def d(*args, **kwargs):
+ if args[0][0] == 40:
+ return np.array([-1.0])
+ else:
+ return model.continuous_states
+
+ model.get_directional_derivative = d
+ model.get_derivatives = f
+ model._provides_directional_derivatives = lambda : True
- caps = model.get_capability_flags()
- caps["providesDirectionalDerivatives"] = True
- model.get_capability_flags = lambda : caps
+ opts = model.simulate_options()
+ opts["sensitivities"] = ["d"]
- def f(*args, **kwargs):
- d = model.values[model.variables["d"].value_reference]
- x = model.continuous_states[0]
- model.values[model.variables["der(x)"].value_reference] = d*x
- return np.array([d*x])
+ res = model.simulate(options=opts)
+ assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3)
- def d(*args, **kwargs):
- if args[0][0] == 40:
- return np.array([-1.0])
- else:
- return model.continuous_states
+ assert res.solver.statistics["nsensfcnfcns"] == 0
- model.get_directional_derivative = d
- model.get_derivatives = f
- model._provides_directional_derivatives = lambda : True
+ def test_relative_tolerance(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["sensitivities"] = ["d"]
+ opts = model.simulate_options()
+ opts["CVode_options"]["rtol"] = 1e-8
- res = model.simulate(options=opts)
- assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3)
+ res = model.simulate(options=opts)
- assert res.solver.statistics["nsensfcnfcns"] == 0
+ assert res.options["CVode_options"]["atol"] == 1e-10
- def test_relative_tolerance(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
+ def test_simulate_with_debug_option_no_state(self):
+ """ Verify that an instance of CVodeDebugInformation is created """
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["CVode_options"]["rtol"] = 1e-8
+ opts=model.simulate_options()
+ opts["logging"] = True
+ opts["result_handling"] = "csv" # set to anything except 'binary'
- res = model.simulate(options=opts)
+ #Verify that a simulation is successful
+ res=model.simulate(options=opts)
+
+ from pyfmi.debug import CVodeDebugInformation
+ debug = CVodeDebugInformation("NoState_Example1_debug.txt")
+
+ def test_maxord_is_set(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
+ opts = model.simulate_options()
+ opts["solver"] = "CVode"
+ opts["CVode_options"]["maxord"] = 1
- assert res.options["CVode_options"]["atol"] == 1e-10
+ res = model.simulate(final_time=1.5,options=opts)
- def test_simulate_with_debug_option_no_state(self):
- """ Verify that an instance of CVodeDebugInformation is created """
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
+ assert res.solver.maxord == 1
- opts=model.simulate_options()
- opts["logging"] = True
- opts["result_handling"] = "csv" # set to anything except 'binary'
+ def test_with_jacobian_option(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
+ opts = model.simulate_options()
+ opts["solver"] = "CVode"
+ opts["result_handling"] = None
- #Verify that a simulation is successful
- res=model.simulate(options=opts)
+ def run_case(expected, default="Default"):
+ model.reset()
+ res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg)
+ assert res.options["with_jacobian"] == default, res.options["with_jacobian"]
+ assert res.solver.problem._with_jacobian == expected, res.solver.problem._with_jacobian
- from pyfmi.debug import CVodeDebugInformation
- debug = CVodeDebugInformation("NoState_Example1_debug.txt")
+ run_case(False)
- def test_maxord_is_set(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["solver"] = "CVode"
- opts["CVode_options"]["maxord"] = 1
+ model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT+1, 0)
+ run_case(True)
- res = model.simulate(final_time=1.5,options=opts)
+ opts["solver"] = "Radau5ODE"
+ run_case(False)
- assert res.solver.maxord == 1
+ opts["solver"] = "CVode"
+ opts["with_jacobian"] = False
+ run_case(False, False)
- def test_with_jacobian_option(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
+ model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT-1, 0)
+ opts["with_jacobian"] = True
+ run_case(True, True)
+
+ def test_sparse_option(self):
+
+ def run_case(expected_jacobian, expected_sparse, fnbr=0, nnz={}, set_sparse=False):
+ class Sparse_FMUModelME2(Dummy_FMUModelME2):
+ def get_derivatives_dependencies(self):
+ return (nnz, {})
+
+ model = Sparse_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
opts = model.simulate_options()
opts["solver"] = "CVode"
opts["result_handling"] = None
+ if set_sparse:
+ opts["CVode_options"]["linear_solver"] = "SPARSE"
- def run_case(expected, default="Default"):
- model.reset()
- res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg)
- assert res.options["with_jacobian"] == default, res.options["with_jacobian"]
- assert res.solver.problem._with_jacobian == expected, res.solver.problem._with_jacobian
+ model.get_ode_sizes = lambda: (fnbr, 0)
- run_case(False)
+ res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg)
+ assert res.solver.problem._with_jacobian == expected_jacobian, res.solver.problem._with_jacobian
+ assert res.solver.linear_solver == expected_sparse, res.solver.linear_solver
- model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT+1, 0)
- run_case(True)
+ run_case(False, "DENSE")
+ run_case(True, "DENSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT**2})
+ run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT})
+ run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}, True)
- opts["solver"] = "Radau5ODE"
- run_case(False)
+ def test_ncp_option(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
+ opts = model.simulate_options()
+ assert opts["ncp"] == 500, opts["ncp"]
- opts["solver"] = "CVode"
- opts["with_jacobian"] = False
- run_case(False, False)
+ def test_solver_options(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
+ opts = model.simulate_options()
- model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT-1, 0)
- opts["with_jacobian"] = True
- run_case(True, True)
+ try:
+ opts["CVode_options"] = "ShouldFail"
+ raise Exception("Setting an incorrect option should lead to exception being thrown, it wasn't")
+ except UnrecognizedOptionError:
+ pass
- def test_sparse_option(self):
+ opts["CVode_options"] = {"maxh":1.0}
+ assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"]
+ assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"]
- def run_case(expected_jacobian, expected_sparse, fnbr=0, nnz={}, set_sparse=False):
- class Sparse_FMUModelME2(Dummy_FMUModelME2):
- def get_derivatives_dependencies(self):
- return (nnz, {})
+ def test_solver_options_using_defaults(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
+ opts = model.simulate_options()
- model = Sparse_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["solver"] = "CVode"
- opts["result_handling"] = None
- if set_sparse:
- opts["CVode_options"]["linear_solver"] = "SPARSE"
+ opts["CVode_options"] = {"maxh":1.0}
+ assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"]
+ assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"]
- model.get_ode_sizes = lambda: (fnbr, 0)
+ opts["CVode_options"] = {"atol":1e-6} #Defaults should be used together with only the option atol set
+ assert opts["CVode_options"]["atol"] == 1e-6, "Default should have been changed: " + opts["CVode_options"]["atol"]
+ assert opts["CVode_options"]["maxh"] == "Default", "Value should have been default is: " + opts["CVode_options"]["maxh"]
- res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg)
- assert res.solver.problem._with_jacobian == expected_jacobian, res.solver.problem._with_jacobian
- assert res.solver.linear_solver == expected_sparse, res.solver.linear_solver
+ def test_deepcopy_option(self):
+ opts = AssimuloFMIAlgOptions()
+ opts["CVode_options"]["maxh"] = 2.0
- run_case(False, "DENSE")
- run_case(True, "DENSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT**2})
- run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT})
- run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}, True)
+ import copy
- def test_ncp_option(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- assert opts["ncp"] == 500, opts["ncp"]
+ opts_copy = copy.deepcopy(opts)
- def test_solver_options(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
- opts = model.simulate_options()
+ assert opts["CVode_options"]["maxh"] == opts_copy["CVode_options"]["maxh"], "Deepcopy not working..."
- try:
- opts["CVode_options"] = "ShouldFail"
- raise Exception("Setting an incorrect option should lead to exception being thrown, it wasn't")
- except UnrecognizedOptionError:
- pass
+ def test_maxh_option(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
+ opts = model.simulate_options()
+ opts["result_handling"] = None
- opts["CVode_options"] = {"maxh":1.0}
- assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"]
- assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"]
+ def run_case(tstart, tstop, solver, ncp="Default"):
+ model.reset()
- def test_solver_options_using_defaults(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
- opts = model.simulate_options()
+ opts["solver"] = solver
- opts["CVode_options"] = {"maxh":1.0}
- assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"]
- assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"]
+ if ncp != "Default":
+ opts["ncp"] = ncp
- opts["CVode_options"] = {"atol":1e-6} #Defaults should be used together with only the option atol set
- assert opts["CVode_options"]["atol"] == 1e-6, "Default should have been changed: " + opts["CVode_options"]["atol"]
- assert opts["CVode_options"]["maxh"] == "Default", "Value should have been default is: " + opts["CVode_options"]["maxh"]
+ if opts["ncp"] == 0:
+ expected = 0.0
+ else:
+ expected = (float(tstop)-float(tstart))/float(opts["ncp"])
- def test_deepcopy_option(self):
- opts = AssimuloFMIAlgOptions()
- opts["CVode_options"]["maxh"] = 2.0
+ res = model.simulate(start_time=tstart, final_time=tstop,options=opts, algorithm=NoSolveAlg)
+ assert res.solver.maxh == expected, res.solver.maxh
+ assert res.options[solver+"_options"]["maxh"] == "Default", res.options[solver+"_options"]["maxh"]
- import copy
+ run_case(0,1,"CVode")
+ run_case(0,1,"CVode", 0)
+ run_case(0,1,"Radau5ODE")
+ run_case(0,1,"Dopri5")
+ run_case(0,1,"RodasODE")
+ run_case(0,1,"LSODAR")
+ run_case(0,1,"LSODAR")
+
+ def test_rtol_auto_update(self):
+ """ Test that default rtol picks up the unbounded attribute. """
+ model = Dummy_FMUModelME2([], FMU_PATHS.ME2.coupled_clutches_modified, _connect_dll=False)
+
+ res = model.simulate()
+
+ # verify appropriate rtol(s)
+ for i, state in enumerate(model.get_states_list().keys()):
+ if res.solver.supports.get('rtol_as_vector', False):
+ # automatic construction of rtol vector
+ if model.get_variable_unbounded(state):
+ assert res.solver.rtol[i] == 0
+ else:
+ assert res.solver.rtol[i] > 0
+ else: # no support: scalar rtol
+ assert isinstance(res.solver.rtol, float)
- opts_copy = copy.deepcopy(opts)
+ def test_rtol_vector_manual_valid(self):
+ """ Tests manual valid rtol vector works; if supported. """
- assert opts["CVode_options"]["maxh"] == opts_copy["CVode_options"]["maxh"], "Deepcopy not working..."
+ model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
+
+ opts = model.simulate_options()
+ opts["CVode_options"]["rtol"] = [1e-5, 0.]
+
+ try:
+ res = model.simulate(options=opts)
+ # solver support
+ assert res.solver.rtol[0] == 1e-5
+ assert res.solver.rtol[1] == 0.
+ except InvalidOptionException as e: # if no solver support
+ assert str(e).startswith("Failed to set the solver option 'rtol'")
+
+ def test_rtol_vector_manual_size_mismatch(self):
+ """ Tests invalid rtol vector: size mismatch. """
+ model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
+
+ opts = model.simulate_options()
+ opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5]
+
+ err_msg = "If the relative tolerance is provided as a vector, it need to be equal to the number of states."
+ with pytest.raises(InvalidOptionException, match = err_msg):
+ model.simulate(options=opts)
- def test_maxh_option(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = None
+ def test_rtol_vector_manual_invalid(self):
+ """ Tests invalid rtol vector: different nonzero values. """
+
+ model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False)
- def run_case(tstart, tstop, solver, ncp="Default"):
- model.reset()
+ opts = model.simulate_options()
+ opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5, 1e-5, 0, 1e-5,1e-6, 0]
+
+ err_msg = "If the relative tolerance is provided as a vector, the values need to be equal except for zeros."
+ with pytest.raises(InvalidOptionException, match = err_msg):
+ model.simulate(options=opts)
- opts["solver"] = solver
+ def test_rtol_vector_manual_scalar_conversion(self):
+ """ Test automatic scalar conversion of trivial rtol vector. """
+ model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
+
+ opts = model.simulate_options()
+ opts["CVode_options"]["rtol"] = [1e-5, 1e-5]
+
+ #Verify no exception is raised as the rtol vector should be treated as a scalar
+ res = model.simulate(options=opts)
+ assert res.solver.rtol == 1e-5
+
+ def test_rtol_vector_unsupported(self):
+ """ Test that rtol as a vector triggers exceptions for unsupported solvers. """
+ model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
+ opts = model.simulate_options()
+ opts["result_handling"] = None
- if ncp != "Default":
- opts["ncp"] = ncp
+ def run_case(solver):
+ model.reset()
- if opts["ncp"] == 0:
- expected = 0.0
- else:
- expected = (float(tstop)-float(tstart))/float(opts["ncp"])
-
- res = model.simulate(start_time=tstart, final_time=tstop,options=opts, algorithm=NoSolveAlg)
- assert res.solver.maxh == expected, res.solver.maxh
- assert res.options[solver+"_options"]["maxh"] == "Default", res.options[solver+"_options"]["maxh"]
-
- run_case(0,1,"CVode")
- run_case(0,1,"CVode", 0)
- run_case(0,1,"Radau5ODE")
- run_case(0,1,"Dopri5")
- run_case(0,1,"RodasODE")
- run_case(0,1,"LSODAR")
- run_case(0,1,"LSODAR")
-
- def test_rtol_auto_update(self):
- """ Test that default rtol picks up the unbounded attribute. """
- model = Dummy_FMUModelME2([], FMU_PATHS.ME2.coupled_clutches_modified, _connect_dll=False)
-
- res = model.simulate()
-
- # verify appropriate rtol(s)
- for i, state in enumerate(model.get_states_list().keys()):
- if res.solver.supports.get('rtol_as_vector', False):
- # automatic construction of rtol vector
- if model.get_variable_unbounded(state):
- assert res.solver.rtol[i] == 0
- else:
- assert res.solver.rtol[i] > 0
- else: # no support: scalar rtol
- assert isinstance(res.solver.rtol, float)
-
- def test_rtol_vector_manual_valid(self):
- """ Tests manual valid rtol vector works; if supported. """
-
- model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
-
- opts = model.simulate_options()
- opts["CVode_options"]["rtol"] = [1e-5, 0.]
+ opts["solver"] = solver
+ opts[solver+"_options"]["rtol"] = [1e-5, 0.0]
try:
res = model.simulate(options=opts)
- # solver support
+ # solver support; check tolerances
assert res.solver.rtol[0] == 1e-5
- assert res.solver.rtol[1] == 0.
- except InvalidOptionException as e: # if no solver support
+ assert res.solver.rtol[1] == 0.0
+ except InvalidOptionException as e:
assert str(e).startswith("Failed to set the solver option 'rtol'")
-
- def test_rtol_vector_manual_size_mismatch(self):
- """ Tests invalid rtol vector: size mismatch. """
- model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
-
- opts = model.simulate_options()
- opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5]
-
- err_msg = "If the relative tolerance is provided as a vector, it need to be equal to the number of states."
- with pytest.raises(InvalidOptionException, match = err_msg):
- model.simulate(options=opts)
+ return # OK
- def test_rtol_vector_manual_invalid(self):
- """ Tests invalid rtol vector: different nonzero values. """
-
- model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False)
+ run_case("CVode")
+ run_case("Radau5ODE")
+ run_case("Dopri5")
+ run_case("RodasODE")
+ run_case("LSODAR")
+
+ def setup_atol_auto_update_test_base(self):
+ model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
+ model.override_nominal_continuous_states = False
+ opts = model.simulate_options()
+ opts["return_result"] = False
+ opts["solver"] = "CVode"
+ return model, opts
- opts = model.simulate_options()
- opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5, 1e-5, 0, 1e-5,1e-6, 0]
-
- err_msg = "If the relative tolerance is provided as a vector, the values need to be equal except for zeros."
- with pytest.raises(InvalidOptionException, match = err_msg):
- model.simulate(options=opts)
+ def test_atol_auto_update1(self):
+ """
+ Tests that atol automatically gets updated when "atol = factor * pre_init_nominals".
+ """
+ model, opts = self.setup_atol_auto_update_test_base()
- def test_rtol_vector_manual_scalar_conversion(self):
- """ Test automatic scalar conversion of trivial rtol vector. """
- model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
-
- opts = model.simulate_options()
- opts["CVode_options"]["rtol"] = [1e-5, 1e-5]
-
- #Verify no exception is raised as the rtol vector should be treated as a scalar
- res = model.simulate(options=opts)
- assert res.solver.rtol == 1e-5
+ opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
+ model.simulate(options=opts, algorithm=NoSolveAlg)
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
+
+ def test_atol_auto_update2(self):
+ """
+ Tests that atol doesn't get auto-updated when heuristic fails.
+ """
+ model, opts = self.setup_atol_auto_update_test_base()
+
+ opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01]
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02])
+ model.simulate(options=opts, algorithm=NoSolveAlg)
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02])
+
+ def test_atol_auto_update3(self):
+ """
+ Tests that atol doesn't get auto-updated when nominals are never retrieved.
+ """
+ model, opts = self.setup_atol_auto_update_test_base()
+
+ opts["CVode_options"]["atol"] = [0.02, 0.01]
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
+ model.simulate(options=opts, algorithm=NoSolveAlg)
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
+
+ def test_atol_auto_update4(self):
+ """
+ Tests that atol is not auto-updated when it's set the "correct" way (post initialization).
+ """
+ model, opts = self.setup_atol_auto_update_test_base()
- def test_rtol_vector_unsupported(self):
- """ Test that rtol as a vector triggers exceptions for unsupported solvers. """
- model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = None
+ model.setup_experiment()
+ model.initialize()
+ opts["initialize"] = False
+ opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
+ model.simulate(options=opts, algorithm=NoSolveAlg)
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
- def run_case(solver):
- model.reset()
-
- opts["solver"] = solver
- opts[solver+"_options"]["rtol"] = [1e-5, 0.0]
-
- try:
- res = model.simulate(options=opts)
- # solver support; check tolerances
- assert res.solver.rtol[0] == 1e-5
- assert res.solver.rtol[1] == 0.0
- except InvalidOptionException as e:
- assert str(e).startswith("Failed to set the solver option 'rtol'")
- return # OK
-
- run_case("CVode")
- run_case("Radau5ODE")
- run_case("Dopri5")
- run_case("RodasODE")
- run_case("LSODAR")
+ def test_atol_auto_update5(self):
+ """
+ Tests that atol is automatically set and depends on rtol.
+ """
+ model, opts = self.setup_atol_auto_update_test_base()
- def setup_atol_auto_update_test_base(self):
- model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False)
- model.override_nominal_continuous_states = False
- opts = model.simulate_options()
- opts["return_result"] = False
- opts["solver"] = "CVode"
- return model, opts
-
- def test_atol_auto_update1(self):
- """
- Tests that atol automatically gets updated when "atol = factor * pre_init_nominals".
- """
- model, opts = self.setup_atol_auto_update_test_base()
-
- opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
- model.simulate(options=opts, algorithm=NoSolveAlg)
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
-
- def test_atol_auto_update2(self):
- """
- Tests that atol doesn't get auto-updated when heuristic fails.
- """
- model, opts = self.setup_atol_auto_update_test_base()
-
- opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01]
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02])
- model.simulate(options=opts, algorithm=NoSolveAlg)
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02])
-
- def test_atol_auto_update3(self):
- """
- Tests that atol doesn't get auto-updated when nominals are never retrieved.
- """
- model, opts = self.setup_atol_auto_update_test_base()
-
- opts["CVode_options"]["atol"] = [0.02, 0.01]
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
- model.simulate(options=opts, algorithm=NoSolveAlg)
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
-
- def test_atol_auto_update4(self):
- """
- Tests that atol is not auto-updated when it's set the "correct" way (post initialization).
- """
- model, opts = self.setup_atol_auto_update_test_base()
-
- model.setup_experiment()
- model.initialize()
- opts["initialize"] = False
- opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
- model.simulate(options=opts, algorithm=NoSolveAlg)
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
-
- def test_atol_auto_update5(self):
- """
- Tests that atol is automatically set and depends on rtol.
- """
- model, opts = self.setup_atol_auto_update_test_base()
-
- opts["CVode_options"]["rtol"] = 1e-6
- model.simulate(options=opts, algorithm=NoSolveAlg)
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [3e-8, 3e-8])
+ opts["CVode_options"]["rtol"] = 1e-6
+ model.simulate(options=opts, algorithm=NoSolveAlg)
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [3e-8, 3e-8])
- def test_atol_auto_update6(self):
- """
- Tests that rtol doesn't affect explicitly set atol.
- """
- model, opts = self.setup_atol_auto_update_test_base()
+ def test_atol_auto_update6(self):
+ """
+ Tests that rtol doesn't affect explicitly set atol.
+ """
+ model, opts = self.setup_atol_auto_update_test_base()
- opts["CVode_options"]["rtol"] = 1e-9
- opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
- model.simulate(options=opts, algorithm=NoSolveAlg)
- np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
+ opts["CVode_options"]["rtol"] = 1e-9
+ opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01])
+ model.simulate(options=opts, algorithm=NoSolveAlg)
+ np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03])
class Test_FMUModelME2:
@@ -1153,17 +1152,17 @@ def test_unzipped_fmu_exceptions(self):
""" Verify exception is raised if 'fmu' is a file and allow_unzipped_fmu is set to True, with FMUModelME2. """
err_msg = "Argument named 'fmu' must be a directory if argument 'allow_unzipped_fmu' is set to True."
with pytest.raises(FMUException, match = err_msg):
- model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=False, allow_unzipped_fmu=True)
+ FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=False, allow_unzipped_fmu=True)
def test_invalid_binary(self):
err_msg = "The FMU could not be loaded."
with pytest.raises(InvalidBinaryException, match = err_msg):
- model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=True)
+ FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=True)
def test_invalid_version(self):
err_msg = "The FMU version is not supported by this class"
with pytest.raises(InvalidVersionException, match = err_msg):
- model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "RLC_Circuit.fmu"), _connect_dll=True)
+ FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "RLC_Circuit.fmu"), _connect_dll=True)
def test_estimate_directional_derivatives_linearstate(self):
model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStateSpace.fmu"), _connect_dll=False)
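Note on the atol auto-update tests earlier in this file: they encode a heuristic for CVode's absolute tolerance in which a default atol is derived from rtol and the state nominals, an atol expressed as a scalar factor times the pre-initialization nominals is rescaled once the post-initialization nominals are known, and an explicitly chosen atol is left untouched. A minimal sketch of that rule, inferred purely from the assertions above (the function name and exact logic are illustrative, not PyFMI's implementation):

    import numpy as np

    def _maybe_rescale_atol(atol, rtol, pre_init_nominals, post_init_nominals):
        # Hypothetical sketch of the heuristic exercised by test_atol_auto_update1-6.
        if atol is None:
            # test_atol_auto_update5: default atol depends on rtol and the nominals
            return 0.01 * rtol * np.asarray(post_init_nominals)
        if pre_init_nominals is None:
            # test_atol_auto_update3: nominals never retrieved before init -> keep atol
            return np.asarray(atol)
        factors = np.asarray(atol) / np.asarray(pre_init_nominals)
        if np.allclose(factors, factors[0]):
            # test_atol_auto_update1/6: atol == factor * pre_init_nominals is rescaled
            return factors[0] * np.asarray(post_init_nominals)
        # test_atol_auto_update2/4: explicitly chosen atol is left untouched
        return np.asarray(atol)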
diff --git a/tests/test_fmi_coupled.py b/tests/test_fmi_coupled.py
index 08899f8a..d1fe625e 100644
--- a/tests/test_fmi_coupled.py
+++ b/tests/test_fmi_coupled.py
@@ -15,81 +15,78 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import pytest
import os
+import pytest
from pyfmi.fmi import FMUModelME2
from pyfmi.fmi_coupled import CoupledFMUModelME2
import pyfmi.fmi as fmi
-from pyfmi.tests.test_util import Dummy_FMUModelME2
+from pyfmi.test_util import Dummy_FMUModelME2
-assimulo_installed = True
try:
import assimulo
except ImportError:
- assimulo_installed = False
+ pass
file_path = os.path.dirname(os.path.abspath(__file__))
-
me2_xml_path = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0")
-if assimulo_installed:
- class Test_CoupledFMUModelME2_Simulation:
- def test_linear_example(self):
- model_sub_1 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False)
- model_sub_2 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False)
+@pytest.mark.assimulo
+class Test_CoupledFMUModelME2_Simulation:
+ def test_linear_example(self):
+ model_sub_1 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False)
+ model_sub_2 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False)
- def sub1(*args, **kwargs):
- u1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("u1")], evaluate = False)
- a1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("a1")], evaluate = False)
- b1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("b1")], evaluate = False)
- c1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("c1")], evaluate = False)
- d1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("d1")], evaluate = False)
- x1 = model_sub_1.continuous_states[0]
- model_sub_1.set_real([model_sub_1.get_variable_valueref("y1")], c1*x1+d1*u1)
- model_sub_1.set_real([model_sub_1.get_variable_valueref("x1")], [x1])
- return a1*x1+b1*u1
-
- def sub2(*args, **kwargs):
- u2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("u2")], evaluate = False)
- a2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("a2")], evaluate = False)
- b2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("b2")], evaluate = False)
- c2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("c2")], evaluate = False)
- d2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("d2")], evaluate = False)
- x2 = model_sub_2.continuous_states[0]
- model_sub_2.set_real([model_sub_2.get_variable_valueref("y2")], c2*x2+d2*u2)
- model_sub_2.set_real([model_sub_2.get_variable_valueref("x2")], [x2])
- return a2*x2+b2*u2
-
- model_sub_1.get_derivatives = sub1
- model_sub_2.get_derivatives = sub2
-
- models = [("First", model_sub_1), ("Second", model_sub_2)]
- connections = [(model_sub_1,"y1",model_sub_2,"u2"),
- (model_sub_2,"y2",model_sub_1,"u1")]
-
- coupled = CoupledFMUModelME2(models, connections=connections)
+ def sub1(*args, **kwargs):
+ u1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("u1")], evaluate = False)
+ a1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("a1")], evaluate = False)
+ b1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("b1")], evaluate = False)
+ c1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("c1")], evaluate = False)
+ d1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("d1")], evaluate = False)
+ x1 = model_sub_1.continuous_states[0]
+ model_sub_1.set_real([model_sub_1.get_variable_valueref("y1")], c1*x1+d1*u1)
+ model_sub_1.set_real([model_sub_1.get_variable_valueref("x1")], [x1])
+ return a1*x1+b1*u1
+
+ def sub2(*args, **kwargs):
+ u2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("u2")], evaluate = False)
+ a2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("a2")], evaluate = False)
+ b2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("b2")], evaluate = False)
+ c2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("c2")], evaluate = False)
+ d2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("d2")], evaluate = False)
+ x2 = model_sub_2.continuous_states[0]
+ model_sub_2.set_real([model_sub_2.get_variable_valueref("y2")], c2*x2+d2*u2)
+ model_sub_2.set_real([model_sub_2.get_variable_valueref("x2")], [x2])
+ return a2*x2+b2*u2
+
+ model_sub_1.get_derivatives = sub1
+ model_sub_2.get_derivatives = sub2
+
+ models = [("First", model_sub_1), ("Second", model_sub_2)]
+ connections = [(model_sub_1,"y1",model_sub_2,"u2"),
+ (model_sub_2,"y2",model_sub_1,"u1")]
+
+ coupled = CoupledFMUModelME2(models, connections=connections)
- opts = {"CVode_options": {"rtol":1e-6, "atol":1e-6}, "ncp":0}
+ opts = {"CVode_options": {"rtol":1e-6, "atol":1e-6}, "ncp":0}
- res = coupled.simulate(options=opts)
+ res = coupled.simulate(options=opts)
- assert res.final("First.x1") == pytest.approx(0.08597302307099872)
- assert res.final("Second.x2") == pytest.approx(0.0083923348082567)
- assert res.initial("First.x1") == pytest.approx(1.0)
- assert res.initial("Second.x2") == pytest.approx(1.0)
-
- assert res.final("First.u1") == pytest.approx(-0.25909975860402856)
- assert res.final("Second.u2") == pytest.approx(-0.0011806893910324295)
- assert res.initial("First.u1") == pytest.approx(-17.736842105263158)
- assert res.initial("Second.u2") == pytest.approx(-14.73684210526316)
+ assert res.final("First.x1") == pytest.approx(0.08597302307099872)
+ assert res.final("Second.x2") == pytest.approx(0.0083923348082567)
+ assert res.initial("First.x1") == pytest.approx(1.0)
+ assert res.initial("Second.x2") == pytest.approx(1.0)
+
+ assert res.final("First.u1") == pytest.approx(-0.25909975860402856)
+ assert res.final("Second.u2") == pytest.approx(-0.0011806893910324295)
+ assert res.initial("First.u1") == pytest.approx(-17.736842105263158)
+ assert res.initial("Second.u2") == pytest.approx(-14.73684210526316)
class Test_CoupledFMUModelME2:
def test_reversed_connections(self):
model_sub_1 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False)
model_sub_2 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False)
- model_full = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.FullSystem.fmu"), _connect_dll=False)
models = [("First", model_sub_1), ("Second", model_sub_2)]
connections = [(model_sub_2,"y1",model_sub_1,"u2"),
@@ -107,7 +104,6 @@ def test_reversed_connections(self):
def test_inputs_list(self):
model_sub_1 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False)
model_sub_2 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False)
- model_full = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.FullSystem.fmu"), _connect_dll=False)
models = [("First", model_sub_1), ("Second", model_sub_2)]
connections = [(model_sub_1,"y1",model_sub_2,"u2"),
@@ -156,7 +152,6 @@ def test_loading(self):
CoupledFMUModelME2(models, connections)
models = [("First", model_cc_1), ("Second", model_cc_2)]
- coupled = CoupledFMUModelME2(models, connections)
connections = [("k")]
with pytest.raises(fmi.FMUException):
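The removed `if assimulo_installed:` guards are replaced by the `@pytest.mark.assimulo` marker registered in pytest.ini. How marked tests are deselected when Assimulo is absent is not shown in this diff; one possible mechanism is a conftest.py hook along the following lines (a hypothetical sketch, not necessarily the project's actual setup), and marked tests can also be excluded explicitly with `pytest -m "not assimulo"`:

    # conftest.py (sketch): skip assimulo-marked tests when Assimulo is not installed
    import pytest

    try:
        import assimulo  # noqa: F401
        ASSIMULO_INSTALLED = True
    except ImportError:
        ASSIMULO_INSTALLED = False

    def pytest_collection_modifyitems(config, items):
        if ASSIMULO_INSTALLED:
            return
        skip_assimulo = pytest.mark.skip(reason="assimulo is not installed")
        for item in items:
            if "assimulo" in item.keywords:
                item.add_marker(skip_assimulo)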
diff --git a/tests/test_fmi_estimate.py b/tests/test_fmi_estimate.py
index 7477dd0b..5e5dcbfe 100644
--- a/tests/test_fmi_estimate.py
+++ b/tests/test_fmi_estimate.py
@@ -16,93 +16,93 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
+import pytest
import numpy as np
-from pyfmi.tests.test_util import Dummy_FMUModelME2
+from pyfmi.test_util import Dummy_FMUModelME2
from scipy.io.matlab import loadmat
-assimulo_installed = True
try:
import assimulo
except ImportError:
- assimulo_installed = False
+ pass
file_path = os.path.dirname(os.path.abspath(__file__))
-if assimulo_installed:
- class Test_FMUModelME2_Estimate:
- def test_quadtank_estimate(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "QuadTankPack_Sim_QuadTank.fmu"), _connect_dll=False)
-
- g = model.get_real([model.get_variable_valueref("qt.g")], evaluate = False)
- g1_nmp = model.get_real([model.get_variable_valueref("qt.g1_nmp")], evaluate = False)
- g2_nmp = model.get_real([model.get_variable_valueref("qt.g2_nmp")], evaluate = False)
- k1_nmp = model.get_real([model.get_variable_valueref("qt.k1_nmp")], evaluate = False)
- k2_nmp = model.get_real([model.get_variable_valueref("qt.k2_nmp")], evaluate = False)
- A1 = model.get_real([model.get_variable_valueref("qt.A1")], evaluate = False)
- A2 = model.get_real([model.get_variable_valueref("qt.A2")], evaluate = False)
- A3 = model.get_real([model.get_variable_valueref("qt.A3")], evaluate = False)
- A4 = model.get_real([model.get_variable_valueref("qt.A4")], evaluate = False)
- a3 = model.get_real([model.get_variable_valueref("qt.a3")], evaluate = False)
- a4 = model.get_real([model.get_variable_valueref("qt.a4")], evaluate = False)
- u1_vref = model.get_variable_valueref("u1")
- u2_vref = model.get_variable_valueref("u2")
- a1_vref = model.get_variable_valueref("qt.a1")
- a2_vref = model.get_variable_valueref("qt.a2")
-
- def f(*args, **kwargs):
- x1 = model.continuous_states[0]
- x2 = model.continuous_states[1]
- x3 = model.continuous_states[2]
- x4 = model.continuous_states[3]
-
- u1 = model.get_real([u1_vref], evaluate = False)
- u2 = model.get_real([u2_vref], evaluate = False)
- a1 = model.get_real([a1_vref], evaluate = False)
- a2 = model.get_real([a2_vref], evaluate = False)
-
- der_x1 = -a1/A1*np.sqrt(2.*g*x1) + a3/A1*np.sqrt(2*g*x3) + g1_nmp*k1_nmp/A1*u1
- der_x2 = -a2/A2*np.sqrt(2.*g*x2) + a4/A2*np.sqrt(2*g*x4) + g2_nmp*k2_nmp/A2*u2
- der_x3 = -a3/A3*np.sqrt(2.*g*x3) + (1.-g2_nmp)*k2_nmp/A3*u2
- der_x4 = -a4/A4*np.sqrt(2.*g*x4) + (1.-g1_nmp)*k1_nmp/A4*u1
- return np.concatenate([der_x1, der_x2, der_x3, der_x4])
-
- model.get_derivatives = f
-
- # Load measurement data from file
- data = loadmat(os.path.join(file_path, "files", "Results", "qt_par_est_data.mat"), appendmat=False)
-
- # Extract data series
- t_meas = data['t'][6000::100,0]-60
- y1_meas = data['y1_f'][6000::100,0]/100
- y2_meas = data['y2_f'][6000::100,0]/100
- y3_meas = data['y3_d'][6000::100,0]/100
- y4_meas = data['y4_d'][6000::100,0]/100
- u1 = data['u1_d'][6000::100,0]
- u2 = data['u2_d'][6000::100,0]
-
- # Build input trajectory matrix for use in simulation
- u = np.transpose(np.vstack((t_meas,u1,u2)))
-
- # Estimation of 2 parameters
- data = np.vstack((t_meas, y1_meas, y2_meas)).transpose()
-
- res = model.estimate(parameters=["qt.a1", "qt.a2"],
- measurements = (['qt.x1', 'qt.x2'], data), input=(['u1','u2'],u))
-
-
- model.reset()
-
- # Set optimal values for a1 and a2 into the model
- model.set(['qt.a1'], res["qt.a1"])
- model.set(['qt.a2'], res["qt.a2"])
-
- # Simulate model response with optimal parameters a1 and a2
- res = model.simulate(input=(['u1','u2'], u), start_time=0., final_time=60)
-
- assert np.abs(res.final('qt.x1') - 0.07060188) < 1e-3, "Was: " + str(res.final('qt.x1')) + ", expected: 0.07060188"
- assert np.abs(res.final('qt.x2') - 0.06654621) < 1e-3
- assert np.abs(res.final('qt.x3') - 0.02736549) < 1e-3
- assert np.abs(res.final('qt.x4') - 0.02789857) < 1e-3
- assert np.abs(res.final('u1') - 6.0) < 1e-3
- assert np.abs(res.final('u2') - 5.0) < 1e-3
+@pytest.mark.assimulo
+class Test_FMUModelME2_Estimate:
+ def test_quadtank_estimate(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "QuadTankPack_Sim_QuadTank.fmu"), _connect_dll=False)
+
+ g = model.get_real([model.get_variable_valueref("qt.g")], evaluate = False)
+ g1_nmp = model.get_real([model.get_variable_valueref("qt.g1_nmp")], evaluate = False)
+ g2_nmp = model.get_real([model.get_variable_valueref("qt.g2_nmp")], evaluate = False)
+ k1_nmp = model.get_real([model.get_variable_valueref("qt.k1_nmp")], evaluate = False)
+ k2_nmp = model.get_real([model.get_variable_valueref("qt.k2_nmp")], evaluate = False)
+ A1 = model.get_real([model.get_variable_valueref("qt.A1")], evaluate = False)
+ A2 = model.get_real([model.get_variable_valueref("qt.A2")], evaluate = False)
+ A3 = model.get_real([model.get_variable_valueref("qt.A3")], evaluate = False)
+ A4 = model.get_real([model.get_variable_valueref("qt.A4")], evaluate = False)
+ a3 = model.get_real([model.get_variable_valueref("qt.a3")], evaluate = False)
+ a4 = model.get_real([model.get_variable_valueref("qt.a4")], evaluate = False)
+ u1_vref = model.get_variable_valueref("u1")
+ u2_vref = model.get_variable_valueref("u2")
+ a1_vref = model.get_variable_valueref("qt.a1")
+ a2_vref = model.get_variable_valueref("qt.a2")
+
+ def f(*args, **kwargs):
+ x1 = model.continuous_states[0]
+ x2 = model.continuous_states[1]
+ x3 = model.continuous_states[2]
+ x4 = model.continuous_states[3]
+
+ u1 = model.get_real([u1_vref], evaluate = False)
+ u2 = model.get_real([u2_vref], evaluate = False)
+ a1 = model.get_real([a1_vref], evaluate = False)
+ a2 = model.get_real([a2_vref], evaluate = False)
+
+ der_x1 = -a1/A1*np.sqrt(2.*g*x1) + a3/A1*np.sqrt(2*g*x3) + g1_nmp*k1_nmp/A1*u1
+ der_x2 = -a2/A2*np.sqrt(2.*g*x2) + a4/A2*np.sqrt(2*g*x4) + g2_nmp*k2_nmp/A2*u2
+ der_x3 = -a3/A3*np.sqrt(2.*g*x3) + (1.-g2_nmp)*k2_nmp/A3*u2
+ der_x4 = -a4/A4*np.sqrt(2.*g*x4) + (1.-g1_nmp)*k1_nmp/A4*u1
+ return np.concatenate([der_x1, der_x2, der_x3, der_x4])
+
+ model.get_derivatives = f
+
+ # Load measurement data from file
+ data = loadmat(os.path.join(file_path, "files", "Results", "qt_par_est_data.mat"), appendmat=False)
+
+ # Extract data series
+ t_meas = data['t'][6000::100,0]-60
+ y1_meas = data['y1_f'][6000::100,0]/100
+ y2_meas = data['y2_f'][6000::100,0]/100
+ y3_meas = data['y3_d'][6000::100,0]/100
+ y4_meas = data['y4_d'][6000::100,0]/100
+ u1 = data['u1_d'][6000::100,0]
+ u2 = data['u2_d'][6000::100,0]
+
+ # Build input trajectory matrix for use in simulation
+ u = np.transpose(np.vstack((t_meas,u1,u2)))
+
+ # Estimation of 2 parameters
+ data = np.vstack((t_meas, y1_meas, y2_meas)).transpose()
+
+ res = model.estimate(parameters=["qt.a1", "qt.a2"],
+ measurements = (['qt.x1', 'qt.x2'], data), input=(['u1','u2'],u))
+
+
+ model.reset()
+
+ # Set optimal values for a1 and a2 into the model
+ model.set(['qt.a1'], res["qt.a1"])
+ model.set(['qt.a2'], res["qt.a2"])
+
+ # Simulate model response with optimal parameters a1 and a2
+ res = model.simulate(input=(['u1','u2'], u), start_time=0., final_time=60)
+
+ assert np.abs(res.final('qt.x1') - 0.07060188) < 1e-3, "Was: " + str(res.final('qt.x1')) + ", expected: 0.07060188"
+ assert np.abs(res.final('qt.x2') - 0.06654621) < 1e-3
+ assert np.abs(res.final('qt.x3') - 0.02736549) < 1e-3
+ assert np.abs(res.final('qt.x4') - 0.02789857) < 1e-3
+ assert np.abs(res.final('u1') - 6.0) < 1e-3
+ assert np.abs(res.final('u2') - 5.0) < 1e-3
diff --git a/tests/test_fmi_extended.py b/tests/test_fmi_extended.py
index f0045d89..dc47761c 100644
--- a/tests/test_fmi_extended.py
+++ b/tests/test_fmi_extended.py
@@ -21,7 +21,6 @@
from pyfmi.fmi_extended import FMUModelME1Extended
file_path = os.path.dirname(os.path.abspath(__file__))
-
me1_xml_path = os.path.join(file_path, "files", "FMUs", "XML", "ME1.0")
class Test_FMUModelME1Extended:
diff --git a/tests/test_fmi_master.py b/tests/test_fmi_master.py
index ea18f579..b1e29012 100644
--- a/tests/test_fmi_master.py
+++ b/tests/test_fmi_master.py
@@ -23,12 +23,11 @@
from pyfmi import Master
from pyfmi.fmi import FMUException, FMUModelCS2, FMUModelME2
-from pyfmi.tests.test_util import Dummy_FMUModelCS2
+from pyfmi.test_util import Dummy_FMUModelCS2
from pyfmi.common.io import ResultHandler, ResultSizeError
from pyfmi.common.algorithm_drivers import UnrecognizedOptionError
file_path = os.path.dirname(os.path.abspath(__file__))
-
cs2_xml_path = os.path.join(file_path, "files", "FMUs", "XML", "CS2.0")
me2_xml_path = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0")
@@ -167,11 +166,10 @@ def test_basic_simulation_memory(self):
opts = {"result_handling":"memory"}
self._basic_simulation(opts)
- @testattr(stddist = True)
def test_basic_simulation_max_result_size(self):
opts = {"result_max_size":10000}
- with nose.tools.assert_raises(ResultSizeError):
+ with pytest.raises(ResultSizeError):
self._basic_simulation(opts)
def test_basic_simulation_mat_file_naming(self):
@@ -457,29 +455,20 @@ def test_error_check_invalid_value(self):
models, connections = self._load_basic_simulation()
test_values = [1/2, 1/3, "0.5", False]
- # TODO: tidy up with pytest
expected_substr = "Option 'result_downsampling_factor' must be an integer,"
+ ## TODO: Pytest parametrization
for value in test_values:
- try:
+ with pytest.raises(Exception, match = expected_substr):
self._sim_basic_simulation(models, connections, {'result_downsampling_factor': value})
- error_raised = False
- except FMUException as e:
- error_raised = True
- assert expected_substr in str(e), f"Error was {str(e)}, expected substring {expected_substr}"
- assert error_raised
-
- # TODO: Test case that supports storing FMU states required
+
+ @pytest.mark.skip(reason = "Error controlled simulation is only supported if storing FMU states is available.")
def test_error_controlled_with_downsampling(self):
models, connections = self._load_basic_simulation()
uptate_options = {'result_downsampling_factor': 2,
'error_controlled': True}
- # TODO: Tidy up with pytest
msg = "Result downsampling not supported for error controlled simulation, no downsampling will be performed."
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("default")
+ with pytest.warns(UserWarning, match = msg):
self._sim_basic_simulation(models, connections, uptate_options)
- # there will be some other warnings from FMU loading
- assert f"UserWarning('{msg}')" in [i.message for i in w]
def test_downsample_result_with_store_step_before_update(self):
""" Test result_downsampling_factor with store_step_before_update. """
diff --git a/tests/test_io.py b/tests/test_io.py
index 5853d85c..9b632c27 100644
--- a/tests/test_io.py
+++ b/tests/test_io.py
@@ -48,15 +48,14 @@
)
import pyfmi.fmi as fmi
-from pyfmi.tests.test_util import Dummy_FMUModelME1, Dummy_FMUModelCS1, Dummy_FMUModelME2, Dummy_FMUModelCS2
+from pyfmi.test_util import Dummy_FMUModelME1, Dummy_FMUModelCS1, Dummy_FMUModelME2, Dummy_FMUModelCS2
file_path = os.path.dirname(os.path.abspath(__file__))
-assimulo_installed = True
try:
import assimulo
except ImportError:
- assimulo_installed = False
+ pass
def _run_negated_alias(model, result_type, result_file_name=""):
opts = model.simulate_options()
@@ -75,102 +74,102 @@ def _run_negated_alias(model, result_type, result_file_name=""):
for i in range(len(x)):
assert x[i] == -y[i]
-if assimulo_installed:
- class TestResultFileText_Simulation:
+@pytest.mark.assimulo
+class TestResultFileText_Simulation:
- def _correct_syntax_after_simulation_failure(self, result_file_name):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+ def _correct_syntax_after_simulation_failure(self, result_file_name):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- def f(*args, **kwargs):
- if simple_alias.time > 0.5:
- raise Exception
- return -simple_alias.continuous_states
+ def f(*args, **kwargs):
+ if simple_alias.time > 0.5:
+ raise Exception
+ return -simple_alias.continuous_states
- simple_alias.get_derivatives = f
+ simple_alias.get_derivatives = f
- opts = simple_alias.simulate_options()
- opts["result_handling"] = "file"
- opts["solver"] = "ExplicitEuler"
- opts["result_file_name"] = result_file_name
+ opts = simple_alias.simulate_options()
+ opts["result_handling"] = "file"
+ opts["solver"] = "ExplicitEuler"
+ opts["result_file_name"] = result_file_name
- successful_simulation = False
- try:
- res = simple_alias.simulate(options=opts)
- successful_simulation = True #The above simulation should fail...
- except Exception:
- pass
+ successful_simulation = False
+ try:
+ res = simple_alias.simulate(options=opts)
+ successful_simulation = True #The above simulation should fail...
+ except Exception:
+ pass
- if successful_simulation:
- raise Exception
+ if successful_simulation:
+ raise Exception
- result = ResultDymolaTextual(result_file_name)
+ result = ResultDymolaTextual(result_file_name)
- x = result.get_variable_data("x").x
- y = result.get_variable_data("y").x
+ x = result.get_variable_data("x").x
+ y = result.get_variable_data("y").x
- assert len(x) > 2
+ assert len(x) > 2
- for i in range(len(x)):
- assert x[i] == -y[i]
+ for i in range(len(x)):
+ assert x[i] == -y[i]
- def test_correct_file_after_simulation_failure(self):
- self._correct_syntax_after_simulation_failure("NegatedAlias_result.txt")
+ def test_correct_file_after_simulation_failure(self):
+ self._correct_syntax_after_simulation_failure("NegatedAlias_result.txt")
- def test_correct_stream_after_simulation_failure(self):
- stream = StringIO("")
- self._correct_syntax_after_simulation_failure(stream)
+ def test_correct_stream_after_simulation_failure(self):
+ stream = StringIO("")
+ self._correct_syntax_after_simulation_failure(stream)
- def test_read_all_variables_using_model_variables(self):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+ def test_read_all_variables_using_model_variables(self):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- opts = simple_alias.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerFile(simple_alias)
+ opts = simple_alias.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerFile(simple_alias)
- res = simple_alias.simulate(options=opts)
+ res = simple_alias.simulate(options=opts)
- for var in simple_alias.get_model_variables():
- res[var]
+ for var in simple_alias.get_model_variables():
+ res[var]
- def test_read_alias_derivative(self):
- simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False)
+ def test_read_alias_derivative(self):
+ simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False)
- opts = simple_alias.simulate_options()
- opts["result_handling"] = "file"
+ opts = simple_alias.simulate_options()
+ opts["result_handling"] = "file"
- res = simple_alias.simulate(options=opts)
+ res = simple_alias.simulate(options=opts)
- derx = res["der(x)"]
- dery = res["der(y)"]
+ derx = res["der(x)"]
+ dery = res["der(y)"]
- assert len(derx) > 0
- for i in range(len(derx)):
- assert derx[i] == dery[i]
+ assert len(derx) > 0
+ for i in range(len(derx)):
+ assert derx[i] == dery[i]
- def test_no_variables(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
+ def test_no_variables(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = "file"
- opts["result_file_name"] = "NoMatchingTest.txt"
- opts["filter"] = "NoMatchingVariables"
+ opts = model.simulate_options()
+ opts["result_handling"] = "file"
+ opts["result_file_name"] = "NoMatchingTest.txt"
+ opts["filter"] = "NoMatchingVariables"
- res = model.simulate(options=opts)
+ res = model.simulate(options=opts)
- assert 1.0 == pytest.approx(res["time"][-1])
+ assert 1.0 == pytest.approx(res["time"][-1])
- def test_enumeration_file(self):
+ def test_enumeration_file(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False)
- data_type = model.get_variable_data_type("mode")
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False)
+ data_type = model.get_variable_data_type("mode")
- assert data_type == fmi.FMI2_ENUMERATION
+ assert data_type == fmi.FMI2_ENUMERATION
- opts = model.simulate_options()
- opts["result_handling"] = "file"
+ opts = model.simulate_options()
+ opts["result_handling"] = "file"
- res = model.simulate(options=opts)
- res["mode"] #Check that the enumeration variable is in the dict, otherwise exception
+ res = model.simulate(options=opts)
+ res["mode"] #Check that the enumeration variable is in the dict, otherwise exception
class TestResultFileText:
def _get_description(self, result_file_name):
@@ -343,226 +342,226 @@ def readline(self):
with pytest.raises(JIOError, match = msg):
res = ResultDymolaTextual(stream)
-if assimulo_installed:
- class TestResultMemory_Simulation:
- def test_memory_options_me1(self):
- simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
- _run_negated_alias(simple_alias, "memory")
+@pytest.mark.assimulo
+class TestResultMemory_Simulation:
+ def test_memory_options_me1(self):
+ simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+ _run_negated_alias(simple_alias, "memory")
- def test_memory_options_me2(self):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- _run_negated_alias(simple_alias, "memory")
+ def test_memory_options_me2(self):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+ _run_negated_alias(simple_alias, "memory")
- def test_only_parameters(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
+ def test_only_parameters(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = "memory"
- opts["filter"] = "p2"
+ opts = model.simulate_options()
+ opts["result_handling"] = "memory"
+ opts["filter"] = "p2"
- res = model.simulate(options=opts)
+ res = model.simulate(options=opts)
- assert 3.0 == pytest.approx(res["p2"][0])
- assert not isinstance(res.initial("p2"), np.ndarray)
- assert not isinstance(res.final("p2"), np.ndarray)
+ assert 3.0 == pytest.approx(res["p2"][0])
+ assert not isinstance(res.initial("p2"), np.ndarray)
+ assert not isinstance(res.final("p2"), np.ndarray)
- def test_no_variables(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
+ def test_no_variables(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = "memory"
- opts["filter"] = "NoMatchingVariables"
+ opts = model.simulate_options()
+ opts["result_handling"] = "memory"
+ opts["filter"] = "NoMatchingVariables"
- res = model.simulate(options=opts)
+ res = model.simulate(options=opts)
- assert 1.0 == pytest.approx(res["time"][-1])
+ assert 1.0 == pytest.approx(res["time"][-1])
- def test_enumeration_memory(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False)
- data_type = model.get_variable_data_type("mode")
+ def test_enumeration_memory(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False)
+ data_type = model.get_variable_data_type("mode")
- assert data_type == fmi.FMI2_ENUMERATION
+ assert data_type == fmi.FMI2_ENUMERATION
- opts = model.simulate_options()
- opts["result_handling"] = "memory"
+ opts = model.simulate_options()
+ opts["result_handling"] = "memory"
- res = model.simulate(options=opts)
- res["mode"] #Check that the enumeration variable is in the dict, otherwise exception
+ res = model.simulate(options=opts)
+ res["mode"] #Check that the enumeration variable is in the dict, otherwise exception
class TestResultMemory:
pass
-if assimulo_installed:
- class TestResultFileBinary_Simulation:
- def _correct_file_after_simulation_failure(self, result_file_name):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+@pytest.mark.assimulo
+class TestResultFileBinary_Simulation:
+ def _correct_file_after_simulation_failure(self, result_file_name):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- def f(*args, **kwargs):
- if simple_alias.time > 0.5:
- raise Exception
- return -simple_alias.continuous_states
+ def f(*args, **kwargs):
+ if simple_alias.time > 0.5:
+ raise Exception
+ return -simple_alias.continuous_states
- simple_alias.get_derivatives = f
+ simple_alias.get_derivatives = f
- opts = simple_alias.simulate_options()
- opts["result_handling"] = "binary"
- opts["result_file_name"] = result_file_name
- opts["solver"] = "ExplicitEuler"
+ opts = simple_alias.simulate_options()
+ opts["result_handling"] = "binary"
+ opts["result_file_name"] = result_file_name
+ opts["solver"] = "ExplicitEuler"
- successful_simulation = False
- try:
- res = simple_alias.simulate(options=opts)
- successful_simulation = True #The above simulation should fail...
- except Exception:
- pass
+ successful_simulation = False
+ try:
+ res = simple_alias.simulate(options=opts)
+ successful_simulation = True #The above simulation should fail...
+ except Exception:
+ pass
- if successful_simulation:
- raise Exception
+ if successful_simulation:
+ raise Exception
- result = ResultDymolaBinary(result_file_name)
+ result = ResultDymolaBinary(result_file_name)
- x = result.get_variable_data("x").x
- y = result.get_variable_data("y").x
+ x = result.get_variable_data("x").x
+ y = result.get_variable_data("y").x
- assert len(x) > 2
+ assert len(x) > 2
- for i in range(len(x)):
- assert x[i] == -y[i]
+ for i in range(len(x)):
+ assert x[i] == -y[i]
- def test_work_flow_me2_file(self):
- self._correct_file_after_simulation_failure("NegatedAlias_result.mat")
+ def test_work_flow_me2_file(self):
+ self._correct_file_after_simulation_failure("NegatedAlias_result.mat")
- def test_work_flow_me2_stream(self):
- stream = BytesIO()
- self._correct_file_after_simulation_failure(stream)
+ def test_work_flow_me2_stream(self):
+ stream = BytesIO()
+ self._correct_file_after_simulation_failure(stream)
- def _only_parameters(self, result_file_name):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
+ def _only_parameters(self, result_file_name):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerBinaryFile(model)
- opts["filter"] = "p2"
- opts["result_file_name"] = result_file_name
+ opts = model.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerBinaryFile(model)
+ opts["filter"] = "p2"
+ opts["result_file_name"] = result_file_name
- res = model.simulate(options=opts)
+ res = model.simulate(options=opts)
- assert 3.0 == pytest.approx(res["p2"][0])
+ assert 3.0 == pytest.approx(res["p2"][0])
- def test_only_parameters_file(self):
- self._only_parameters("ParameterAlias_result.mat")
+ def test_only_parameters_file(self):
+ self._only_parameters("ParameterAlias_result.mat")
- def test_only_parameters_stream(self):
- stream = BytesIO()
- self._only_parameters(stream)
+ def test_only_parameters_stream(self):
+ stream = BytesIO()
+ self._only_parameters(stream)
- def _no_variables(self, result_file_name):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
+ def _no_variables(self, result_file_name):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerBinaryFile(model)
- opts["filter"] = "NoMatchingVariables"
- opts["result_file_name"] = result_file_name
+ opts = model.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerBinaryFile(model)
+ opts["filter"] = "NoMatchingVariables"
+ opts["result_file_name"] = result_file_name
- res = model.simulate(options=opts)
+ res = model.simulate(options=opts)
- assert 1.0 == pytest.approx(res["time"][-1])
+ assert 1.0 == pytest.approx(res["time"][-1])
- def test_no_variables_file(self):
- self._no_variables("ParameterAlias_result.mat")
+ def test_no_variables_file(self):
+ self._no_variables("ParameterAlias_result.mat")
- def test_no_variables_stream(self):
- stream = BytesIO()
- self._no_variables(stream)
+ def test_no_variables_stream(self):
+ stream = BytesIO()
+ self._no_variables(stream)
- def test_read_alias_derivative(self):
- simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False)
+ def test_read_alias_derivative(self):
+ simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False)
- opts = simple_alias.simulate_options()
- opts["result_handling"] = "binary"
+ opts = simple_alias.simulate_options()
+ opts["result_handling"] = "binary"
- res = simple_alias.simulate(options=opts)
+ res = simple_alias.simulate(options=opts)
- derx = res["der(x)"]
- dery = res["der(y)"]
+ derx = res["der(x)"]
+ dery = res["der(y)"]
- assert len(derx) > 0
- for i in range(len(derx)):
- assert derx[i] == dery[i]
+ assert len(derx) > 0
+ for i in range(len(derx)):
+ assert derx[i] == dery[i]
- def test_enumeration_binary(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False)
- data_type = model.get_variable_data_type("mode")
+ def test_enumeration_binary(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False)
+ data_type = model.get_variable_data_type("mode")
- assert data_type == fmi.FMI2_ENUMERATION
+ assert data_type == fmi.FMI2_ENUMERATION
- opts = model.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerBinaryFile(model)
+ opts = model.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerBinaryFile(model)
- res = model.simulate(options=opts)
- res["mode"] #Check that the enumeration variable is in the dict, otherwise exception
+ res = model.simulate(options=opts)
+ res["mode"] #Check that the enumeration variable is in the dict, otherwise exception
- def test_integer_start_time(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False)
+ def test_integer_start_time(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = "binary"
+ opts = model.simulate_options()
+ opts["result_handling"] = "binary"
- #Assert that there is no exception when reloading the file
- res = model.simulate(start_time=0, options=opts)
+ #Assert that there is no exception when reloading the file
+ res = model.simulate(start_time=0, options=opts)
- def test_read_all_variables_using_model_variables(self):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+ def test_read_all_variables_using_model_variables(self):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- opts = simple_alias.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerBinaryFile(simple_alias)
+ opts = simple_alias.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerBinaryFile(simple_alias)
- res = simple_alias.simulate(options=opts)
+ res = simple_alias.simulate(options=opts)
- for var in simple_alias.get_model_variables():
- res[var]
+ for var in simple_alias.get_model_variables():
+ res[var]
- def test_variable_alias_custom_handler(self):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+ def test_variable_alias_custom_handler(self):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- opts = simple_alias.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerBinaryFile(simple_alias)
+ opts = simple_alias.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerBinaryFile(simple_alias)
- res = simple_alias.simulate(options=opts)
+ res = simple_alias.simulate(options=opts)
- # test that res['y'] returns a vector of the same length as the time
- # vector
- assert len(res['y']) ==len(res['time']), "Wrong size of result vector."
+ # test that res['y'] returns a vector of the same length as the time
+ # vector
+ assert len(res['y']) ==len(res['time']), "Wrong size of result vector."
- x = res["x"]
- y = res["y"]
+ x = res["x"]
+ y = res["y"]
- for i in range(len(x)):
- assert x[i] == -y[i]
+ for i in range(len(x)):
+ assert x[i] == -y[i]
- def test_binary_options_me1(self):
- simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
- _run_negated_alias(simple_alias, "binary")
+ def test_binary_options_me1(self):
+ simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+ _run_negated_alias(simple_alias, "binary")
- def test_binary_options_me2(self):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- _run_negated_alias(simple_alias, "binary")
+ def test_binary_options_me2(self):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+ _run_negated_alias(simple_alias, "binary")
- def test_binary_options_me1_stream(self):
- simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
- stream = BytesIO()
- _run_negated_alias(simple_alias, "binary", stream)
+ def test_binary_options_me1_stream(self):
+ simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+ stream = BytesIO()
+ _run_negated_alias(simple_alias, "binary", stream)
- def test_binary_options_me2_stream(self):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- stream = BytesIO()
- _run_negated_alias(simple_alias, "binary", stream)
+ def test_binary_options_me2_stream(self):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+ stream = BytesIO()
+ _run_negated_alias(simple_alias, "binary", stream)
class TestResultFileBinary:
def _get_description_unicode(self, result_file_name):
@@ -1408,83 +1407,84 @@ def test_get_last_result_file3(self):
test_model._result_file = 123 # arbitrary number, just verify get_last_result_file works
assert test_model.get_last_result_file() is None, "Expected None but got {}".format(test_model.get_last_result_file())
-if assimulo_installed:
- class TestResultCSVTextual_Simulation:
- def test_only_parameters(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
- opts = model.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerCSV(model)
- opts["filter"] = "p2"
+@pytest.mark.assimulo
+class TestResultCSVTextual_Simulation:
+ def test_only_parameters(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
- res = model.simulate(options=opts)
+ opts = model.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerCSV(model)
+ opts["filter"] = "p2"
- assert 3.0 == pytest.approx(res["p2"][0])
+ res = model.simulate(options=opts)
- def test_no_variables(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
+ assert 3.0 == pytest.approx(res["p2"][0])
- opts = model.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerCSV(model)
- opts["filter"] = "NoMatchingVariables"
- opts["result_file_name"] = "NoMatchingTest.csv"
+ def test_no_variables(self):
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False)
+
+ opts = model.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerCSV(model)
+ opts["filter"] = "NoMatchingVariables"
+ opts["result_file_name"] = "NoMatchingTest.csv"
- res = model.simulate(options=opts)
+ res = model.simulate(options=opts)
- assert 1.0 == pytest.approx(res["time"][-1])
+ assert 1.0 == pytest.approx(res["time"][-1])
- def test_variable_alias_custom_handler(self):
- simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+ def test_variable_alias_custom_handler(self):
+ simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
- opts = simple_alias.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerCSV(simple_alias)
+ opts = simple_alias.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerCSV(simple_alias)
- res = simple_alias.simulate(options=opts)
+ res = simple_alias.simulate(options=opts)
- # test that res['y'] returns a vector of the same length as the time
- # vector
- assert len(res['y']) ==len(res['time']), "Wrong size of result vector."
+ # test that res['y'] returns a vector of the same length as the time
+ # vector
+ assert len(res['y']) ==len(res['time']), "Wrong size of result vector."
- x = res["x"]
- y = res["y"]
+ x = res["x"]
+ y = res["y"]
- for i in range(len(x)):
- assert x[i] == -y[i]
+ for i in range(len(x)):
+ assert x[i] == -y[i]
- def test_csv_options_me1(self):
- simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
- _run_negated_alias(simple_alias, "csv")
+ def test_csv_options_me1(self):
+ simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+ _run_negated_alias(simple_alias, "csv")
- def test_csv_options_me2(self):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- _run_negated_alias(simple_alias, "csv")
+ def test_csv_options_me2(self):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+ _run_negated_alias(simple_alias, "csv")
- def test_csv_options_me1_stream(self):
- simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
- stream = StringIO()
- _run_negated_alias(simple_alias, "csv", stream)
+ def test_csv_options_me1_stream(self):
+ simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+ stream = StringIO()
+ _run_negated_alias(simple_alias, "csv", stream)
- def test_csv_options_me2(self):
- simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
- stream = StringIO()
- _run_negated_alias(simple_alias, "csv", stream)
+ def test_csv_options_me2_stream(self):
+ simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False)
+ stream = StringIO()
+ _run_negated_alias(simple_alias, "csv", stream)
- def test_enumeration_csv(self):
+ def test_enumeration_csv(self):
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False)
- data_type = model.get_variable_data_type("mode")
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False)
+ data_type = model.get_variable_data_type("mode")
- assert data_type == fmi.FMI2_ENUMERATION
+ assert data_type == fmi.FMI2_ENUMERATION
- opts = model.simulate_options()
- opts["result_handling"] = "custom"
- opts["result_handler"] = ResultHandlerCSV(model)
+ opts = model.simulate_options()
+ opts["result_handling"] = "custom"
+ opts["result_handler"] = ResultHandlerCSV(model)
- res = model.simulate(options=opts)
- res["mode"] #Check that the enumeration variable is in the dict, otherwise exception
+ res = model.simulate(options=opts)
+ res["mode"] #Check that the enumeration variable is in the dict, otherwise exception
class TestResultCSVTextual:
@@ -1592,7 +1592,7 @@ def test_csv_options_cs1(self):
def test_csv_options_cs2(self):
simple_alias = Dummy_FMUModelCS2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "NegatedAlias.fmu"), _connect_dll=False)
- self._run_negated_alias(self, simple_alias)
+ _run_negated_alias(simple_alias, "csv")
class TestResultDymolaBinary:
@@ -1773,9 +1773,8 @@ def _test_get_variables_data(self, dynamic_diagnostics: bool, nbr_of_calls: int,
assert data_to_return, "Something went wrong, no test data was generated"
return data_to_return
- @testattr(stddist = True)
def test_get_variables_data_values0(self):
- """ Verifing values from get_variables_data. """
+ """ Verifying values from get_variables_data. """
vars_to_test = ['J4.phi']
test_data_sets = self._test_get_variables_data(False, 3, None, vars_to_test, lambda x: None, "TestFile00.mat")
@@ -1788,9 +1787,8 @@ def test_get_variables_data_values0(self):
for index, test_data in test_data_sets.items():
np.testing.assert_array_almost_equal(test_data['J4.phi'].x, reference_data[index])
- @testattr(stddist = True)
def test_get_variables_data_values1(self):
- """ Verifing values from get_variables_data, with dynamic_diagnostics = True. """
+ """ Verifying values from get_variables_data, with dynamic_diagnostics = True. """
vars_to_test = ['time', 'J4.phi', '@Diagnostics.step_time', '@Diagnostics.nbr_steps']
test_data_sets = self._test_get_variables_data(True, 5, 3, vars_to_test, lambda x: None, "TestFile01.mat")
@@ -1807,9 +1805,8 @@ def test_get_variables_data_values1(self):
for index, test_data in test_data_sets.items():
np.testing.assert_array_almost_equal(test_data['J4.phi'].x, reference_data[index])
- @testattr(stddist = True)
def test_get_variables_data_values2(self):
- """ Verifing values from get_variables_data, retrieving partial trajectories. """
+ """ Verifying values from get_variables_data, retrieving partial trajectories. """
vars_to_test = ['time', 'J4.phi']
test_data_sets = self._test_get_variables_data(False, 5, None, vars_to_test, lambda x: x + 1, "TestFile02.mat")
@@ -1826,7 +1823,7 @@ def test_get_variables_data_values2(self):
@testattr(stddist = True)
def test_get_variables_data_values3(self):
- """ Verifing values from get_variables_data, and only asking for diagnostic variables. """
+ """ Verifying values from get_variables_data, and only asking for diagnostic variables. """
vars_to_test = ['@Diagnostics.step_time', '@Diagnostics.nbr_steps']
test_data_sets = self._test_get_variables_data(True, 5, 1, vars_to_test, lambda x: None, "TestFile03.mat")
@@ -1853,7 +1850,7 @@ def test_get_variables_data_values3(self):
@testattr(stddist = True)
def test_get_variables_data_values4(self):
- """ Verifing values from get_variables_data, partial trajectories and checking both time and diagnostic data."""
+ """ Verifying values from get_variables_data, partial trajectories and checking both time and diagnostic data."""
vars_to_test = ['time', '@Diagnostics.nbr_steps']
test_data_sets = self._test_get_variables_data(True, 5, 1, vars_to_test, lambda x: x + 2, "TestFile04.mat")
@@ -1940,187 +1937,207 @@ def test_trajectory_lengths(self):
assert rdb.get_variables_data([], start_index = 1)[1] == 1
assert rdb.get_variables_data([], start_index = 5)[1] == 5
-if assimulo_installed:
- class TestFileSizeLimit:
+@pytest.mark.assimulo
+class TestFileSizeLimit:
+ def _setup(self, result_type, result_file_name="", fmi_type="me"):
+ if fmi_type == "me":
+ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False)
+ else:
+ model = Dummy_FMUModelCS2([], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu"), _connect_dll=False)
+
+ opts = model.simulate_options()
+ opts["result_handling"] = result_type
+ opts["result_file_name"] = result_file_name
- def _setup(self, result_type, result_file_name="", fmi_type="me"):
- if fmi_type == "me":
- model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False)
- else:
- model = Dummy_FMUModelCS2([], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu"), _connect_dll=False)
+ return model, opts
- opts = model.simulate_options()
- opts["result_handling"] = result_type
- opts["result_file_name"] = result_file_name
+ def _test_result(self, result_type, result_file_name="", max_size=1e6):
+ model, opts = self._setup(result_type, result_file_name)
- return model, opts
+ opts["result_max_size"] = max_size
- def _test_result(self, result_type, result_file_name="", max_size=1e6):
- model, opts = self._setup(result_type, result_file_name)
+ #No exception should be raised.
+ res = model.simulate(options=opts)
- opts["result_max_size"] = max_size
+ def _test_result_exception(self, result_type, result_file_name="", fmi_type="me"):
+ model, opts = self._setup(result_type, result_file_name, fmi_type)
- #No exception should be raised.
- res = model.simulate(options=opts)
+ opts["result_max_size"] = 10
- def _test_result_exception(self, result_type, result_file_name="", fmi_type="me"):
- model, opts = self._setup(result_type, result_file_name, fmi_type)
+ with pytest.raises(ResultSizeError):
+ model.simulate(options=opts)
- opts["result_max_size"] = 10
+ def _test_result_size_verification(self, result_type, result_file_name="", dynamic_diagnostics=False):
+ """
+ Verifies that the ResultSizeError exception is triggered (because the result grows too large) and
+ that the resulting file stays within bounds of the configured maximum size.
+ """
+ model, opts = self._setup(result_type, result_file_name)
+ model.setup_experiment()
+ model.initialize()
- with pytest.raises(ResultSizeError):
- model.simulate(options=opts)
+ max_size = 1e6
+ opts["result_max_size"] = max_size
+ opts["dynamic_diagnostics"] = dynamic_diagnostics
+ opts["logging"] = dynamic_diagnostics
+ opts["ncp"] = 0 #Set to zero to circumvent the early size check
+ ncp = 10000
- def _test_result_size_verification(self, result_type, result_file_name="", dynamic_diagnostics=False):
- """
- Verifies that the ResultSizeError exception is triggered (due to too large result) and also verifies
- that the resulting file is within bounds of the set maximum size.
- """
- model, opts = self._setup(result_type, result_file_name)
- model.setup_experiment()
- model.initialize()
+ result_handler = get_result_handler(model, opts)
- max_size = 1e6
- opts["result_max_size"] = max_size
- opts["dynamic_diagnostics"] = dynamic_diagnostics
- opts["logging"] = dynamic_diagnostics
- opts["ncp"] = 0 #Set to zero to circumvent the early size check
- ncp = 10000
+ result_handler.set_options(opts)
+ result_handler.initialize_complete()
- result_handler = get_result_handler(model, opts)
+ if opts["dynamic_diagnostics"]:
+ opts['CVode_options']['rtol'] = 1e-6
+ opts['CVode_options']['atol'] = model.nominal_continuous_states * opts['CVode_options']['rtol']
+ diag_params, diag_vars = setup_diagnostics_variables(model, 0, opts, opts['CVode_options'])
+ result_handler.simulation_start(diag_params, diag_vars)
+ else:
+ result_handler.simulation_start()
- result_handler.set_options(opts)
- result_handler.initialize_complete()
+ with pytest.raises(ResultSizeError):
+ for _ in range(ncp):
+ result_handler.integration_point()
- if opts["dynamic_diagnostics"]:
- opts['CVode_options']['rtol'] = 1e-6
- opts['CVode_options']['atol'] = model.nominal_continuous_states * opts['CVode_options']['rtol']
- diag_params, diag_vars = setup_diagnostics_variables(model, 0, opts, opts['CVode_options'])
- result_handler.simulation_start(diag_params, diag_vars)
- else:
- result_handler.simulation_start()
+ if opts["dynamic_diagnostics"]:
+ result_handler.diagnostics_point(np.array([val[0] for val in diag_vars.values()], dtype=float))
- with pytest.raises(ResultSizeError):
- for _ in range(ncp):
- result_handler.integration_point()
+ result_file = model.get_last_result_file()
+ file_size = os.path.getsize(result_file)
- if opts["dynamic_diagnostics"]:
- result_handler.diagnostics_point(np.array([val[0] for val in diag_vars.values()], dtype=float))
+ assert file_size > max_size*0.9 and file_size < max_size*1.1, \
+ "The file size is not within 10% of the given max size"
+
- result_file = model.get_last_result_file()
- file_size = os.path.getsize(result_file)
- assert file_size > max_size*0.9 and file_size < max_size*1.1, \
- "The file size is not within 10% of the given max size"
-
- def _test_result_size_early_abort(self, result_type, result_file_name=""):
- """
- Verifies that the ResultSizeError is triggered and also verifies that the cause of the error being
- triggered was due to that the ESTIMATE for the result size was too big.
- """
- model, opts = self._setup(result_type, result_file_name)
-
- max_size = 1e6
- opts["result_max_size"] = max_size
- opts["ncp"] = 10000000
+ def _test_result_size_early_abort(self, result_type, result_file_name=""):
+ """
+ Verifies that the ResultSizeError is triggered and that the error was caused by the
+ ESTIMATE for the result size being too big.
+ """
+ model, opts = self._setup(result_type, result_file_name)
- with pytest.raises(ResultSizeError):
- model.simulate(options=opts)
+ max_size = 1e6
+ opts["result_max_size"] = max_size
+ opts["ncp"] = 10000000
- result_file = model.get_last_result_file()
- if result_file:
- file_size = os.path.getsize(result_file)
+ with pytest.raises(ResultSizeError):
+ model.simulate(options=opts)
- assert file_size < max_size*0.1, \
- "The file size is not small, no early abort"
+ result_file = model.get_last_result_file()
+ if result_file:
+ file_size = os.path.getsize(result_file)
- # TODO: Pytest parametrization
+ assert file_size < max_size*0.1, \
+ "The file size is not small, no early abort"
+
+ # TODO: Pytest parametrization
+ """
+ Binary
+ """
+ def test_binary_file_size_verification_diagnostics(self):
"""
- Binary
+ Make sure that the diagnostics variables are also taken into account.
"""
- def test_binary_file_size_verification_diagnostics(self):
- """
- Make sure that the diagnostics variables are also taken into account.
- """
- self._test_result_size_verification("binary", dynamic_diagnostics=True)
-
- def test_binary_file_size_verification(self):
- self._test_result_size_verification("binary")
+ self._test_result_size_verification("binary", dynamic_diagnostics=True)
- def test_binary_file_size_early_abort(self):
- self._test_result_size_early_abort("binary")
+ def test_binary_file_size_verification(self):
+ self._test_result_size_verification("binary")
+
+ def test_binary_file_size_early_abort(self):
+ self._test_result_size_early_abort("binary")
- def test_small_size_binary_file(self):
- self._test_result_exception("binary")
-
- def test_small_size_binary_file_cs(self):
- self._test_result_exception("binary", fmi_type="cs")
-
- def test_small_size_binary_file_stream(self):
- self._test_result_exception("binary", BytesIO())
+ def test_small_size_binary_file(self):
+ self._test_result_exception("binary")
+
+ def test_small_size_binary_file_cs(self):
+ self._test_result_exception("binary", fmi_type="cs")
+
+ def test_small_size_binary_file_stream(self):
+ self._test_result_exception("binary", BytesIO())
- def test_large_size_binary_file(self):
- self._test_result("binary")
+ def test_large_size_binary_file(self):
+ self._test_result("binary")
- def test_large_size_binary_file_stream(self):
- self._test_result("binary", BytesIO())
+ def test_large_size_binary_file_stream(self):
+ self._test_result("binary", BytesIO())
- """
- Text
- """
- def test_text_file_size_verification(self):
- self._test_result_size_verification("file")
-
- def test_text_file_size_early_abort(self):
- self._test_result_size_early_abort("file")
+ """
+ Text
+ """
+ def test_text_file_size_verification(self):
+ self._test_result_size_verification("file")
+
+ def test_text_file_size_early_abort(self):
+ self._test_result_size_early_abort("file")
- def test_small_size_text_file(self):
- self._test_result_exception("file")
-
- def test_small_size_text_file_stream(self):
- self._test_result_exception("file", StringIO())
+ def test_small_size_text_file(self):
+ self._test_result_exception("file")
+
+ def test_small_size_text_file_stream(self):
+ self._test_result_exception("file", StringIO())
- def test_large_size_text_file(self):
- self._test_result("file")
+ def test_large_size_text_file(self):
+ self._test_result("file")
- def test_large_size_text_file_stream(self):
- self._test_result("file", StringIO())
+ def test_large_size_text_file_stream(self):
+ self._test_result("file", StringIO())
- """
- CSV
- """
- def test_csv_file_size_verification(self):
- self._test_result_size_verification("csv")
-
- def test_csv_file_size_early_abort(self):
- self._test_result_size_early_abort("csv")
+ """
+ CSV
+ """
+ def test_csv_file_size_verification(self):
+ self._test_result_size_verification("csv")
+
+ def test_csv_file_size_early_abort(self):
+ self._test_result_size_early_abort("csv")
- def test_small_size_csv_file(self):
- self._test_result_exception("csv")
-
- def test_small_size_csv_file_stream(self):
- self._test_result_exception("csv", StringIO())
+ def test_small_size_csv_file(self):
+ self._test_result_exception("csv")
+
+ def test_small_size_csv_file_stream(self):
+ self._test_result_exception("csv", StringIO())
- def test_large_size_csv_file(self):
- self._test_result("csv", max_size=10000000)
+ def test_large_size_csv_file(self):
+ self._test_result("csv", max_size=10000000)
- def test_large_size_csv_file_stream(self):
- self._test_result("csv", StringIO(), max_size=10000000)
+ def test_large_size_csv_file_stream(self):
+ self._test_result("csv", StringIO(), max_size=10000000)
- """
- Memory
- """
- def test_small_size_memory(self):
- self._test_result_exception("memory")
-
- def test_memory_size_early_abort(self):
- self._test_result_size_early_abort("memory")
-
- def test_small_size_memory_stream(self):
- self._test_result_exception("memory", StringIO())
+ """
+ Memory
+ """
+ def test_small_size_memory(self):
+ self._test_result_exception("memory")
+
+ def test_memory_size_early_abort(self):
+ self._test_result_size_early_abort("memory")
+
+ def test_small_size_memory_stream(self):
+ self._test_result_exception("memory", StringIO())
- def test_large_size_memory(self):
- self._test_result("memory")
+ def test_large_size_memory(self):
+ self._test_result("memory")
- def test_large_size_memory_stream(self):
- self._test_result("memory", StringIO())
+ def test_large_size_memory_stream(self):
+ self._test_result("memory", StringIO())
diff --git a/tests/test_log.py b/tests/test_log.py
index 056e557a..366c902a 100644
--- a/tests/test_log.py
+++ b/tests/test_log.py
@@ -19,7 +19,7 @@
from pyfmi.common.log import extract_xml_log, parse_xml_log
from pyfmi.common.diagnostics import DIAGNOSTICS_PREFIX
-from pyfmi.tests.test_util import Dummy_FMUModelME2
+from pyfmi.test_util import Dummy_FMUModelME2
from pyfmi.fmi_util import decode
import numpy as np
@@ -163,7 +163,6 @@ def test_truncated_log_valid_xml(self):
""" Test that a truncated log still contains valid XML."""
# XXX: There currently is no FMU with linux binaries running on Ubuntu 20+ (libgfortran issues)

# XXX: This is not a very good test, since it largely tests the mocked implementation, but better than nothing
- file_path = os.path.dirname(os.path.abspath(__file__))
fmu_name = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Bouncing_Ball.fmu")
# 1. Simulate + determine log size that corresponds to a truncation (resulting in invalid XML)
@@ -209,7 +208,6 @@ def test_truncated_log_valid_xml(self):
def test_resume_logging_on_increased_max_log_size(self):
"""Test that logging will resume when increasing max log size & previously exceeding the maximal size."""
- file_path = os.path.dirname(os.path.abspath(__file__))
fmu_name = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Bouncing_Ball.fmu")
fmu = Dummy_FMUModelME2([], fmu_name, _connect_dll=False)
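The two removals above leave fmu_name relying on a file_path defined once at module level in tests/test_log.py, presumably the same pattern as in tests/test_stream.py below. A minimal sketch of that assumption:

import os

# Module-level constant; the per-test definitions removed above become redundant.
file_path = os.path.dirname(os.path.abspath(__file__))

# Test files now live under tests/files/..., so paths resolve relative to the tests/ directory:
fmu_name = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Bouncing_Ball.fmu")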
diff --git a/tests/test_stream.py b/tests/test_stream.py
index fd8be522..c821fe28 100644
--- a/tests/test_stream.py
+++ b/tests/test_stream.py
@@ -23,7 +23,7 @@
from filecmp import cmp as compare_files
from pyfmi.fmi import FMUException, load_fmu, FMUModelCS2, FMUModelME2
-from pyfmi.tests.test_util import get_examples_folder
+from pyfmi.test_util import get_examples_folder
file_path = os.path.dirname(os.path.abspath(__file__))
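
Since the `if assimulo_installed:` guard around TestFileSizeLimit is replaced by the @pytest.mark.assimulo marker above, something outside the test module still has to skip or deselect those tests when Assimulo is missing, either on the command line (pytest -m "not assimulo") or via a collection hook. A hypothetical conftest.py sketch, not part of this patch, that mirrors the old guard:

import pytest

try:
    import assimulo  # noqa: F401
    ASSIMULO_INSTALLED = True
except ImportError:
    ASSIMULO_INSTALLED = False

def pytest_collection_modifyitems(config, items):
    # Skip every test carrying the `assimulo` marker when Assimulo is not importable.
    if ASSIMULO_INSTALLED:
        return
    skip_assimulo = pytest.mark.skip(reason="Assimulo is not installed")
    for item in items:
        if "assimulo" in item.keywords:
            item.add_marker(skip_assimulo)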