"""Test vienna_smartmeter config flow."""
from unittest.mock import patch
from homeassistant import config_entries, data_entry_flow
import pytest
from pytest_homeassistant_custom_component.common import MockConfigEntry
from custom_components.vienna_smartmeter.const import CONF_SCAN_INTERVAL, DOMAIN
from .const import MOCK_CONFIG, MOCK_FAIL_CONFIG
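
# MOCK_CONFIG and MOCK_FAIL_CONFIG are defined in this test package's const
# module (not shown here). As a rough, hypothetical sketch only -- the real
# keys and values come from tests/const.py -- they might look like:
#
#     MOCK_CONFIG = {CONF_USERNAME: "demouser", CONF_PASSWORD: "demopassword"}
#     MOCK_FAIL_CONFIG = {CONF_USERNAME: "demouser", CONF_PASSWORD: "wrong"}
#
# where CONF_USERNAME and CONF_PASSWORD would be the standard
# homeassistant.const keys; the title assertion below expects "demouser".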


# This fixture bypasses the actual setup of the integration
# since we only want to test the config flow. We test the
# actual functionality of the integration in other test modules.
@pytest.fixture(autouse=True)
def bypass_setup_fixture():
    """Prevent setup."""
    with patch(
        "custom_components.vienna_smartmeter.async_setup_entry",
        return_value=True,
    ):
        yield
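

# The `bypass_get_data` fixture used below is provided by the test suite's
# conftest.py (not shown). A minimal, hypothetical sketch -- the actual patch
# target and fixture name come from conftest.py, not from here -- could be:
#
#     @pytest.fixture(name="bypass_get_data")
#     def bypass_get_data_fixture():
#         """Skip calls to get data from the Smart Meter API."""
#         with patch(
#             "custom_components.vienna_smartmeter.ViennaSmartmeterApiClient.async_get_data"
#         ):
#             yield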


# Here we simulate a successful config flow from the backend.
# Note that we use the `bypass_get_data` fixture here because
# we want the config flow validation to succeed during the test.
async def test_successful_config_flow(hass, bypass_get_data):
    """Test a successful config flow."""
    # Initialize a config flow
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # Check that the config flow shows the user form as the first step
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    # If a user were to enter the mocked credentials from MOCK_CONFIG,
    # it would result in this function call
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=MOCK_CONFIG
    )

    # Check that the config flow is complete and a new entry is created with
    # the input data
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == "demouser"
    assert result["data"] == MOCK_CONFIG
    assert result["result"]


# In this case, we want to simulate a failure during the config flow.
# We use the `error_on_get_data` mock instead of `bypass_get_data`
# (note the function parameters) to raise an Exception during
# validation of the input config.
async def test_failed_config_flow(hass, error_on_get_data):
    """Test a failed config flow due to credential validation failure."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=MOCK_FAIL_CONFIG
    )

    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "auth"}


# In this case, we want to simulate an authentication failure during the
# config flow. We use the `auth_error_on_get_data` mock instead of
# `bypass_get_data` (note the function parameters) to raise an
# authentication error during validation of the input config.
async def test_failed_auth_config_flow(hass, auth_error_on_get_data):
    """Test a failed config flow due to credential validation failure."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=MOCK_FAIL_CONFIG
    )

    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "auth"}


# In this case, we want to simulate a timeout during the config flow.
# We use the `timeout_error_on_get_data` mock instead of `bypass_get_data`
# (note the function parameters) to raise a timeout error during
# validation of the input config.
async def test_failed_timeout_config_flow(hass, timeout_error_on_get_data):
    """Test a failed config flow due to a timeout during credential validation."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=MOCK_FAIL_CONFIG
    )

    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "auth"}


# Our config flow also has an options flow, so we must test it as well.
async def test_options_flow(hass):
    """Test an options flow."""
    # Create a new MockConfigEntry and add to HASS (we're bypassing config
    # flow entirely)
    entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, entry_id="test")
    entry.add_to_hass(hass)

    # Initialize an options flow
    await hass.config_entries.async_setup(entry.entry_id)
    result = await hass.config_entries.options.async_init(entry.entry_id)

    # Verify that the first options step is a user form
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    # Enter some fake data into the form
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={CONF_SCAN_INTERVAL: 22},
    )

    # Verify that the flow finishes
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY

    # Verify that the options were updated
    assert entry.options == {CONF_SCAN_INTERVAL: 22}