Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 0 additions & 8 deletions homeassistant/components/recorder/history.py
Original file line number Diff line number Diff line change
Expand Up @@ -695,8 +695,6 @@ def _sorted_states_to_dict(
prev_state = first_state.state
ent_results.append(state_class(first_state, attr_cache))

initial_state_count = len(ent_results)
row = None
for row in group:
# With minimal response we do not care about attribute
# changes so we can filter out duplicate states
Expand All @@ -716,12 +714,6 @@ def _sorted_states_to_dict(
)
prev_state = state

if row and len(ent_results) != initial_state_count:
# There was at least one state change
# replace the last minimal state with
# a full state
ent_results[-1] = state_class(row, attr_cache)

# If there are no states beyond the initial state,
# the state was never popped from initial_states
for ent_id, row in initial_states.items():
Expand Down
44 changes: 29 additions & 15 deletions tests/components/history/test_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,23 +61,30 @@ def test_get_significant_states_minimal_response(hass_history):
hist = get_significant_states(
hass, zero, four, filters=history.Filters(), minimal_response=True
)
entites_with_reducable_states = [
"media_player.test",
"media_player.test3",
]

# The second media_player.test state is reduced
# All states for media_player.test state are reduced
# down to last_changed and state when minimal_response
# is set except for the first state.
# is set. We use JSONEncoder to make sure that our
# pre-encoded last_changed is always the same as what
# will happen with encoding a native state
input_state = states["media_player.test"][1]
orig_last_changed = json.dumps(
process_timestamp(input_state.last_changed),
cls=JSONEncoder,
).replace('"', "")
orig_state = input_state.state
states["media_player.test"][1] = {
"last_changed": orig_last_changed,
"state": orig_state,
}

for entity_id in entites_with_reducable_states:
entity_states = states[entity_id]
for state_idx in range(1, len(entity_states)):
input_state = entity_states[state_idx]
orig_last_changed = json.dumps(
process_timestamp(input_state.last_changed),
cls=JSONEncoder,
).replace('"', "")
orig_state = input_state.state
entity_states[state_idx] = {
"last_changed": orig_last_changed,
"state": orig_state,
}
assert states == hist


Expand Down Expand Up @@ -616,6 +623,9 @@ async def test_fetch_period_api_with_minimal_response(hass, recorder_mock, hass_
hass.states.async_set("sensor.power", 50, {"attr": "any"})
await async_wait_recording_done(hass)
hass.states.async_set("sensor.power", 23, {"attr": "any"})
last_changed = hass.states.get("sensor.power").last_changed
await async_wait_recording_done(hass)
hass.states.async_set("sensor.power", 23, {"attr": "any"})
await async_wait_recording_done(hass)
client = await hass_client()
response = await client.get(
Expand All @@ -634,9 +644,13 @@ async def test_fetch_period_api_with_minimal_response(hass, recorder_mock, hass_
assert "entity_id" not in state_list[1]
assert state_list[1]["state"] == "50"

assert state_list[2]["entity_id"] == "sensor.power"
assert state_list[2]["attributes"] == {}
assert "attributes" not in state_list[2]
assert "entity_id" not in state_list[2]
assert state_list[2]["state"] == "23"
assert state_list[2]["last_changed"] == json.dumps(
process_timestamp(last_changed),
cls=JSONEncoder,
).replace('"', "")


async def test_fetch_period_api_with_no_timestamp(hass, hass_client, recorder_mock):
Expand Down Expand Up @@ -1131,7 +1145,7 @@ async def test_history_during_period(hass, hass_ws_client, recorder_mock):
assert "lc" not in sensor_test_history[1] # skipped if the same a last_updated (lu)

assert sensor_test_history[2]["s"] == "on"
assert sensor_test_history[2]["a"] == {}
assert "a" not in sensor_test_history[2]

await client.send_json(
{
Expand Down
33 changes: 20 additions & 13 deletions tests/components/recorder/test_history.py
Original file line number Diff line number Diff line change
Expand Up @@ -388,23 +388,30 @@ def test_get_significant_states_minimal_response(hass_recorder):
hass = hass_recorder()
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four, minimal_response=True)
entites_with_reducable_states = [
"media_player.test",
"media_player.test3",
]

# The second media_player.test state is reduced
# All states for media_player.test state are reduced
# down to last_changed and state when minimal_response
# is set except for the first state.
# is set. We use JSONEncoder to make sure that our
# pre-encoded last_changed is always the same as what
# will happen with encoding a native state
input_state = states["media_player.test"][1]
orig_last_changed = json.dumps(
process_timestamp(input_state.last_changed),
cls=JSONEncoder,
).replace('"', "")
orig_state = input_state.state
states["media_player.test"][1] = {
"last_changed": orig_last_changed,
"state": orig_state,
}

for entity_id in entites_with_reducable_states:
entity_states = states[entity_id]
for state_idx in range(1, len(entity_states)):
input_state = entity_states[state_idx]
orig_last_changed = json.dumps(
process_timestamp(input_state.last_changed),
cls=JSONEncoder,
).replace('"', "")
orig_state = input_state.state
entity_states[state_idx] = {
"last_changed": orig_last_changed,
"state": orig_state,
}
assert states == hist


Expand Down Expand Up @@ -565,7 +572,7 @@ def set_state(state, **kwargs):
assert states == hist[entity_id]


def record_states(hass):
def record_states(hass) -> tuple[datetime, datetime, dict[str, list[State]]]:
"""Record some test states.

We inject a bunch of state updates from media player, zone and
Expand Down