homeassistant/components/logbook/__init__.py (70 changes: 52 additions & 18 deletions)
@@ -38,7 +38,13 @@
     STATE_OFF,
     STATE_ON,
 )
-from homeassistant.core import DOMAIN as HA_DOMAIN, callback, split_entity_id
+from homeassistant.core import (
+    DOMAIN as HA_DOMAIN,
+    callback,
+    split_entity_id,
+    valid_entity_id,
+)
+from homeassistant.exceptions import InvalidEntityFormatError
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entityfilter import (
     INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
@@ -51,6 +57,7 @@
 from homeassistant.loader import bind_hass
 import homeassistant.util.dt as dt_util
 
+ENTITY_ID_JSON_TEMPLATE = '"entity_id": "{}"'
 ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": "([^"]+)"')
 DOMAIN_JSON_EXTRACT = re.compile('"domain": "([^"]+)"')
 
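The recorder stores event payloads as serialized JSON text, so the new template builds a substring that can be matched against Events.event_data without parsing every row. A standalone sketch of the idea (plain Python, no Home Assistant imports; the sample payload is hypothetical):

import re

ENTITY_ID_JSON_TEMPLATE = '"entity_id": "{}"'
ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": "([^"]+)"')

# Hypothetical serialized event payload as stored by the recorder
event_data = '{"entity_id": "switch.test_state", "domain": "switch"}'

# Substring built from the template; the SQL filter added below uses the
# same needle via Events.event_data.contains(...)
needle = ENTITY_ID_JSON_TEMPLATE.format("switch.test_state")
assert needle in event_data

# The companion regex extracts the entity_id back out of a payload
match = ENTITY_ID_JSON_EXTRACT.search(event_data)
assert match is not None and match.group(1) == "switch.test_state"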
@@ -87,7 +94,6 @@
 
 SCRIPT_AUTOMATION_EVENTS = [EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED]
 
-
 LOG_MESSAGE_SCHEMA = vol.Schema(
     {
         vol.Required(ATTR_NAME): cv.string,
@@ -214,6 +220,8 @@ async def get(self, request, datetime=None):
 
         hass = request.app["hass"]
 
+        entity_matches_only = "entity_matches_only" in request.query
+
         def json_events():
             """Fetch events and generate JSON."""
             return self.json(
@@ -224,6 +232,7 @@ def json_events():
                     entity_id,
                     self.filters,
                     self.entities_filter,
+                    entity_matches_only,
                 )
             )
 
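For context, entity_matches_only is a bare query-string flag: its mere presence in request.query enables the behavior, no value required. A hedged usage sketch against the REST API (the base URL and token are hypothetical):

import requests

BASE_URL = "http://localhost:8123"  # hypothetical instance
HEADERS = {"Authorization": "Bearer <YOUR_LONG_LIVED_TOKEN>"}

# With entity_matches_only present, only logbook rows that actually contain
# switch.test_state are returned; unrelated context rows are filtered out.
response = requests.get(
    f"{BASE_URL}/api/logbook/2020-05-20T00:00:00+00:00",
    params={"entity": "switch.test_state", "entity_matches_only": ""},
    headers=HEADERS,
)
print(response.json())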
@@ -390,11 +399,19 @@ def humanify(hass, events, entity_attr_cache, context_lookup):
 
 
 def _get_events(
-    hass, start_day, end_day, entity_id=None, filters=None, entities_filter=None
+    hass,
+    start_day,
+    end_day,
+    entity_id=None,
+    filters=None,
+    entities_filter=None,
+    entity_matches_only=False,
 ):
     """Get events for a period of time."""
     entity_attr_cache = EntityAttributeCache(hass)
     context_lookup = {None: None}
+    entity_id_lower = None
+    apply_sql_entities_filter = True
 
     def yield_events(query):
         """Yield Events that are not filtered away."""
@@ -404,15 +421,17 @@ def yield_events(query):
             if _keep_event(hass, event, entities_filter):
                 yield event
 
-    with session_scope(hass=hass) as session:
-        if entity_id is not None:
-            entity_ids = [entity_id.lower()]
-            entities_filter = generate_filter([], entity_ids, [], [])
-            apply_sql_entities_filter = False
-        else:
-            entity_ids = None
-            apply_sql_entities_filter = True
+    if entity_id is not None:
+        entity_id_lower = entity_id.lower()
+        if not valid_entity_id(entity_id_lower):
+            raise InvalidEntityFormatError(
+                f"Invalid entity id encountered: {entity_id_lower}. "
+                "Format should be <domain>.<object_id>"
+            )
+        entities_filter = generate_filter([], [entity_id_lower], [], [])
+        apply_sql_entities_filter = False
 
+    with session_scope(hass=hass) as session:
         old_state = aliased(States, name="old_state")
 
         query = (
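For reference, valid_entity_id only validates the <domain>.<object_id> shape. A simplified stand-in, assuming lowercase alphanumerics and underscores on both sides of a single dot (the real helper in homeassistant.core uses a stricter regex that also rejects leading, trailing, and double underscores):

import re

SIMPLE_ENTITY_ID = re.compile(r"^[\da-z_]+\.[\da-z_]+$")

def valid_entity_id_sketch(entity_id: str) -> bool:
    # Simplified relative to homeassistant.core.valid_entity_id
    return SIMPLE_ENTITY_ID.match(entity_id) is not None

assert valid_entity_id_sketch("switch.test_state")
# "invalid" has no dot, so the view raises InvalidEntityFormatError for it
assert not valid_entity_id_sketch("invalid")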
@@ -458,14 +477,29 @@
             .filter((Events.time_fired > start_day) & (Events.time_fired < end_day))
         )
 
-        if entity_ids:
-            query = query.filter(
-                (
-                    (States.last_updated == States.last_changed)
-                    & States.entity_id.in_(entity_ids)
-                )
-                | (States.state_id.is_(None))
-            )
+        if entity_id_lower is not None:
+            if entity_matches_only:
+                # When entity_matches_only is provided, contexts and events that do not
+                # contain the entity_id are not included in the logbook response.
+                entity_id_json = ENTITY_ID_JSON_TEMPLATE.format(entity_id_lower)
+                query = query.filter(
+                    (
+                        (States.last_updated == States.last_changed)
+                        & (States.entity_id == entity_id_lower)
+                    )
+                    | (
+                        States.state_id.is_(None)
+                        & Events.event_data.contains(entity_id_json)
+                    )
+                )
+            else:
+                query = query.filter(
+                    (
+                        (States.last_updated == States.last_changed)
+                        & (States.entity_id == entity_id_lower)
+                    )
+                    | (States.state_id.is_(None))
+                )
         else:
             query = query.filter(
                 (States.last_updated == States.last_changed)
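To see how the entity_matches_only branch behaves at the SQL level, here is a minimal, self-contained sketch using an in-memory SQLite database and a stripped-down Events table (the real recorder schema has many more columns); SQLAlchemy renders contains() as a LIKE over the serialized payload:

from sqlalchemy import Column, Integer, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class Events(Base):
    """Stripped-down stand-in for the recorder's Events table."""

    __tablename__ = "events"
    event_id = Column(Integer, primary_key=True)
    event_data = Column(Text)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Events(event_data='{"entity_id": "switch.test_state"}'))
session.add(Events(event_data='{"entity_id": "light.kitchen"}'))
session.commit()

# Same needle the PR builds with ENTITY_ID_JSON_TEMPLATE; contains() compiles
# to: event_data LIKE '%"entity_id": "switch.test_state"%'
needle = '"entity_id": "{}"'.format("switch.test_state")
rows = session.query(Events).filter(Events.event_data.contains(needle)).all()
assert len(rows) == 1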
tests/components/logbook/test_init.py (101 changes: 98 additions & 3 deletions)
@@ -2021,7 +2021,7 @@ async def test_logbook_context_from_template(hass, hass_client):
             }
         },
     )
-    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     await hass.async_block_till_done()
     await hass.async_start()
     await hass.async_block_till_done()
@@ -2043,9 +2043,9 @@ async def test_logbook_context_from_template(hass, hass_client):
     )
     await hass.async_block_till_done()
 
-    await hass.async_add_job(trigger_db_commit, hass)
+    await hass.async_add_executor_job(trigger_db_commit, hass)
     await hass.async_block_till_done()
-    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
 
     client = await hass_client()
 
@@ -2081,6 +2081,101 @@ async def test_logbook_context_from_template(hass, hass_client):
     assert json_dict[5]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
 
 
+async def test_logbook_entity_matches_only(hass, hass_client):
+    """Test the logbook view with a single entity and entity_matches_only."""
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(hass, "logbook", {})
+    assert await async_setup_component(
+        hass,
+        "switch",
+        {
+            "switch": {
+                "platform": "template",
+                "switches": {
+                    "test_template_switch": {
+                        "value_template": "{{ states.switch.test_state.state }}",
+                        "turn_on": {
+                            "service": "switch.turn_on",
+                            "entity_id": "switch.test_state",
+                        },
+                        "turn_off": {
+                            "service": "switch.turn_off",
+                            "entity_id": "switch.test_state",
+                        },
+                    }
+                },
+            }
+        },
+    )
+    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await hass.async_block_till_done()
+    await hass.async_start()
+    await hass.async_block_till_done()
+
+    # Entity added (should not be logged)
+    hass.states.async_set("switch.test_state", STATE_ON)
+    await hass.async_block_till_done()
+
+    # First state change (should be logged)
+    hass.states.async_set("switch.test_state", STATE_OFF)
+    await hass.async_block_till_done()
+
+    switch_turn_off_context = ha.Context(
+        id="9c5bd62de45711eaaeb351041eec8dd9",
+        user_id="9400facee45711eaa9308bfd3d19e474",
+    )
+    hass.states.async_set(
+        "switch.test_state", STATE_ON, context=switch_turn_off_context
+    )
+    await hass.async_block_till_done()
+
+    await hass.async_add_executor_job(trigger_db_commit, hass)
+    await hass.async_block_till_done()
+    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+
+    client = await hass_client()
+
+    # Today time 00:00:00
+    start = dt_util.utcnow().date()
+    start_date = datetime(start.year, start.month, start.day)
+
+    # Test today entries with filter by end_time
+    end_time = start + timedelta(hours=24)
+    response = await client.get(
+        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test_state&entity_matches_only"
+    )
+    assert response.status == 200
+    json_dict = await response.json()
+
+    assert len(json_dict) == 2
+
+    assert json_dict[0]["entity_id"] == "switch.test_state"
+    assert json_dict[0]["message"] == "turned off"
+
+    assert json_dict[1]["entity_id"] == "switch.test_state"
+    assert json_dict[1]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
+    assert json_dict[1]["message"] == "turned on"
+
+
+async def test_logbook_invalid_entity(hass, hass_client):
+    """Test the logbook view with requesting an invalid entity."""
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(hass, "logbook", {})
+    await hass.async_block_till_done()
+    client = await hass_client()
+
+    # Today time 00:00:00
+    start = dt_util.utcnow().date()
+    start_date = datetime(start.year, start.month, start.day)
+
+    # Test today entries with filter by end_time
+    end_time = start + timedelta(hours=24)
+    response = await client.get(
+        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=invalid&entity_matches_only"
+    )
+    assert response.status == 500
+
+
 class MockLazyEventPartialState(ha.Event):
     """Minimal mock of a Lazy event."""
 