Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
108 changes: 52 additions & 56 deletions homeassistant/components/history/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from typing import Optional, cast

from aiohttp import web
from sqlalchemy import and_, bindparam, func
from sqlalchemy import and_, bindparam, func, not_, or_
from sqlalchemy.ext import baked
import voluptuous as vol

Expand All @@ -29,6 +29,10 @@
)
from homeassistant.core import Context, State, split_entity_id
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import (
CONF_ENTITY_GLOBS,
INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
)
import homeassistant.util.dt as dt_util

# mypy: allow-untyped-defs, no-check-untyped-defs
Expand All @@ -41,27 +45,20 @@
STATE_KEY = "state"
LAST_CHANGED_KEY = "last_changed"

# Not reusing from entityfilter because history does not support glob filtering
_FILTER_SCHEMA_INNER = vol.Schema(
{
vol.Optional(CONF_DOMAINS, default=[]): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
}
)
_FILTER_SCHEMA = vol.Schema(
GLOB_TO_SQL_CHARS = {
42: "%", # *
46: "_", # .
}

CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(
CONF_INCLUDE, default=_FILTER_SCHEMA_INNER({})
): _FILTER_SCHEMA_INNER,
vol.Optional(
CONF_EXCLUDE, default=_FILTER_SCHEMA_INNER({})
): _FILTER_SCHEMA_INNER,
vol.Optional(CONF_ORDER, default=False): cv.boolean,
}
DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
{vol.Optional(CONF_ORDER, default=False): cv.boolean}
)
},
extra=vol.ALLOW_EXTRA,
)

CONFIG_SCHEMA = vol.Schema({DOMAIN: _FILTER_SCHEMA}, extra=vol.ALLOW_EXTRA)

SIGNIFICANT_DOMAINS = (
"climate",
"device_tracker",
Expand Down Expand Up @@ -563,10 +560,12 @@ def sqlalchemy_filter_from_include_exclude_conf(conf):
if exclude:
filters.excluded_entities = exclude.get(CONF_ENTITIES, [])
filters.excluded_domains = exclude.get(CONF_DOMAINS, [])
filters.excluded_entity_globs = exclude.get(CONF_ENTITY_GLOBS, [])
include = conf.get(CONF_INCLUDE)
if include:
filters.included_entities = include.get(CONF_ENTITIES, [])
filters.included_domains = include.get(CONF_DOMAINS, [])
filters.included_entity_globs = include.get(CONF_ENTITY_GLOBS, [])
return filters


Expand All @@ -577,8 +576,11 @@ def __init__(self):
"""Initialise the include and exclude filters."""
self.excluded_entities = []
self.excluded_domains = []
self.excluded_entity_globs = []

self.included_entities = []
self.included_domains = []
self.included_entity_globs = []

def apply(self, query, entity_ids=None):
"""Apply the include/exclude filter on domains and entities on query.
Expand Down Expand Up @@ -619,52 +621,46 @@ def bake(self, baked_query, entity_ids=None):
if (
self.excluded_entities
or self.excluded_domains
or self.excluded_entity_globs
or self.included_entities
or self.included_domains
or self.included_entity_globs
):
baked_query += lambda q: q.filter(self.entity_filter())

def entity_filter(self):
    """Generate the entity filter query.

    Builds an OR of all include conditions (domains, entity_ids,
    entity_id globs) and an OR of all exclude conditions, then
    combines them.  Returns ``None`` when no filtering is configured
    so callers can skip adding a WHERE clause entirely.
    """
    includes = []
    if self.included_domains:
        includes.append(States.domain.in_(self.included_domains))
    if self.included_entities:
        includes.append(States.entity_id.in_(self.included_entities))
    for glob in self.included_entity_globs:
        includes.append(_glob_to_like(glob))

    excludes = []
    if self.excluded_domains:
        excludes.append(States.domain.in_(self.excluded_domains))
    if self.excluded_entities:
        excludes.append(States.entity_id.in_(self.excluded_entities))
    for glob in self.excluded_entity_globs:
        excludes.append(_glob_to_like(glob))

    # Nothing configured: no filter at all.
    if not includes and not excludes:
        return None

    # Include-only configuration.
    if includes and not excludes:
        return or_(*includes)

    # Exclude-only configuration.
    # BUG FIX: the previous condition was ``not excludes and includes``,
    # which is unreachable after the branch above, so an exclude-only
    # config fell through to the combined expression below with an
    # empty ``includes`` list (``or_()`` of nothing).
    if excludes and not includes:
        return not_(or_(*excludes))

    # Both configured: must match an include and not match any exclude.
    return or_(*includes) & not_(or_(*excludes))
def _glob_to_like(glob_str):
    """Translate a glob pattern into a SQL LIKE condition on entity_id.

    ``*`` becomes ``%`` and ``.`` becomes ``_`` via GLOB_TO_SQL_CHARS.
    """
    sql_pattern = glob_str.translate(GLOB_TO_SQL_CHARS)
    return States.entity_id.like(sql_pattern)


class LazyState(State):
Expand Down
3 changes: 0 additions & 3 deletions homeassistant/components/logbook/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -591,9 +591,6 @@ def _keep_event(hass, event, entities_filter):
if event.event_type in HOMEASSISTANT_EVENTS:
return entities_filter is None or entities_filter(HA_DOMAIN_ENTITY_ID)

if event.event_type == EVENT_STATE_CHANGED:
return entities_filter is None or entities_filter(event.entity_id)

entity_id = event.data_entity_id
if entity_id:
return entities_filter is None or entities_filter(entity_id)
Expand Down
119 changes: 118 additions & 1 deletion tests/components/history/test_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
init_recorder_component,
mock_state_change_event,
)
from tests.components.recorder.common import wait_recording_done
from tests.components.recorder.common import trigger_db_commit, wait_recording_done


class TestComponentHistory(unittest.TestCase):
Expand Down Expand Up @@ -823,3 +823,120 @@ async def test_fetch_period_api_with_include_order(hass, hass_client):
params={"filter_entity_id": "non.existing,something.else"},
)
assert response.status == 200


async def test_fetch_period_api_with_entity_glob_include(hass, hass_client):
    """Test the fetch period view for history."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(
        hass,
        "history",
        {
            "history": {
                "include": {"entity_globs": ["light.k*"]},
            }
        },
    )
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    # Only light.kitchen matches the ``light.k*`` glob.
    for entity_id in ("light.kitchen", "light.cow", "light.nomatch"):
        hass.states.async_set(entity_id, "on")

    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    api_client = await hass_client()
    resp = await api_client.get(
        f"/api/history/period/{dt_util.utcnow().isoformat()}",
    )
    assert resp.status == 200
    payload = await resp.json()
    assert payload[0][0]["entity_id"] == "light.kitchen"


async def test_fetch_period_api_with_entity_glob_exclude(hass, hass_client):
    """Test the fetch period view for history."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(
        hass,
        "history",
        {
            "history": {
                "exclude": {
                    "entity_globs": ["light.k*"],
                    "domains": "switch",
                    "entities": "media_player.test",
                },
            }
        },
    )
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    # light.kitchen (glob), switch.match (domain) and media_player.test
    # (entity) are all excluded; only the two remaining lights survive.
    for entity_id in (
        "light.kitchen",
        "light.cow",
        "light.match",
        "switch.match",
        "media_player.test",
    ):
        hass.states.async_set(entity_id, "on")

    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    api_client = await hass_client()
    resp = await api_client.get(
        f"/api/history/period/{dt_util.utcnow().isoformat()}",
    )
    assert resp.status == 200
    payload = await resp.json()
    assert len(payload) == 2
    assert payload[0][0]["entity_id"] == "light.cow"
    assert payload[1][0]["entity_id"] == "light.match"


async def test_fetch_period_api_with_entity_glob_include_and_exclude(hass, hass_client):
    """Test the fetch period view for history."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(
        hass,
        "history",
        {
            "history": {
                "exclude": {
                    "entity_globs": ["light.many*"],
                },
                "include": {
                    "entity_globs": ["light.m*"],
                    "domains": "switch",
                    "entities": "media_player.test",
                },
            }
        },
    )
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    # light.match is included by glob; light.many_state_changes matches
    # the include glob but is knocked out by the exclude glob.
    for entity_id in (
        "light.kitchen",
        "light.cow",
        "light.match",
        "light.many_state_changes",
        "switch.match",
        "media_player.test",
    ):
        hass.states.async_set(entity_id, "on")

    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    api_client = await hass_client()
    resp = await api_client.get(
        f"/api/history/period/{dt_util.utcnow().isoformat()}",
    )
    assert resp.status == 200
    payload = await resp.json()
    assert len(payload) == 3
    assert payload[0][0]["entity_id"] == "light.match"
    assert payload[1][0]["entity_id"] == "media_player.test"
    assert payload[2][0]["entity_id"] == "switch.match"
Loading