init
This commit is contained in:
578
custom_components/alarmo/__init__.py
Normal file
578
custom_components/alarmo/__init__.py
Normal file
@@ -0,0 +1,578 @@
|
||||
"""The Alarmo Integration."""
|
||||
|
||||
import re
|
||||
import base64
|
||||
import logging
|
||||
import concurrent.futures
|
||||
|
||||
import bcrypt
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
asyncio,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_CODE,
|
||||
ATTR_NAME,
|
||||
)
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.helpers.service import (
|
||||
async_register_admin_service,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_send,
|
||||
async_dispatcher_connect,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
from homeassistant.components.alarm_control_panel import DOMAIN as PLATFORM
|
||||
|
||||
from . import const
|
||||
from .card import async_register_card
|
||||
from .mqtt import MqttHandler
|
||||
from .event import EventHandler
|
||||
from .panel import (
|
||||
async_register_panel,
|
||||
async_unregister_panel,
|
||||
)
|
||||
from .store import async_get_registry
|
||||
from .sensors import (
|
||||
ATTR_GROUP,
|
||||
ATTR_ENTITIES,
|
||||
ATTR_NEW_ENTITY_ID,
|
||||
SensorHandler,
|
||||
)
|
||||
from .websockets import async_register_websockets
|
||||
from .automations import AutomationHandler
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Max number of threads to start when checking user codes.
|
||||
MAX_WORKERS = 4
|
||||
# Number of rounds of hashing when computing user hashes.
|
||||
BCRYPT_NUM_ROUNDS = 10
|
||||
|
||||
|
||||
async def async_setup(hass, config):
    """Set up the integration from YAML (a no-op; config entries drive setup)."""
    return True
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up Alarmo integration from a config entry.

    Bootstraps storage, the coordinator, the device-registry entry, the
    alarm_control_panel platform, the frontend panel/card, websockets, and
    custom services. Always returns True.
    """
    session = async_get_clientsession(hass)

    # Persistent storage (areas/sensors/users/automations) backs the coordinator.
    store = await async_get_registry(hass)
    coordinator = AlarmoCoordinator(hass, session, entry, store)

    # Register a single device representing the whole integration.
    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(const.DOMAIN, coordinator.id)},
        name=const.NAME,
        model=const.NAME,
        sw_version=const.VERSION,
        manufacturer=const.MANUFACTURER,
    )

    # NOTE(review): the setdefault is immediately overwritten by the assignment
    # below, so any pre-existing domain data would be discarded here.
    hass.data.setdefault(const.DOMAIN, {})
    hass.data[const.DOMAIN] = {"coordinator": coordinator, "areas": {}, "master": None}

    if entry.unique_id is None:
        hass.config_entries.async_update_entry(entry, unique_id=coordinator.id, data={})

    # Create alarm_control_panel entities; the platform signals back via
    # the "alarmo_platform_loaded" dispatcher event handled by the coordinator.
    await hass.config_entries.async_forward_entry_setups(entry, [PLATFORM])

    # Register the panel (frontend)
    await async_register_panel(hass)
    await async_register_card(hass)

    # Websocket support
    await async_register_websockets(hass)

    # Register custom services
    register_services(hass)

    return True
||||
|
||||
|
||||
async def async_unload_entry(hass, entry):
    """Unload Alarmo config entry.

    Tears down the alarm_control_panel platform first; only when that
    succeeds are the frontend panel and the coordinator unloaded.
    """
    results = await asyncio.gather(
        hass.config_entries.async_forward_entry_unload(entry, PLATFORM)
    )
    if not all(results):
        # Platform refused to unload; leave everything in place.
        return False

    async_unregister_panel(hass)
    coordinator = hass.data[const.DOMAIN]["coordinator"]
    await coordinator.async_unload()
    return True
||||
|
||||
|
||||
async def async_remove_entry(hass, entry):
    """Remove Alarmo config entry.

    Unregisters the frontend panel, wipes the persistent storage via the
    coordinator, and drops the integration's data from hass.data.
    """
    async_unregister_panel(hass)
    domain_data = hass.data[const.DOMAIN]
    await domain_data["coordinator"].async_delete_config()
    del hass.data[const.DOMAIN]
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Handle migration of config entry.

    No schema migrations are defined; any stored entry version is accepted.
    """
    return True
||||
|
||||
|
||||
class AlarmoCoordinator(DataUpdateCoordinator):
    """Define an object to hold Alarmo device.

    Central coordinator: owns the storage handle, wires up the sensor /
    automation / MQTT / event handlers, and performs all CRUD operations
    on areas, sensors, users, automations and sensor groups. Changes are
    broadcast to entities via dispatcher signals ("alarmo_*_updated").
    """

    def __init__(self, hass, session, entry, store):
        """Initialize."""
        # Coordinator id mirrors the config entry's unique id (may be None
        # on first setup; async_setup_entry backfills it).
        self.id = entry.unique_id
        self.hass = hass
        self.entry = entry
        self.store = store
        # Unsubscribe callables collected here are invoked in async_unload().
        self._subscriptions = []

        # Entity platform signals readiness; only then can entities be created.
        self._subscriptions.append(
            async_dispatcher_connect(
                hass, "alarmo_platform_loaded", self.setup_alarm_entities
            )
        )
        self.register_events()

        super().__init__(hass, _LOGGER, config_entry=entry, name=const.DOMAIN)

    @callback
    def setup_alarm_entities(self):
        """Set up alarm_control_panel entities based on areas in storage."""
        # Auxiliary handlers are created lazily here, after the platform loaded.
        self.hass.data[const.DOMAIN]["sensor_handler"] = SensorHandler(self.hass)
        self.hass.data[const.DOMAIN]["automation_handler"] = AutomationHandler(
            self.hass
        )
        self.hass.data[const.DOMAIN]["mqtt_handler"] = MqttHandler(self.hass)
        self.hass.data[const.DOMAIN]["event_handler"] = EventHandler(self.hass)

        areas = self.store.async_get_areas()
        config = self.store.async_get_config()

        for item in areas.values():
            async_dispatcher_send(self.hass, "alarmo_register_entity", item)

        # The master panel only exists with 2+ areas and the option enabled.
        if len(areas) > 1 and config["master"]["enabled"]:
            async_dispatcher_send(self.hass, "alarmo_register_master", config["master"])

    async def async_update_config(self, data):
        """Update the main configuration.

        When the master-panel settings change, the master entity is
        re-created (or removed, together with its automations).
        """
        if "master" in data:
            old_config = self.store.async_get_config()
            if old_config[const.ATTR_MASTER] != data["master"]:
                if self.hass.data[const.DOMAIN]["master"]:
                    await self.async_remove_entity("master")
                if data["master"]["enabled"]:
                    async_dispatcher_send(
                        self.hass, "alarmo_register_master", data["master"]
                    )
                else:
                    # Master disabled: automations bound to the master panel
                    # (area None) become orphans and are deleted.
                    automations = self.hass.data[const.DOMAIN][
                        "automation_handler"
                    ].get_automations_by_area(None)
                    if len(automations):
                        for el in automations:
                            self.store.async_delete_automation(el)
                        async_dispatcher_send(self.hass, "alarmo_automations_updated")

        self.store.async_update_config(data)
        async_dispatcher_send(self.hass, "alarmo_config_updated")

    async def async_update_area_config(
        self,
        area_id: str | None = None,
        # NOTE(review): mutable default argument; callers are expected to
        # always pass data, but a shared default dict is a known pitfall.
        data: dict = {},
    ):
        """Update area configuration.

        Dispatches to delete / modify / create based on the payload:
        presence of ATTR_REMOVE deletes, an existing area_id modifies,
        otherwise a new area is created.
        """
        if const.ATTR_REMOVE in data:
            # delete an area
            res = self.store.async_get_area(area_id)
            if not res:
                return
            # Cascade: drop the sensors assigned to this area first.
            sensors = self.store.async_get_sensors()
            sensors = dict(filter(lambda el: el[1]["area"] == area_id, sensors.items()))
            if sensors:
                for el in sensors.keys():
                    self.store.async_delete_sensor(el)
                async_dispatcher_send(self.hass, "alarmo_sensors_updated")

            # Cascade: drop the automations bound to this area.
            automations = self.hass.data[const.DOMAIN][
                "automation_handler"
            ].get_automations_by_area(area_id)
            if len(automations):
                for el in automations:
                    self.store.async_delete_automation(el)
                async_dispatcher_send(self.hass, "alarmo_automations_updated")

            self.store.async_delete_area(area_id)
            await self.async_remove_entity(area_id)

            # A master panel over a single remaining area makes no sense.
            if (
                len(self.store.async_get_areas()) == 1
                and self.hass.data[const.DOMAIN]["master"]
            ):
                await self.async_remove_entity("master")

        elif self.store.async_get_area(area_id):
            # modify an area
            entry = self.store.async_update_area(area_id, data)
            if "name" not in data:
                async_dispatcher_send(self.hass, "alarmo_config_updated", area_id)
            else:
                # Renaming changes the entity_id: recreate the entity.
                await self.async_remove_entity(area_id)
                async_dispatcher_send(self.hass, "alarmo_register_entity", entry)
        else:
            # create an area
            entry = self.store.async_create_area(data)
            async_dispatcher_send(self.hass, "alarmo_register_entity", entry)

            config = self.store.async_get_config()

            # Crossing the 1 -> 2 areas threshold brings the master panel online.
            if len(self.store.async_get_areas()) == 2 and config["master"]["enabled"]:
                async_dispatcher_send(
                    self.hass, "alarmo_register_master", config["master"]
                )

    def async_update_sensor_config(self, entity_id: str, data: dict):
        """Update sensor configuration.

        Handles group (re)assignment, entity_id renames, and
        create / update / delete of the sensor entry itself.
        """
        group = None
        if ATTR_GROUP in data:
            group = data[ATTR_GROUP]
            del data[ATTR_GROUP]

        if ATTR_NEW_ENTITY_ID in data:
            # delete old sensor entry when changing the entity_id
            new_entity_id = data[ATTR_NEW_ENTITY_ID]
            del data[ATTR_NEW_ENTITY_ID]
            self.store.async_delete_sensor(entity_id)
            self.assign_sensor_to_group(new_entity_id, group)
            self.assign_sensor_to_group(entity_id, None)
            entity_id = new_entity_id

        if const.ATTR_REMOVE in data:
            self.store.async_delete_sensor(entity_id)
            self.assign_sensor_to_group(entity_id, None)
        elif self.store.async_get_sensor(entity_id):
            self.store.async_update_sensor(entity_id, data)
            self.assign_sensor_to_group(entity_id, group)
        else:
            self.store.async_create_sensor(entity_id, data)
            self.assign_sensor_to_group(entity_id, group)

        async_dispatcher_send(self.hass, "alarmo_sensors_updated")

    def _validate_user_code(self, user_id: str, data: dict):
        """Return an error string if the proposed code is invalid, else None.

        For existing users the old code must be supplied and verified; any
        user's code must be unique across users.
        """
        user_with_code = self.async_authenticate_user(data[ATTR_CODE])
        if user_id:
            if const.ATTR_OLD_CODE not in data:
                return "No code provided"
            if not self.async_authenticate_user(data[const.ATTR_OLD_CODE], user_id):
                return "Wrong code provided"
            if user_with_code and user_with_code[const.ATTR_USER_ID] != user_id:
                return "User with same code already exists"
        elif user_with_code:
            return "User with same code already exists"
        return

    def _validate_user_name(self, user_id: str, data: dict):
        """Return an error string if the proposed name is invalid, else None."""
        if not data[ATTR_NAME]:
            return "User name must not be empty"
        for user in self.store.async_get_users().values():
            if (
                data[ATTR_NAME] == user[ATTR_NAME]
                and user_id != user[const.ATTR_USER_ID]
            ):
                return "User with same name already exists"
        return

    def async_update_user_config(self, user_id: str | None = None, data: dict = {}):
        """Update user configuration.

        Validates name/code, bcrypt-hashes a new code (base64-wrapped),
        then creates or updates the user. Returns an error string on
        validation failure, otherwise None.
        """
        if const.ATTR_REMOVE in data:
            self.store.async_delete_user(user_id)
            return

        if ATTR_NAME in data:
            err = self._validate_user_name(user_id, data)
            if err:
                _LOGGER.error(err)
                return err
        if ATTR_CODE in data:
            err = self._validate_user_code(user_id, data)
            if err:
                _LOGGER.error(err)
                return err

        if data.get(ATTR_CODE):
            # Record format/length metadata before the code is hashed away.
            data[const.ATTR_CODE_FORMAT] = (
                "number" if data[ATTR_CODE].isdigit() else "text"
            )
            data[const.ATTR_CODE_LENGTH] = len(data[ATTR_CODE])
            hashed = bcrypt.hashpw(
                data[ATTR_CODE].encode("utf-8"),
                bcrypt.gensalt(rounds=BCRYPT_NUM_ROUNDS),
            )
            # Stored base64-encoded so the value is JSON-safe in storage.
            hashed = base64.b64encode(hashed)
            data[ATTR_CODE] = hashed.decode()

        if not user_id:
            self.store.async_create_user(data)
            return
        else:
            if ATTR_CODE in data:
                # The old code was only needed for validation; don't persist it.
                del data[const.ATTR_OLD_CODE]
            self.store.async_update_user(user_id, data)
            return

    def async_authenticate_user(self, code: str, user_id: str | None = None):
        """Authenticate a user by code.

        Returns the matching user dict or None. With user_id the check is
        restricted to that user; otherwise all users are checked in a
        thread pool (bcrypt verification is CPU-heavy).
        """

        def check_user_code(user, code):
            """Returns the supplied user object if the code matches, None otherwise."""
            if not user[const.ATTR_ENABLED]:
                return
            elif not user[ATTR_CODE] and not code:
                return user
            elif user[ATTR_CODE]:
                # NOTE(review): if code is None/empty while the user has a
                # code stored, code.encode(...) would raise for None —
                # presumably callers always pass a string; verify.
                hash = base64.b64decode(user[ATTR_CODE])
                if bcrypt.checkpw(code.encode("utf-8"), hash):
                    return user

        if user_id:
            return check_user_code(self.store.async_get_user(user_id), code)

        users = self.store.async_get_users()
        with concurrent.futures.ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
            futures = [
                executor.submit(check_user_code, user, code) for user in users.values()
            ]
            for future in concurrent.futures.as_completed(futures):
                if future.result():
                    # First hit wins; cancel the remaining checks.
                    executor.shutdown(wait=False, cancel_futures=True)
                    return future.result()

    def async_update_automation_config(
        self,
        automation_id: str | None = None,
        data: dict = {},
    ):
        """Update automation configuration (create/update/delete)."""
        if const.ATTR_REMOVE in data:
            self.store.async_delete_automation(automation_id)
        elif not automation_id:
            self.store.async_create_automation(data)
        else:
            self.store.async_update_automation(automation_id, data)

        async_dispatcher_send(self.hass, "alarmo_automations_updated")

    def register_events(self):
        """Register event handlers."""

        # handle push notifications with action buttons
        # NOTE(review): @callback on an async def is contradictory in HA
        # (callback marks synchronous callbacks) — confirm intended.
        @callback
        async def async_handle_push_event(event):
            if not event.data:
                return
            # Mobile apps differ: iOS sends "actionName", Android "action" —
            # TODO confirm which companion app uses which key.
            action = (
                event.data.get("actionName")
                if "actionName" in event.data
                else event.data.get("action")
            )

            if action not in const.EVENT_ACTIONS:
                return

            # Route the action to the master panel when present, else to the
            # single configured area; ambiguous with multiple areas.
            if self.hass.data[const.DOMAIN]["master"]:
                alarm_entity = self.hass.data[const.DOMAIN]["master"]
            elif len(self.hass.data[const.DOMAIN]["areas"]) == 1:
                alarm_entity = next(
                    iter(self.hass.data[const.DOMAIN]["areas"].values())
                )
            else:
                _LOGGER.info(
                    "Cannot process the push action, since there are multiple areas."
                )
                return

            arm_mode = (
                alarm_entity._revert_state
                if alarm_entity._revert_state in const.ARM_MODES
                else alarm_entity._arm_mode
            )
            # Actions like ALARMO_ARM_AWAY carry the target mode in their name.
            res = re.search(r"^ALARMO_ARM_", action)
            if res:
                arm_mode = action.replace("ALARMO_", "").lower().replace("arm", "armed")
            if not arm_mode:
                _LOGGER.info(
                    "Cannot process the push action, since the arm mode is not known."
                )
                return

            # NOTE(review): async_handle_arm_request results are not awaited
            # below — confirm these entity methods are fire-and-forget.
            if action == const.EVENT_ACTION_FORCE_ARM:
                _LOGGER.info("Received request for force arming")
                alarm_entity.async_handle_arm_request(
                    arm_mode, skip_code=True, bypass_open_sensors=True
                )
            elif action == const.EVENT_ACTION_RETRY_ARM:
                _LOGGER.info("Received request for retry arming")
                alarm_entity.async_handle_arm_request(arm_mode, skip_code=True)
            elif action == const.EVENT_ACTION_DISARM:
                _LOGGER.info("Received request for disarming")
                alarm_entity.alarm_disarm(None, skip_code=True)
            else:
                _LOGGER.info(
                    "Received request for arming with mode %s",
                    arm_mode,
                )
                alarm_entity.async_handle_arm_request(arm_mode, skip_code=True)

        self._subscriptions.append(
            self.hass.bus.async_listen(const.PUSH_EVENT, async_handle_push_event)
        )

    async def async_remove_entity(self, area_id: str):
        """Remove an alarm_control_panel entity.

        area_id "master" removes the master panel; any other value removes
        the entity of that area.
        """
        entity_registry = er.async_get(self.hass)
        if area_id == "master":
            entity = self.hass.data[const.DOMAIN]["master"]
            entity_registry.async_remove(entity.entity_id)
            self.hass.data[const.DOMAIN]["master"] = None
        else:
            entity = self.hass.data[const.DOMAIN]["areas"][area_id]
            entity_registry.async_remove(entity.entity_id)
            self.hass.data[const.DOMAIN]["areas"].pop(area_id, None)

    def async_get_sensor_groups(self):
        """Fetch a list of sensor groups (websocket API hook)."""
        groups = self.store.async_get_sensor_groups()
        return list(groups.values())

    def async_get_group_for_sensor(self, entity_id: str):
        """Fetch the group ID for a given sensor, or None if ungrouped."""
        groups = self.async_get_sensor_groups()
        result = next((el for el in groups if entity_id in el[ATTR_ENTITIES]), None)
        return result["group_id"] if result else None

    def assign_sensor_to_group(self, entity_id: str, group_id: str):
        """Assign a sensor to a group.

        group_id None detaches the sensor. A group that would be left with
        fewer than two members is deleted rather than kept.
        """
        updated = False
        old_group = self.async_get_group_for_sensor(entity_id)
        if old_group and group_id != old_group:
            # remove sensor from group
            el = self.store.async_get_sensor_group(old_group)
            if len(el[ATTR_ENTITIES]) > 2:
                self.store.async_update_sensor_group(
                    old_group,
                    {ATTR_ENTITIES: [x for x in el[ATTR_ENTITIES] if x != entity_id]},
                )
            else:
                # Groups need 2+ members; dissolve instead of shrinking to 1.
                self.store.async_delete_sensor_group(old_group)
            updated = True
        if group_id:
            # add sensor to group
            group = self.store.async_get_sensor_group(group_id)
            if not group:
                _LOGGER.error(
                    "Failed to assign entity %s to group %s",
                    entity_id,
                    group_id,
                )
            elif entity_id not in group[ATTR_ENTITIES]:
                self.store.async_update_sensor_group(
                    group_id, {ATTR_ENTITIES: group[ATTR_ENTITIES] + [entity_id]}
                )
                updated = True
        if updated:
            async_dispatcher_send(self.hass, "alarmo_sensors_updated")

    def async_update_sensor_group_config(
        self,
        group_id: str | None = None,
        data: dict = {},
    ):
        """Update sensor group configuration (create/update/delete)."""
        if const.ATTR_REMOVE in data:
            self.store.async_delete_sensor_group(group_id)
        elif not group_id:
            self.store.async_create_sensor_group(data)
        else:
            self.store.async_update_sensor_group(group_id, data)

        async_dispatcher_send(self.hass, "alarmo_sensors_updated")

    async def async_unload(self):
        """Remove all alarmo objects."""
        # remove alarm_control_panel entities
        areas = list(self.hass.data[const.DOMAIN]["areas"].keys())
        for area in areas:
            await self.async_remove_entity(area)
        if self.hass.data[const.DOMAIN]["master"]:
            await self.async_remove_entity("master")

        del self.hass.data[const.DOMAIN]["sensor_handler"]
        del self.hass.data[const.DOMAIN]["automation_handler"]
        del self.hass.data[const.DOMAIN]["mqtt_handler"]
        del self.hass.data[const.DOMAIN]["event_handler"]

        # remove subscriptions for coordinator
        while len(self._subscriptions):
            # Each entry is an unsubscribe callable; pop-and-call each one.
            self._subscriptions.pop()()

    async def async_delete_config(self):
        """Wipe alarmo storage."""
        await self.store.async_delete()
||||
|
||||
|
||||
@callback
def register_services(hass):
    """Register services used by alarmo component.

    Registers the enable_user and disable_user admin services. Both are
    served by one shared handler that inspects ``call.service`` to decide
    the direction of the toggle.
    """
    coordinator = hass.data[const.DOMAIN]["coordinator"]

    async def async_srv_toggle_user(call):
        """Enable or disable a user (matched by name) via service call."""
        name = call.data.get(ATTR_NAME)
        # The service id distinguishes enable from disable.
        enable = call.service == const.SERVICE_ENABLE_USER
        users = coordinator.store.async_get_users()
        user = next(
            (item for item in list(users.values()) if item[ATTR_NAME] == name), None
        )
        if user is None:
            _LOGGER.warning(
                "Failed to %s user, no match for name '%s'",
                "enable" if enable else "disable",
                name,
            )
            return

        coordinator.store.async_update_user(
            user[const.ATTR_USER_ID], {const.ATTR_ENABLED: enable}
        )
        _LOGGER.debug(
            "User '%s' was %s", name, "enabled" if enable else "disabled"
        )

    # Both services share the handler and the validation schema.
    for service in (const.SERVICE_ENABLE_USER, const.SERVICE_DISABLE_USER):
        async_register_admin_service(
            hass,
            const.DOMAIN,
            service,
            async_srv_toggle_user,
            schema=const.SERVICE_TOGGLE_USER_SCHEMA,
        )
|
||||
BIN
custom_components/alarmo/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/automations.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/automations.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/card.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/card.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/config_flow.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/config_flow.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/const.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/const.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/event.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/event.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/helpers.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/helpers.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/mqtt.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/mqtt.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/panel.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/panel.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/sensors.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/sensors.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/store.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/store.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/alarmo/__pycache__/websockets.cpython-313.pyc
Normal file
BIN
custom_components/alarmo/__pycache__/websockets.cpython-313.pyc
Normal file
Binary file not shown.
1531
custom_components/alarmo/alarm_control_panel.py
Normal file
1531
custom_components/alarmo/alarm_control_panel.py
Normal file
File diff suppressed because it is too large
Load Diff
380
custom_components/alarmo/automations.py
Normal file
380
custom_components/alarmo/automations.py
Normal file
@@ -0,0 +1,380 @@
|
||||
"""Automations."""
|
||||
|
||||
import re
|
||||
import copy
|
||||
import logging
|
||||
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_TYPE,
|
||||
ATTR_SERVICE,
|
||||
ATTR_ENTITY_ID,
|
||||
CONF_SERVICE_DATA,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.template import Template, is_template_string
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.translation import async_get_translations
|
||||
from homeassistant.components.binary_sensor.device_condition import (
|
||||
ENTITY_CONDITIONS,
|
||||
)
|
||||
|
||||
from . import const
|
||||
from .helpers import (
|
||||
friendly_name_for_entity_id,
|
||||
)
|
||||
from .sensors import (
|
||||
STATE_OPEN,
|
||||
STATE_CLOSED,
|
||||
STATE_UNAVAILABLE,
|
||||
)
|
||||
from .alarm_control_panel import AlarmoBaseEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
EVENT_ARM_FAILURE = "arm_failure"
|
||||
|
||||
|
||||
def validate_area(trigger, area_id, hass):
    """Validate area for trigger.

    A trigger without the area attribute never matches. An explicit area
    must equal area_id; an empty area matches the sole configured area, or
    the master panel (area_id None) when multiple areas exist.
    """
    if const.ATTR_AREA not in trigger:
        return False
    configured_area = trigger[const.ATTR_AREA]
    if configured_area:
        return configured_area == area_id
    if len(hass.data[const.DOMAIN]["areas"]) == 1:
        return True
    return area_id is None
||||
|
||||
|
||||
def validate_modes(trigger, mode):
    """Validate modes for trigger.

    A trigger without the modes attribute never matches; an empty mode
    list matches every mode; otherwise the mode must be listed.
    """
    if const.ATTR_MODES not in trigger:
        return False
    allowed_modes = trigger[const.ATTR_MODES]
    return not allowed_modes or mode in allowed_modes
||||
|
||||
|
||||
def validate_trigger(trigger, to_state, from_state=None):
    """Validate trigger condition.

    Matches when the trigger's event equals the new state, or when the
    event is "untriggered" and the panel is leaving the triggered state.
    """
    if const.ATTR_EVENT not in trigger:
        return False
    event = trigger[const.ATTR_EVENT]
    if event == "untriggered" and from_state == "triggered":
        return True
    return event == to_state
||||
|
||||
|
||||
class AutomationHandler:
|
||||
"""Handle automations."""
|
||||
|
||||
    def __init__(self, hass: HomeAssistant):
        """Initialize automation handler.

        Loads the automation config from storage and subscribes to the
        dispatcher signals that drive automation execution: config updates,
        alarm state changes, and failed-to-arm events.
        """
        self.hass = hass
        # Mapping of automation_id -> automation config, refreshed on updates.
        self._config = None
        # Unsubscribe callables; popped and invoked in __del__.
        self._subscriptions = []
        # Translation caches keyed implicitly by the *Lang fields below.
        self._sensorTranslationCache = {}
        self._alarmTranslationCache = {}
        self._sensorTranslationLang = None
        self._alarmTranslationLang = None

        def async_update_config():
            """Automation config updated, reload the configuration."""
            self._config = self.hass.data[const.DOMAIN][
                "coordinator"
            ].store.async_get_automations()

        self._subscriptions.append(
            async_dispatcher_connect(
                hass, "alarmo_automations_updated", async_update_config
            )
        )
        async_update_config()

        # NOTE(review): @callback on an async def is contradictory in HA
        # (callback marks synchronous callbacks) — confirm intended.
        @callback
        async def async_alarm_state_changed(
            area_id: str, old_state: str, new_state: str
        ):
            # Run every enabled automation whose triggers match this transition.
            if not old_state:
                # ignore automations at startup/restoring
                return

            # area_id None addresses the master panel.
            if area_id:
                alarm_entity = self.hass.data[const.DOMAIN]["areas"][area_id]
            else:
                alarm_entity = self.hass.data[const.DOMAIN]["master"]

            if not alarm_entity:
                return

            _LOGGER.debug(
                "state of %s is updated from %s to %s",
                alarm_entity.entity_id,
                old_state,
                new_state,
            )

            if new_state in const.ARM_MODES:
                # we don't distinguish between armed modes for automations
                # they are handled separately
                new_state = "armed"

            for automation_id, config in self._config.items():
                if not config[const.ATTR_ENABLED]:
                    continue
                for trigger in config[const.ATTR_TRIGGERS]:
                    if (
                        validate_area(trigger, area_id, self.hass)
                        and validate_modes(trigger, alarm_entity._arm_mode)
                        and validate_trigger(trigger, new_state, old_state)
                    ):
                        await self.async_execute_automation(automation_id, alarm_entity)

        self._subscriptions.append(
            async_dispatcher_connect(
                self.hass, "alarmo_state_updated", async_alarm_state_changed
            )
        )

        # NOTE(review): mutable default argument on args; harmless here since
        # it is never mutated, but a shared default dict is a known pitfall.
        @callback
        async def async_handle_event(event: str, area_id: str, args: dict = {}):
            # Only failed-to-arm events drive automations via this path.
            if event != const.EVENT_FAILED_TO_ARM:
                return
            if area_id:
                alarm_entity = self.hass.data[const.DOMAIN]["areas"][area_id]
            else:
                alarm_entity = self.hass.data[const.DOMAIN]["master"]

            _LOGGER.debug(
                "%s has failed to arm",
                alarm_entity.entity_id,
            )

            for automation_id, config in self._config.items():
                if not config[const.ATTR_ENABLED]:
                    continue
                for trigger in config[const.ATTR_TRIGGERS]:
                    if (
                        validate_area(trigger, area_id, self.hass)
                        and validate_modes(trigger, alarm_entity._arm_mode)
                        and validate_trigger(trigger, EVENT_ARM_FAILURE)
                    ):
                        await self.async_execute_automation(automation_id, alarm_entity)

        self._subscriptions.append(
            async_dispatcher_connect(self.hass, "alarmo_event", async_handle_event)
        )
|
||||
|
||||
def __del__(self):
|
||||
"""Prepare for removal."""
|
||||
while len(self._subscriptions):
|
||||
self._subscriptions.pop()()
|
||||
|
||||
    async def async_execute_automation(
        self, automation_id: str, alarm_entity: AlarmoBaseEntity
    ):
        """Execute the specified automation.

        Runs every action of the automation in order; for notification-type
        automations, wildcard placeholders in the service data are replaced
        first. Failures of individual actions are logged, not raised.
        """
        # automation is a dict of AutomationEntry
        _LOGGER.debug(
            "Executing automation %s",
            automation_id,
        )

        actions = self._config[automation_id][const.ATTR_ACTIONS]
        for action in actions:
            try:
                # Shallow copy: nested dicts are still shared with the config
                # and the wildcard substitution below writes into them.
                service_data = copy.copy(action[CONF_SERVICE_DATA])

                if action.get(ATTR_ENTITY_ID):
                    service_data[ATTR_ENTITY_ID] = action[ATTR_ENTITY_ID]

                if self._config[automation_id][CONF_TYPE] == const.ATTR_NOTIFICATION:
                    # replace wildcards within service_data struct
                    # (one level of nesting is supported)
                    for key, val in service_data.items():
                        if type(val) is str:
                            service_data[key] = await self.replace_wildcards_in_string(
                                val, alarm_entity
                            )
                        elif type(val) is dict:
                            for subkey, subval in service_data[key].items():
                                if type(subval) is str:
                                    service_data[key][
                                        subkey
                                    ] = await self.replace_wildcards_in_string(
                                        subval, alarm_entity
                                    )

                # Service reference is stored as "domain.service".
                domain, service = action[ATTR_SERVICE].split(".")

                await self.hass.async_create_task(
                    self.hass.services.async_call(
                        domain,
                        service,
                        service_data,
                        blocking=False,
                        context={},
                    )
                )
            except HomeAssistantError as e:
                # One failing action does not abort the remaining actions.
                _LOGGER.error(
                    "Execution of action %s failed, reason: %s",
                    automation_id,
                    e,
                )
|
||||
|
||||
def get_automations_by_area(self, area_id: str):
|
||||
"""Get automations for specified area."""
|
||||
result = []
|
||||
for automation_id, config in self._config.items():
|
||||
if any(
|
||||
el[const.ATTR_AREA] == area_id for el in config[const.ATTR_TRIGGERS]
|
||||
):
|
||||
result.append(automation_id)
|
||||
|
||||
return result
|
||||
|
||||
    async def replace_wildcards_in_string(
        self, input: str, alarm_entity: AlarmoBaseEntity
    ):
        """Look for wildcards in string and replace them with content.

        Supported wildcards: {{open_sensors}} (with optional |lang= and
        |format=short modifiers), {{bypassed_sensors}}, {{arm_mode}} (with
        optional |lang=), {{changed_by}}, {{delay}}. Any remaining HA
        template syntax is rendered last.
        """
        # process wildcard '{{open_sensors}}'
        res = re.search(r"{{open_sensors(\|lang=([^}]+))?(\|format=short)?}}", input)
        if res:
            lang = res.group(2) if res.group(2) else "en"
            # format=short lists friendly names only, without state text.
            names_only = True if res.group(3) else False

            open_sensors = ""
            if alarm_entity.open_sensors:
                parts = []
                for entity_id, status in alarm_entity.open_sensors.items():
                    if names_only:
                        parts.append(friendly_name_for_entity_id(entity_id, self.hass))
                    else:
                        parts.append(
                            await self.async_get_open_sensor_string(
                                entity_id, status, lang
                            )
                        )
                open_sensors = ", ".join(parts)
            input = input.replace(res.group(0), open_sensors)

        # process wildcard '{{bypassed_sensors}}'
        if "{{bypassed_sensors}}" in input:
            bypassed_sensors = ""
            if alarm_entity.bypassed_sensors and len(alarm_entity.bypassed_sensors):
                parts = []
                for entity_id in alarm_entity.bypassed_sensors:
                    name = friendly_name_for_entity_id(entity_id, self.hass)
                    parts.append(name)
                bypassed_sensors = ", ".join(parts)
            input = input.replace("{{bypassed_sensors}}", bypassed_sensors)

        # process wildcard '{{arm_mode}}'
        res = re.search(r"{{arm_mode(\|lang=([^}]+))?}}", input)
        if res:
            lang = res.group(2) if res.group(2) else "en"
            arm_mode = await self.async_get_arm_mode_string(alarm_entity.arm_mode, lang)

            input = input.replace(res.group(0), arm_mode)

        # process wildcard '{{changed_by}}'
        if "{{changed_by}}" in input:
            changed_by = alarm_entity.changed_by if alarm_entity.changed_by else ""
            input = input.replace("{{changed_by}}", changed_by)

        # process wildcard '{{delay}}'
        if "{{delay}}" in input:
            delay = str(alarm_entity.delay) if alarm_entity.delay else ""
            input = input.replace("{{delay}}", delay)

        # process HA templates
        if is_template_string(input):
            input = Template(input, self.hass).async_render()

        return input
|
||||
|
||||
async def async_get_open_sensor_string(
|
||||
self, entity_id: str, state: str, language: str
|
||||
):
|
||||
"""Get translation for sensor states."""
|
||||
if self._sensorTranslationCache and self._sensorTranslationLang == language:
|
||||
translations = self._sensorTranslationCache
|
||||
else:
|
||||
translations = await async_get_translations(
|
||||
self.hass, language, "device_automation", ["binary_sensor"]
|
||||
)
|
||||
|
||||
self._sensorTranslationCache = translations
|
||||
self._sensorTranslationLang = language
|
||||
|
||||
entity = self.hass.states.get(entity_id)
|
||||
|
||||
device_type = (
|
||||
entity.attributes["device_class"]
|
||||
if entity and "device_class" in entity.attributes
|
||||
else None
|
||||
)
|
||||
|
||||
if state == STATE_OPEN:
|
||||
translation_key = (
|
||||
f"component.binary_sensor.device_automation.condition_type.{ENTITY_CONDITIONS[device_type][0]['type']}"
|
||||
if device_type in ENTITY_CONDITIONS
|
||||
else None
|
||||
)
|
||||
if translation_key and translation_key in translations:
|
||||
string = translations[translation_key]
|
||||
else:
|
||||
string = "{entity_name} is open"
|
||||
elif state == STATE_CLOSED:
|
||||
translation_key = (
|
||||
f"component.binary_sensor.device_automation.condition_type.{ENTITY_CONDITIONS[device_type][1]['type']}"
|
||||
if device_type in ENTITY_CONDITIONS
|
||||
else None
|
||||
)
|
||||
if translation_key and translation_key in translations:
|
||||
string = translations[translation_key]
|
||||
else:
|
||||
string = "{entity_name} is closed"
|
||||
|
||||
elif state == STATE_UNAVAILABLE:
|
||||
string = "{entity_name} is unavailable"
|
||||
|
||||
else:
|
||||
string = "{entity_name} is unknown"
|
||||
|
||||
name = friendly_name_for_entity_id(entity_id, self.hass)
|
||||
string = string.replace("{entity_name}", name)
|
||||
|
||||
return string
|
||||
|
||||
async def async_get_arm_mode_string(self, arm_mode: str, language: str):
|
||||
"""Get translation for alarm arm mode."""
|
||||
if self._alarmTranslationCache and self._alarmTranslationLang == language:
|
||||
translations = self._alarmTranslationCache
|
||||
else:
|
||||
translations = await async_get_translations(
|
||||
self.hass, language, "entity_component", ["alarm_control_panel"]
|
||||
)
|
||||
|
||||
self._alarmTranslationCache = translations
|
||||
self._alarmTranslationLang = language
|
||||
|
||||
translation_key = (
|
||||
f"component.alarm_control_panel.entity_component._.state.{arm_mode}"
|
||||
if arm_mode
|
||||
else None
|
||||
)
|
||||
|
||||
if translation_key and translation_key in translations:
|
||||
return translations[translation_key]
|
||||
elif arm_mode:
|
||||
return " ".join(w.capitalize() for w in arm_mode.split("_"))
|
||||
else:
|
||||
return ""
|
||||
34
custom_components/alarmo/card.py
Normal file
34
custom_components/alarmo/card.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""WebSocket handler and registration for Alarmo card update events."""
|
||||
|
||||
import voluptuous as vol
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.components.websocket_api import decorators, async_register_command
|
||||
|
||||
|
||||
@decorators.websocket_command(
    {
        vol.Required("type"): "alarmo_updated",
    }
)
@decorators.async_response
async def handle_subscribe_updates(hass, connection, msg):
    """Subscribe a websocket connection to Alarmo update events.

    Every 'alarmo_event' dispatcher signal is forwarded to the client as a
    websocket event until the subscription is cancelled.
    """

    @callback
    def handle_event(event: str, area_id: str, args=None):
        """Forward a dispatcher signal to the websocket client."""
        # 'args=None' instead of a mutable default dict ({}), which would be
        # shared between calls.
        data = dict(**(args or {}), **{"event": event, "area_id": area_id})
        connection.send_message(
            {"id": msg["id"], "type": "event", "event": {"data": data}}
        )

    connection.subscriptions[msg["id"]] = async_dispatcher_connect(
        hass, "alarmo_event", handle_event
    )
    connection.send_result(msg["id"])
|
||||
|
||||
|
||||
async def async_register_card(hass):
    """Register the websocket command used by the Alarmo Lovelace card.

    The card subscribes via the 'alarmo_updated' command and then receives
    'alarmo_event' dispatcher signals as websocket events.  (The previous
    docstring incorrectly described this as publishing events to Lovelace.)
    """
    async_register_command(hass, handle_subscribe_updates)
|
||||
30
custom_components/alarmo/config_flow.py
Normal file
30
custom_components/alarmo/config_flow.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""Config flow for the Alarmo component."""
|
||||
|
||||
import secrets
|
||||
|
||||
from homeassistant import config_entries
|
||||
|
||||
from .const import (
|
||||
NAME,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
|
||||
class AlarmoConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Config flow for Alarmo."""

    VERSION = "1.0.0"
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL

    async def async_step_user(self, user_input=None):
        """Handle a flow initialized by the user.

        Only a single instance of the integration is allowed; a random token
        is used as the config entry's unique id.
        """
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")

        # Renamed from 'id', which shadowed the builtin of the same name.
        unique_id = secrets.token_hex(6)

        await self.async_set_unique_id(unique_id)
        self._abort_if_unique_id_configured(updates=user_input)

        return self.async_create_entry(title=NAME, data={})
|
||||
234
custom_components/alarmo/const.py
Normal file
234
custom_components/alarmo/const.py
Normal file
@@ -0,0 +1,234 @@
|
||||
"""Store constants."""
|
||||
|
||||
import datetime
|
||||
|
||||
import voluptuous as vol
|
||||
from homeassistant.const import (
|
||||
ATTR_NAME,
|
||||
CONF_CODE,
|
||||
CONF_MODE,
|
||||
ATTR_ENTITY_ID,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelState,
|
||||
AlarmControlPanelEntityFeature,
|
||||
)
|
||||
|
||||
# Integration metadata.
VERSION = "1.10.13"
NAME = "Alarmo"
MANUFACTURER = "@nielsfaber"

DOMAIN = "alarmo"

# Filesystem layout of the bundled frontend panel.
CUSTOM_COMPONENTS = "custom_components"
INTEGRATION_FOLDER = DOMAIN
PANEL_FOLDER = "frontend"
PANEL_FILENAME = "dist/alarm-panel.js"

# Panel registration settings.
PANEL_URL = "/api/panel_custom/alarmo"
PANEL_TITLE = NAME
PANEL_ICON = "mdi:shield-home"
PANEL_NAME = "alarm-panel"

# Timing constants.
INITIALIZATION_TIME = datetime.timedelta(seconds=60)
SENSOR_ARM_TIME = datetime.timedelta(seconds=5)

# All alarm panel states handled by the integration.
STATES = [
    AlarmControlPanelState.ARMED_AWAY,
    AlarmControlPanelState.ARMED_HOME,
    AlarmControlPanelState.ARMED_NIGHT,
    AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
    AlarmControlPanelState.ARMED_VACATION,
    AlarmControlPanelState.DISARMED,
    AlarmControlPanelState.TRIGGERED,
    AlarmControlPanelState.PENDING,
    AlarmControlPanelState.ARMING,
]

# The subset of states that represent an armed mode.
ARM_MODES = [
    AlarmControlPanelState.ARMED_AWAY,
    AlarmControlPanelState.ARMED_HOME,
    AlarmControlPanelState.ARMED_NIGHT,
    AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
    AlarmControlPanelState.ARMED_VACATION,
]

# Mapping between short arm-mode names and panel states.
ARM_MODE_TO_STATE = {
    "away": AlarmControlPanelState.ARMED_AWAY,
    "home": AlarmControlPanelState.ARMED_HOME,
    "night": AlarmControlPanelState.ARMED_NIGHT,
    "custom": AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
    "vacation": AlarmControlPanelState.ARMED_VACATION,
}

# Reverse mapping, derived from the above so the two can never diverge
# (previously maintained by hand as a second literal dict).
STATE_TO_ARM_MODE = {state: mode for mode, state in ARM_MODE_TO_STATE.items()}

# Command identifiers accepted via MQTT and services.
COMMAND_ARM_NIGHT = "arm_night"
COMMAND_ARM_AWAY = "arm_away"
COMMAND_ARM_HOME = "arm_home"
COMMAND_ARM_CUSTOM_BYPASS = "arm_custom_bypass"
COMMAND_ARM_VACATION = "arm_vacation"
COMMAND_DISARM = "disarm"

COMMANDS = [
    COMMAND_DISARM,
    COMMAND_ARM_AWAY,
    COMMAND_ARM_NIGHT,
    COMMAND_ARM_HOME,
    COMMAND_ARM_CUSTOM_BYPASS,
    COMMAND_ARM_VACATION,
]

# Internal event names dispatched via the 'alarmo_event' signal.
EVENT_DISARM = "disarm"
EVENT_LEAVE = "leave"
EVENT_ARM = "arm"
EVENT_ENTRY = "entry"
EVENT_TRIGGER = "trigger"
EVENT_FAILED_TO_ARM = "failed_to_arm"
EVENT_COMMAND_NOT_ALLOWED = "command_not_allowed"
EVENT_INVALID_CODE_PROVIDED = "invalid_code_provided"
EVENT_NO_CODE_PROVIDED = "no_code_provided"
EVENT_TRIGGER_TIME_EXPIRED = "trigger_time_expired"
EVENT_READY_TO_ARM_MODES_CHANGED = "ready_to_arm_modes_changed"

# Attribute keys used in config storage and service payloads.
ATTR_MODES = "modes"
ATTR_ARM_MODE = "arm_mode"
ATTR_CODE_DISARM_REQUIRED = "code_disarm_required"
ATTR_CODE_MODE_CHANGE_REQUIRED = "code_mode_change_required"
ATTR_REMOVE = "remove"
ATTR_OLD_CODE = "old_code"

ATTR_TRIGGER_TIME = "trigger_time"
ATTR_EXIT_TIME = "exit_time"
ATTR_ENTRY_TIME = "entry_time"

ATTR_ENABLED = "enabled"
ATTR_USER_ID = "user_id"

ATTR_CAN_ARM = "can_arm"
ATTR_CAN_DISARM = "can_disarm"
ATTR_DISARM_AFTER_TRIGGER = "disarm_after_trigger"
ATTR_IGNORE_BLOCKING_SENSORS_AFTER_TRIGGER = "ignore_blocking_sensors_after_trigger"

# NOTE: a second, duplicate 'ATTR_REMOVE = "remove"' assignment that appeared
# here has been removed (it was already defined above).
ATTR_IS_OVERRIDE_CODE = "is_override_code"
ATTR_AREA_LIMIT = "area_limit"
ATTR_CODE_FORMAT = "code_format"
ATTR_CODE_LENGTH = "code_length"

ATTR_AUTOMATION_ID = "automation_id"

ATTR_TYPE = "type"
ATTR_AREA = "area"
ATTR_MASTER = "master"

ATTR_TRIGGERS = "triggers"
ATTR_ACTIONS = "actions"
ATTR_EVENT = "event"
ATTR_REQUIRE_CODE = "require_code"

ATTR_NOTIFICATION = "notification"
ATTR_VERSION = "version"
ATTR_STATE_PAYLOAD = "state_payload"
ATTR_COMMAND_PAYLOAD = "command_payload"

ATTR_FORCE = "force"
ATTR_SKIP_DELAY = "skip_delay"
ATTR_CONTEXT_ID = "context_id"

# Event fired by the HA mobile app for actionable notifications.
PUSH_EVENT = "mobile_app_notification_action"

# Actionable-notification action identifiers.
EVENT_ACTION_FORCE_ARM = "ALARMO_FORCE_ARM"
EVENT_ACTION_RETRY_ARM = "ALARMO_RETRY_ARM"
EVENT_ACTION_DISARM = "ALARMO_DISARM"
EVENT_ACTION_ARM_AWAY = "ALARMO_ARM_AWAY"
EVENT_ACTION_ARM_HOME = "ALARMO_ARM_HOME"
EVENT_ACTION_ARM_NIGHT = "ALARMO_ARM_NIGHT"
EVENT_ACTION_ARM_VACATION = "ALARMO_ARM_VACATION"
EVENT_ACTION_ARM_CUSTOM_BYPASS = "ALARMO_ARM_CUSTOM_BYPASS"

EVENT_ACTIONS = [
    EVENT_ACTION_FORCE_ARM,
    EVENT_ACTION_RETRY_ARM,
    EVENT_ACTION_DISARM,
    EVENT_ACTION_ARM_AWAY,
    EVENT_ACTION_ARM_HOME,
    EVENT_ACTION_ARM_NIGHT,
    EVENT_ACTION_ARM_VACATION,
    EVENT_ACTION_ARM_CUSTOM_BYPASS,
]

# Map each armed state onto the corresponding supported-feature flag.
MODES_TO_SUPPORTED_FEATURES = {
    AlarmControlPanelState.ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY,
    AlarmControlPanelState.ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME,
    AlarmControlPanelState.ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT,
    AlarmControlPanelState.ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS,  # noqa: E501
    AlarmControlPanelState.ARMED_VACATION: AlarmControlPanelEntityFeature.ARM_VACATION,
}

SERVICE_ARM = "arm"
SERVICE_DISARM = "disarm"
SERVICE_SKIP_DELAY = "skip_delay"

# Long-form state names (the values of the AlarmControlPanelState members).
CONF_ALARM_ARMED_AWAY = "armed_away"
CONF_ALARM_ARMED_CUSTOM_BYPASS = "armed_custom_bypass"
CONF_ALARM_ARMED_HOME = "armed_home"
CONF_ALARM_ARMED_NIGHT = "armed_night"
CONF_ALARM_ARMED_VACATION = "armed_vacation"
CONF_ALARM_ARMING = "arming"
CONF_ALARM_DISARMED = "disarmed"
CONF_ALARM_PENDING = "pending"
CONF_ALARM_TRIGGERED = "triggered"

SERVICE_ARM_SCHEMA = cv.make_entity_service_schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
        vol.Optional(CONF_CODE, default=""): cv.string,
        # Both the short ("away") and long ("armed_away") names are accepted.
        vol.Optional(CONF_MODE, default=AlarmControlPanelState.ARMED_AWAY): vol.In(
            [
                "away",
                "home",
                "night",
                "custom",
                "vacation",
                CONF_ALARM_ARMED_AWAY,
                CONF_ALARM_ARMED_HOME,
                CONF_ALARM_ARMED_NIGHT,
                CONF_ALARM_ARMED_CUSTOM_BYPASS,
                CONF_ALARM_ARMED_VACATION,
            ]
        ),
        vol.Optional(ATTR_SKIP_DELAY, default=False): cv.boolean,
        vol.Optional(ATTR_FORCE, default=False): cv.boolean,
        vol.Optional(ATTR_CONTEXT_ID): int,
    }
)

SERVICE_DISARM_SCHEMA = cv.make_entity_service_schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
        vol.Optional(CONF_CODE, default=""): cv.string,
        vol.Optional(ATTR_CONTEXT_ID): int,
    }
)

SERVICE_SKIP_DELAY_SCHEMA = cv.make_entity_service_schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
    }
)

SERVICE_ENABLE_USER = "enable_user"
SERVICE_DISABLE_USER = "disable_user"
SERVICE_TOGGLE_USER_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_NAME, default=""): cv.string,
    }
)
|
||||
89
custom_components/alarmo/event.py
Normal file
89
custom_components/alarmo/event.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""fire events in HA for use with automations."""
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
|
||||
from . import const
|
||||
|
||||
|
||||
class EventHandler:
    """Translate internal 'alarmo_event' dispatcher signals into HA bus events."""

    def __init__(self, hass):
        """Subscribe to the internal alarmo_event dispatcher signal."""
        self.hass = hass
        self._subscription = async_dispatcher_connect(
            self.hass, "alarmo_event", self.async_handle_event
        )

    def __del__(self):
        """Remove the dispatcher subscription."""
        self._subscription()

    @callback
    def async_handle_event(self, event: str, area_id: str, args=None):
        """Handle an internal event and fire the matching public HA bus event.

        args carries event-specific extra data (open_sensors, arm_mode,
        modes, ...).  Uses None instead of a mutable default dict, which
        would be shared between calls.
        """
        args = args or {}

        # Resolve the alarm entity: a specific area, or the master alarm.
        if area_id:
            alarm_entity = self.hass.data[const.DOMAIN]["areas"][area_id]
        else:
            alarm_entity = self.hass.data[const.DOMAIN]["master"]

        if event in [
            const.EVENT_FAILED_TO_ARM,
            const.EVENT_COMMAND_NOT_ALLOWED,
            const.EVENT_INVALID_CODE_PROVIDED,
            const.EVENT_NO_CODE_PROVIDED,
        ]:
            # Failure events are published as 'alarmo_failed_to_arm',
            # annotated with a machine-readable reason.
            reasons = {
                const.EVENT_FAILED_TO_ARM: "open_sensors",
                const.EVENT_COMMAND_NOT_ALLOWED: "not_allowed",
                const.EVENT_INVALID_CODE_PROVIDED: "invalid_code",
                const.EVENT_NO_CODE_PROVIDED: "invalid_code",
            }

            data = dict(
                **args,
                **{
                    "area_id": area_id,
                    "entity_id": alarm_entity.entity_id,
                    "reason": reasons[event],
                },
            )
            if "open_sensors" in data:
                # Expose only the entity ids, not the raw sensor states.
                data["sensors"] = list(data["open_sensors"].keys())
                del data["open_sensors"]

            self.hass.bus.async_fire("alarmo_failed_to_arm", data)

        elif event in [const.EVENT_ARM, const.EVENT_DISARM]:
            data = dict(
                **args,
                **{
                    "area_id": area_id,
                    "entity_id": alarm_entity.entity_id,
                    "action": event,
                },
            )
            if "arm_mode" in data:
                # Translate the internal state name to the short arm-mode name.
                data["mode"] = const.STATE_TO_ARM_MODE[data["arm_mode"]]
                del data["arm_mode"]

            self.hass.bus.async_fire("alarmo_command_success", data)

        elif event == const.EVENT_READY_TO_ARM_MODES_CHANGED:
            # Report, per supported mode, whether the alarm is ready to arm in it.
            supported_modes = dict(
                filter(
                    lambda el: el[1] & alarm_entity.supported_features,
                    const.MODES_TO_SUPPORTED_FEATURES.items(),
                )
            )
            modes = {
                k.value: (k.value in args["modes"]) for k in supported_modes.keys()
            }
            data = {
                "area_id": area_id,
                "entity_id": alarm_entity.entity_id,
                **modes,
            }

            self.hass.bus.async_fire("alarmo_ready_to_arm_modes_updated", data)
|
||||
3288
custom_components/alarmo/frontend/dist/alarm-panel.js
vendored
Normal file
3288
custom_components/alarmo/frontend/dist/alarm-panel.js
vendored
Normal file
File diff suppressed because one or more lines are too long
19
custom_components/alarmo/helpers.py
Normal file
19
custom_components/alarmo/helpers.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Helper functions for Alarmo integration."""
|
||||
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
)
|
||||
|
||||
|
||||
def friendly_name_for_entity_id(entity_id: str, hass: HomeAssistant):
    """Return the friendly name of *entity_id*, falling back to the id itself."""
    state = hass.states.get(entity_id)
    friendly = state.attributes.get("friendly_name") if state else None
    return friendly if friendly else entity_id
|
||||
|
||||
|
||||
def omit(obj: dict, blacklisted_keys: list):
    """Return a shallow copy of *obj* without the blacklisted keys."""
    excluded = set(blacklisted_keys)
    return {key: value for key, value in obj.items() if key not in excluded}
|
||||
8
custom_components/alarmo/icons.json
Normal file
8
custom_components/alarmo/icons.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"services": {
|
||||
"arm": "mdi:shield-lock",
|
||||
"disarm": "mdi:shield-off",
|
||||
"enable_user": "mdi:account-lock-open",
|
||||
"disable_user": "mdi:account-lock-closed"
|
||||
}
|
||||
}
|
||||
21
custom_components/alarmo/manifest.json
Normal file
21
custom_components/alarmo/manifest.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"domain": "alarmo",
|
||||
"name": "Alarmo",
|
||||
"after_dependencies": [
|
||||
"mqtt",
|
||||
"notify"
|
||||
],
|
||||
"codeowners": [
|
||||
"@nielsfaber"
|
||||
],
|
||||
"config_flow": true,
|
||||
"dependencies": [
|
||||
"http",
|
||||
"panel_custom"
|
||||
],
|
||||
"documentation": "https://github.com/nielsfaber/alarmo",
|
||||
"iot_class": "local_push",
|
||||
"issue_tracker": "https://github.com/nielsfaber/alarmo/issues",
|
||||
"requirements": [],
|
||||
"version": "1.10.13"
|
||||
}
|
||||
319
custom_components/alarmo/mqtt.py
Normal file
319
custom_components/alarmo/mqtt.py
Normal file
@@ -0,0 +1,319 @@
|
||||
"""Class to handle MQTT integration."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.util import slugify
|
||||
from homeassistant.components import mqtt
|
||||
from homeassistant.helpers.json import JSONEncoder
|
||||
from homeassistant.components.mqtt import (
|
||||
DOMAIN as ATTR_MQTT,
|
||||
)
|
||||
from homeassistant.components.mqtt import (
|
||||
CONF_STATE_TOPIC,
|
||||
CONF_COMMAND_TOPIC,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
|
||||
from . import const
|
||||
from .helpers import (
|
||||
friendly_name_for_entity_id,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
CONF_EVENT_TOPIC = "event_topic"
|
||||
|
||||
|
||||
class MqttHandler:
    """Handle the MQTT integration.

    Publishes alarm state and Alarmo events on the configured topics, and
    listens for commands on the command topic.
    """

    def __init__(self, hass: HomeAssistant):  # noqa: PLR0915
        """Set up config tracking and dispatcher subscriptions."""
        self.hass = hass
        self._config = None
        self._subscribed_topics = []
        self._subscriptions = []

        @callback
        def async_update_config(_args=None):
            """Mqtt config updated, reload the configuration."""
            old_config = self._config
            new_config = self.hass.data[const.DOMAIN][
                "coordinator"
            ].store.async_get_config()

            if old_config and old_config[ATTR_MQTT] == new_config[ATTR_MQTT]:
                # only update MQTT config if some parameters are changed
                return

            self._config = new_config

            if (
                not old_config
                or old_config[ATTR_MQTT][CONF_COMMAND_TOPIC]
                != new_config[ATTR_MQTT][CONF_COMMAND_TOPIC]
            ):
                # re-subscribing is only needed if the command topic has changed
                self.hass.add_job(self._async_subscribe_topics())

            _LOGGER.debug("MQTT config was (re)loaded")

        self._subscriptions.append(
            async_dispatcher_connect(hass, "alarmo_config_updated", async_update_config)
        )
        async_update_config()

        @callback
        def async_alarm_state_changed(area_id: str, old_state: str, new_state: str):
            """Publish the new alarm state on the configured state topic."""
            if not self._config[ATTR_MQTT][const.ATTR_ENABLED]:
                return

            topic = self._config[ATTR_MQTT][CONF_STATE_TOPIC]

            if not topic:  # do not publish if no topic is provided
                return

            if area_id and len(self.hass.data[const.DOMAIN]["areas"]) > 1:
                # for a specific area, insert the area slug before the last
                # topic segment
                area = self.hass.data[const.DOMAIN]["areas"][area_id]
                parts = topic.rsplit("/", 1)
                parts.insert(1, slugify(area.name))
                topic = "/".join(parts)

            # allow the state string to be overridden per state in the config
            payload_config = self._config[ATTR_MQTT][const.ATTR_STATE_PAYLOAD]
            if payload_config.get(new_state):
                message = payload_config[new_state]
            else:
                message = new_state

            self.hass.async_create_task(
                mqtt.async_publish(self.hass, topic, message, retain=True)
            )
            _LOGGER.debug(
                "Published state '%s' on topic '%s'",
                message,
                topic,
            )

        self._subscriptions.append(
            async_dispatcher_connect(
                self.hass, "alarmo_state_updated", async_alarm_state_changed
            )
        )

        @callback
        def async_handle_event(event: str, area_id: str, args=None):
            """Publish an Alarmo event on the configured event topic."""
            # 'args=None' instead of a mutable default dict (shared instance).
            args = args or {}
            if not self._config[ATTR_MQTT][const.ATTR_ENABLED]:
                return

            topic = self._config[ATTR_MQTT][CONF_EVENT_TOPIC]

            if not topic:  # do not publish if no topic is provided
                return

            if area_id and len(self.hass.data[const.DOMAIN]["areas"]) > 1:
                # for a specific area, insert the area slug before the last
                # topic segment
                area = self.hass.data[const.DOMAIN]["areas"][area_id]
                parts = topic.rsplit("/", 1)
                parts.insert(1, slugify(area.name))
                topic = "/".join(parts)

            if event == const.EVENT_ARM:
                payload = {
                    "event": f"{event.upper()}_{args['arm_mode'].split('_', 1).pop(1).upper()}",  # noqa: E501
                    "delay": args["delay"],
                }
            elif event == const.EVENT_TRIGGER:
                payload = {
                    "event": event.upper(),
                    "delay": args["delay"],
                    "sensors": [
                        {
                            "entity_id": entity,
                            "name": friendly_name_for_entity_id(entity, self.hass),
                        }
                        for (entity, state) in args["open_sensors"].items()
                    ],
                }
            elif event == const.EVENT_FAILED_TO_ARM:
                payload = {
                    "event": event.upper(),
                    "sensors": [
                        {
                            "entity_id": entity,
                            "name": friendly_name_for_entity_id(entity, self.hass),
                        }
                        for (entity, state) in args["open_sensors"].items()
                    ],
                }
            elif event == const.EVENT_COMMAND_NOT_ALLOWED:
                payload = {
                    "event": event.upper(),
                    "state": args["state"],
                    "command": args["command"].upper(),
                }
            elif event in [
                const.EVENT_INVALID_CODE_PROVIDED,
                const.EVENT_NO_CODE_PROVIDED,
            ]:
                payload = {"event": event.upper()}
            else:
                # other internal events are not exposed over MQTT
                return

            payload = json.dumps(payload, cls=JSONEncoder)
            self.hass.async_create_task(mqtt.async_publish(self.hass, topic, payload))

        self._subscriptions.append(
            async_dispatcher_connect(self.hass, "alarmo_event", async_handle_event)
        )

    def __del__(self):
        """Prepare for removal."""
        while len(self._subscribed_topics):
            self._subscribed_topics.pop()()
        while len(self._subscriptions):
            self._subscriptions.pop()()

    async def _async_subscribe_topics(self):
        """Install a listener for the command topic."""
        if len(self._subscribed_topics):
            while len(self._subscribed_topics):
                self._subscribed_topics.pop()()
            _LOGGER.debug("Removed subscribed topics")

        if not self._config[ATTR_MQTT][const.ATTR_ENABLED]:
            return

        self._subscribed_topics.append(
            await mqtt.async_subscribe(
                self.hass,
                self._config[ATTR_MQTT][CONF_COMMAND_TOPIC],
                self.async_message_received,
            )
        )
        _LOGGER.debug(
            "Subscribed to topic %s",
            self._config[ATTR_MQTT][CONF_COMMAND_TOPIC],
        )

    @callback
    async def async_message_received(self, msg):  # noqa: PLR0915, PLR0912
        """Handle new MQTT messages.

        Accepts either a plain-string command payload or a JSON object with
        command/code/area/force/skip_delay fields (several aliases allowed).
        """
        command = None
        code = None
        area = None
        bypass_open_sensors = False
        skip_delay = False

        try:
            payload = json.loads(msg.payload)
            payload = {k.lower(): v for k, v in payload.items()}

            # accept several aliases for the command field
            for key in ("command", "cmd", "action", "state"):
                if key in payload:
                    command = payload[key]
                    break

            # accept several aliases for the code field
            for key in ("code", "pin", "password", "pincode"):
                if key in payload:
                    code = payload[key]
                    break

            if payload.get("area"):
                area = payload["area"]

            # BUGFIX: previously this read payload["bypass_open_sensors"]
            # unconditionally, raising KeyError when only "force" was supplied.
            if payload.get("bypass_open_sensors") or payload.get("force"):
                bypass_open_sensors = True

            if payload.get(const.ATTR_SKIP_DELAY):
                skip_delay = payload[const.ATTR_SKIP_DELAY]

        except ValueError:
            # no JSON structure found; treat the raw payload as the command
            command = msg.payload
            code = None

        if type(command) is str:
            command = command.lower()
        else:
            _LOGGER.warning("Received unexpected command")
            return

        payload_config = self._config[ATTR_MQTT][const.ATTR_COMMAND_PAYLOAD]
        skip_code = not self._config[ATTR_MQTT][const.ATTR_REQUIRE_CODE]

        # map each command to its (possibly customized) payload string
        command_payloads = {}
        for item in const.COMMANDS:
            if payload_config.get(item):
                command_payloads[item] = payload_config[item].lower()
            else:
                command_payloads[item] = item.lower()

        if command not in list(command_payloads.values()):
            _LOGGER.warning("Received unexpected command: %s", command)
            return

        # resolve the target entity: explicit area, master alarm, or the
        # single configured area
        if area:
            res = list(
                filter(
                    lambda el: slugify(el.name) == area,
                    self.hass.data[const.DOMAIN]["areas"].values(),
                )
            )
            if not res:
                _LOGGER.warning(
                    "Area %s does not exist",
                    area,
                )
                return
            entity = res[0]
        elif (
            self._config[const.ATTR_MASTER][const.ATTR_ENABLED]
            and len(self.hass.data[const.DOMAIN]["areas"]) > 1
        ):
            entity = self.hass.data[const.DOMAIN]["master"]
        elif len(self.hass.data[const.DOMAIN]["areas"]) == 1:
            entity = next(iter(self.hass.data[const.DOMAIN]["areas"].values()))
        else:
            _LOGGER.warning("No area specified")
            return

        _LOGGER.debug(
            "Received command %s",
            command,
        )

        if command == command_payloads[const.COMMAND_DISARM]:
            entity.alarm_disarm(code, skip_code=skip_code)
        elif command == command_payloads[const.COMMAND_ARM_AWAY]:
            await entity.async_alarm_arm_away(
                code, skip_code, bypass_open_sensors, skip_delay
            )
        elif command == command_payloads[const.COMMAND_ARM_NIGHT]:
            await entity.async_alarm_arm_night(
                code, skip_code, bypass_open_sensors, skip_delay
            )
        elif command == command_payloads[const.COMMAND_ARM_HOME]:
            await entity.async_alarm_arm_home(
                code, skip_code, bypass_open_sensors, skip_delay
            )
        elif command == command_payloads[const.COMMAND_ARM_CUSTOM_BYPASS]:
            await entity.async_alarm_arm_custom_bypass(
                code, skip_code, bypass_open_sensors, skip_delay
            )
        elif command == command_payloads[const.COMMAND_ARM_VACATION]:
            await entity.async_alarm_arm_vacation(
                code, skip_code, bypass_open_sensors, skip_delay
            )
|
||||
50
custom_components/alarmo/panel.py
Normal file
50
custom_components/alarmo/panel.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""Panel registration for Alarmo integration."""
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
from homeassistant.components import frontend, panel_custom
|
||||
from homeassistant.components.http import StaticPathConfig
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
PANEL_URL,
|
||||
PANEL_ICON,
|
||||
PANEL_NAME,
|
||||
PANEL_TITLE,
|
||||
PANEL_FOLDER,
|
||||
PANEL_FILENAME,
|
||||
CUSTOM_COMPONENTS,
|
||||
INTEGRATION_FOLDER,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_register_panel(hass):
    """Register the Alarmo frontend panel and serve its static JS bundle."""
    base_dir = os.path.join(hass.config.path(CUSTOM_COMPONENTS), INTEGRATION_FOLDER)
    bundle_path = os.path.join(base_dir, PANEL_FOLDER, PANEL_FILENAME)

    # Serve the compiled panel bundle at a fixed URL.
    await hass.http.async_register_static_paths(
        [StaticPathConfig(PANEL_URL, bundle_path, cache_headers=False)]
    )

    # Register the custom panel in the sidebar (admin-only).
    await panel_custom.async_register_panel(
        hass,
        webcomponent_name=PANEL_NAME,
        frontend_url_path=DOMAIN,
        module_url=PANEL_URL,
        sidebar_title=PANEL_TITLE,
        sidebar_icon=PANEL_ICON,
        require_admin=True,
        config={},
        config_panel_domain=DOMAIN,
    )
|
||||
|
||||
|
||||
def async_unregister_panel(hass):
    """Remove the Alarmo panel from the HA frontend."""
    _LOGGER.debug("Removing panel")
    frontend.async_remove_panel(hass, DOMAIN)
|
||||
680
custom_components/alarmo/sensors.py
Normal file
680
custom_components/alarmo/sensors.py
Normal file
@@ -0,0 +1,680 @@
|
||||
"""Sensor handling for Alarmo integration."""
|
||||
|
||||
import logging
|
||||
from types import SimpleNamespace
|
||||
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.core import (
|
||||
CoreState,
|
||||
HomeAssistant,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
STATE_ON,
|
||||
ATTR_NAME,
|
||||
STATE_OFF,
|
||||
ATTR_STATE,
|
||||
STATE_OPEN,
|
||||
STATE_CLOSED,
|
||||
STATE_UNKNOWN,
|
||||
STATE_UNAVAILABLE,
|
||||
ATTR_LAST_TRIP_TIME,
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
)
|
||||
from homeassistant.helpers.event import (
|
||||
async_track_point_in_time,
|
||||
async_track_state_change_event,
|
||||
)
|
||||
from homeassistant.components.lock import LockState
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
)
|
||||
from homeassistant.components.alarm_control_panel import AlarmControlPanelState
|
||||
|
||||
from . import const
|
||||
|
||||
# Per-sensor configuration attribute keys.
ATTR_USE_EXIT_DELAY = "use_exit_delay"
ATTR_USE_ENTRY_DELAY = "use_entry_delay"
ATTR_ALWAYS_ON = "always_on"
ATTR_ARM_ON_CLOSE = "arm_on_close"
ATTR_ALLOW_OPEN = "allow_open"
ATTR_TRIGGER_UNAVAILABLE = "trigger_unavailable"
ATTR_AUTO_BYPASS = "auto_bypass"
ATTR_AUTO_BYPASS_MODES = "auto_bypass_modes"
ATTR_GROUP = "group"
ATTR_GROUP_ID = "group_id"
ATTR_TIMEOUT = "timeout"
ATTR_EVENT_COUNT = "event_count"
ATTR_ENTITIES = "entities"
ATTR_NEW_ENTITY_ID = "new_entity_id"
ATTR_ENTRY_DELAY = "entry_delay"

# Raw HA states that count as 'open' (tripped) resp. 'closed' (safe).
SENSOR_STATES_OPEN = [STATE_ON, STATE_OPEN, LockState.UNLOCKED]
SENSOR_STATES_CLOSED = [STATE_OFF, STATE_CLOSED, LockState.LOCKED]


# Supported sensor categories.
SENSOR_TYPE_DOOR = "door"
SENSOR_TYPE_WINDOW = "window"
SENSOR_TYPE_MOTION = "motion"
SENSOR_TYPE_TAMPER = "tamper"
SENSOR_TYPE_ENVIRONMENTAL = "environmental"
SENSOR_TYPE_OTHER = "other"
SENSOR_TYPES = [
    SENSOR_TYPE_DOOR,
    SENSOR_TYPE_WINDOW,
    SENSOR_TYPE_MOTION,
    SENSOR_TYPE_TAMPER,
    SENSOR_TYPE_ENVIRONMENTAL,
    SENSOR_TYPE_OTHER,
]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def parse_sensor_state(state):
    """Parse the state of a sensor into open/closed/unavailable/unknown."""
    # A missing entity, an empty state value or an explicit 'unavailable'
    # are all collapsed into STATE_UNAVAILABLE.
    if not state or not state.state or state.state == STATE_UNAVAILABLE:
        return STATE_UNAVAILABLE
    if state.state in SENSOR_STATES_OPEN:
        return STATE_OPEN
    if state.state in SENSOR_STATES_CLOSED:
        return STATE_CLOSED
    # Anything else (e.g. a sensor reporting a non-binary value).
    return STATE_UNKNOWN
||||
|
||||
|
||||
def sensor_state_allowed(state, sensor_config, alarm_state):  # noqa: PLR0911
    """Return whether the sensor state is permitted or a state change should occur."""
    # A sensor is "violating" when it is open, or unavailable while the
    # configuration asks to treat unavailability as a trigger.
    violating = state == STATE_OPEN or (
        state == STATE_UNAVAILABLE and sensor_config[ATTR_TRIGGER_UNAVAILABLE]
    )
    if not violating:
        # sensor has the safe state
        return True

    if alarm_state == AlarmControlPanelState.TRIGGERED:
        # alarm is already triggered
        return True

    if sensor_config[ATTR_ALWAYS_ON]:
        # alarm should always be triggered by always-on sensor
        return False

    if (
        alarm_state == AlarmControlPanelState.ARMING
        and not sensor_config[ATTR_USE_EXIT_DELAY]
    ):
        # arming should be aborted if sensor without exit delay is active
        return False

    if alarm_state in const.ARM_MODES:
        # normal triggering case
        return False

    if alarm_state == AlarmControlPanelState.PENDING:
        # Allow both immediate and delayed sensors
        # during pending for timer shortening/immediate trigger
        # This enables per-sensor entry delay logic
        # to process subsequent triggers during countdown
        return False

    # Any other alarm state (e.g. disarmed) tolerates the sensor state.
    return True
|
||||
|
||||
|
||||
class SensorHandler:
|
||||
"""Class to handle sensors for Alarmo."""
|
||||
|
||||
    def __init__(self, hass: HomeAssistant):
        """Initialize the sensor handler.

        Sets up internal state, defines the (re)configuration callback, and
        defers listener registration until Home Assistant has fully started.
        """
        # Sensor configuration dict, loaded from the store on (re)configure.
        self._config = None
        self.hass = hass
        # Unsubscribe callback for the state-change listener (None when inactive).
        self._state_listener = None
        # Unsubscribe callbacks for dispatcher subscriptions.
        self._subscriptions = []
        # Per-entity cancel callbacks for arm-on-close timers.
        self._arm_timers = {}
        # Sensor group configuration, keyed by group_id.
        self._groups = {}
        # Recent trip events per group, used for group trigger evaluation.
        self._group_events = {}
        self._startup_complete = False
        # Remembers the state a sensor had before it went unavailable.
        self._unavailable_state_mem = {}

        @callback
        def async_update_sensor_config():
            """Sensor config updated, reload the configuration."""
            self._config = self.hass.data[const.DOMAIN][
                "coordinator"
            ].store.async_get_sensors()
            self._groups = self.hass.data[const.DOMAIN][
                "coordinator"
            ].store.async_get_sensor_groups()
            # Reset group trip bookkeeping since the group layout may have changed.
            self._group_events = {}
            self.async_watch_sensor_states()

        # Store the callback for later registration
        self._async_update_sensor_config = async_update_sensor_config

        @callback
        def _setup_sensor_listeners():
            """Register sensor listeners and perform initial setup."""
            self._subscriptions.append(
                async_dispatcher_connect(
                    hass, "alarmo_state_updated", self.async_watch_sensor_states
                )
            )
            self._subscriptions.append(
                async_dispatcher_connect(
                    hass, "alarmo_sensors_updated", self._async_update_sensor_config
                )
            )
            # Do the initial sensor setup now that HA is running
            self._async_update_sensor_config()

            # Evaluate initial sensor states for all areas on startup
            for area_id in self.hass.data[const.DOMAIN]["areas"].keys():
                self.update_ready_to_arm_status(area_id)
                # If area is armed, validate sensors and trigger if needed
                # Schedule this to run in the event loop since it may call async methods
                hass.async_create_task(
                    self._async_evaluate_armed_state_on_startup(area_id)
                )

        def handle_startup(_event):
            self._startup_complete = True
            # Schedule the setup to run in the event loop (from thread pool executor)
            hass.loop.call_soon_threadsafe(_setup_sensor_listeners)

        # NOTE(review): CoreState is referenced here — confirm it is imported
        # at the top of this file.
        if hass.state == CoreState.running:
            self._startup_complete = True
            # Schedule in event loop since we're in __init__ (sync context)
            hass.loop.call_soon_threadsafe(_setup_sensor_listeners)
        else:
            hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, handle_startup)
|
||||
def __del__(self):
|
||||
"""Prepare for removal."""
|
||||
if self._state_listener:
|
||||
self._state_listener()
|
||||
self._state_listener = None
|
||||
while len(self._subscriptions):
|
||||
self._subscriptions.pop()()
|
||||
|
||||
    def async_watch_sensor_states(
        self,
        area_id: str | None = None,
        old_state: str | None = None,
        state: str | None = None,
    ):
        """Watch sensors based on the state of the alarm entities.

        Rebuilds the single state-change listener over all sensors that are
        active for the current alarm state of every area. When called with an
        ``area_id`` and no ``old_state`` (i.e. an initial state notification),
        the current states of that area's active sensors are logged, and the
        area's ready-to-arm status is refreshed.
        """
        sensors_list = []
        for area in self.hass.data[const.DOMAIN]["areas"].keys():
            sensors_list.extend(self.active_sensors_for_alarm_state(area))

        # Detach the previous listener before installing a new one.
        if self._state_listener:
            self._state_listener()

        if sensors_list:
            self._state_listener = async_track_state_change_event(
                self.hass, sensors_list, self.async_sensor_state_changed
            )
        else:
            self._state_listener = None

        # clear previous sensor group events that are not active for current alarm state
        for group_id in self._group_events.keys():
            self._group_events[group_id] = dict(
                filter(
                    lambda el: el[0] in sensors_list,
                    self._group_events[group_id].items(),
                )
            )

        # handle initial sensor states
        if area_id and old_state is None:
            sensors_list = self.active_sensors_for_alarm_state(area_id)
            for entity in sensors_list:
                # NOTE: 'state' is rebound here from the str parameter to a
                # HA State object for the remainder of the loop.
                state = self.hass.states.get(entity)
                sensor_state = parse_sensor_state(state)
                if state and state.state and sensor_state != STATE_UNKNOWN:
                    _LOGGER.debug(
                        "Initial state for %s is %s",
                        entity,
                        parse_sensor_state(state),
                    )

        if area_id:
            self.update_ready_to_arm_status(area_id)
|
||||
def active_sensors_for_alarm_state(self, area_id: str, to_state: str | None = None):
|
||||
"""Compose a list of sensors that are active for the state."""
|
||||
alarm_entity = self.hass.data[const.DOMAIN]["areas"][area_id]
|
||||
|
||||
if to_state:
|
||||
state = to_state
|
||||
else:
|
||||
state = (
|
||||
alarm_entity.arm_mode if alarm_entity.arm_mode else alarm_entity.state
|
||||
)
|
||||
|
||||
entities = []
|
||||
for entity, config in self._config.items():
|
||||
if config["area"] != area_id or not config["enabled"]:
|
||||
continue
|
||||
elif (
|
||||
alarm_entity.bypassed_sensors
|
||||
and entity in alarm_entity.bypassed_sensors
|
||||
):
|
||||
continue
|
||||
elif state in config[const.ATTR_MODES] or config[ATTR_ALWAYS_ON]:
|
||||
entities.append(entity)
|
||||
elif not to_state and config["type"] != SENSOR_TYPE_MOTION:
|
||||
# always watch all sensors other than motion sensors,
|
||||
# to indicate readiness for arming
|
||||
entities.append(entity)
|
||||
|
||||
return entities
|
||||
|
||||
    def validate_arming_event(
        self, area_id: str, target_state: str | None = None, **kwargs
    ):
        """Check whether all sensors have the correct state prior to arming.

        Keyword args:
            use_delay: evaluate as if the corresponding delay phase
                (arming/pending) is active.
            bypass_open_sensors: treat all blocking sensors as bypassable.

        Returns a tuple ``(open_sensors, bypassed_sensors)`` where
        ``open_sensors`` maps blocking entity_ids to their parsed state and
        ``bypassed_sensors`` lists entities that may be bypassed.
        """
        use_delay = kwargs.get("use_delay", False)
        bypass_open_sensors = kwargs.get("bypass_open_sensors", False)

        sensors_list = self.active_sensors_for_alarm_state(area_id, target_state)
        open_sensors = {}
        bypassed_sensors = []

        # When a delay applies, validate against the transitional state
        # (arming/pending) instead of the final target state.
        alarm_state = target_state
        if use_delay and alarm_state in const.ARM_MODES:
            alarm_state = AlarmControlPanelState.ARMING
        elif use_delay and alarm_state == AlarmControlPanelState.TRIGGERED:
            alarm_state = AlarmControlPanelState.PENDING

        for entity in sensors_list:
            sensor_config = self._config[entity]
            state = self.hass.states.get(entity)
            sensor_state = parse_sensor_state(state)
            if not state or not state.state:
                # entity does not exist in HA
                res = False
            else:
                res = sensor_state_allowed(sensor_state, sensor_config, alarm_state)

            if not res and target_state in const.ARM_MODES:
                # sensor is active while arming
                if bypass_open_sensors or (
                    sensor_config[ATTR_AUTO_BYPASS]
                    and target_state in sensor_config[ATTR_AUTO_BYPASS_MODES]
                ):
                    # sensor may be bypassed
                    bypassed_sensors.append(entity)
                elif sensor_config[ATTR_ALLOW_OPEN] and sensor_state == STATE_OPEN:
                    # sensor is permitted to be open during/after arming
                    continue
                else:
                    open_sensors[entity] = sensor_state

        return (open_sensors, bypassed_sensors)
||||
|
||||
def get_entry_delay_for_trigger(
|
||||
self, open_sensors: dict[str, str], area_id: str, arm_mode: str
|
||||
) -> int | None:
|
||||
"""Calculate entry delay based on type of sensor trigger."""
|
||||
# Check if this is a group trigger
|
||||
if ATTR_GROUP_ID in open_sensors:
|
||||
# For groups: only check for immediate triggers, otherwise use area default
|
||||
for entity_id in open_sensors:
|
||||
if entity_id != ATTR_GROUP_ID and entity_id in self._config:
|
||||
sensor_config = self._config[entity_id]
|
||||
if not sensor_config[ATTR_USE_ENTRY_DELAY]:
|
||||
return 0
|
||||
|
||||
# Groups always use area default (maintainer's preference)
|
||||
return None
|
||||
else:
|
||||
# Individual sensor trigger
|
||||
entity_id = next(iter(open_sensors.keys()))
|
||||
sensor_config = self._config[entity_id]
|
||||
|
||||
if not sensor_config[ATTR_USE_ENTRY_DELAY]:
|
||||
return 0
|
||||
|
||||
# Use sensor's entry delay if set
|
||||
if (
|
||||
ATTR_ENTRY_DELAY in sensor_config
|
||||
and sensor_config[ATTR_ENTRY_DELAY] is not None
|
||||
):
|
||||
return sensor_config[ATTR_ENTRY_DELAY]
|
||||
|
||||
# Fall back to area default (None means use area default)
|
||||
return None
|
||||
|
||||
@callback
|
||||
def async_sensor_state_changed(self, event): # noqa: PLR0915, PLR0912
|
||||
"""Callback fired when a sensor state has changed."""
|
||||
entity = event.data["entity_id"]
|
||||
old_state = parse_sensor_state(event.data["old_state"])
|
||||
new_state = parse_sensor_state(event.data["new_state"])
|
||||
sensor_config = self._config[entity]
|
||||
if old_state == STATE_UNKNOWN:
|
||||
# sensor is unknown at startup,
|
||||
# state which comes after is considered as initial state
|
||||
_LOGGER.debug(
|
||||
"Initial state for %s is %s",
|
||||
entity,
|
||||
new_state,
|
||||
)
|
||||
self.update_ready_to_arm_status(sensor_config["area"])
|
||||
return
|
||||
if old_state == new_state:
|
||||
# not a state change - ignore
|
||||
return
|
||||
|
||||
_LOGGER.debug(
|
||||
"entity %s changed: old_state=%s, new_state=%s",
|
||||
entity,
|
||||
old_state,
|
||||
new_state,
|
||||
)
|
||||
|
||||
if (
|
||||
new_state == STATE_UNAVAILABLE
|
||||
and not sensor_config[ATTR_TRIGGER_UNAVAILABLE]
|
||||
):
|
||||
# temporarily store the prior state until the sensor becomes available again
|
||||
self._unavailable_state_mem[entity] = old_state
|
||||
elif entity in self._unavailable_state_mem:
|
||||
# if sensor was unavailable, check the state before that,
|
||||
# do not act if the sensor reverted to its prior state.
|
||||
prior_state = self._unavailable_state_mem.pop(entity)
|
||||
if old_state == STATE_UNAVAILABLE and prior_state == new_state:
|
||||
_LOGGER.debug(
|
||||
"state transition from %s to %s to %s detected, ignoring.",
|
||||
prior_state,
|
||||
old_state,
|
||||
new_state,
|
||||
)
|
||||
return
|
||||
|
||||
alarm_entity = self.hass.data[const.DOMAIN]["areas"][sensor_config["area"]]
|
||||
alarm_state = alarm_entity.state
|
||||
|
||||
if (
|
||||
alarm_entity.arm_mode
|
||||
and alarm_entity.arm_mode not in sensor_config[const.ATTR_MODES]
|
||||
and not sensor_config[ATTR_ALWAYS_ON]
|
||||
):
|
||||
# sensor is not active in this arm mode, ignore
|
||||
self.update_ready_to_arm_status(sensor_config["area"])
|
||||
return
|
||||
|
||||
res = sensor_state_allowed(new_state, sensor_config, alarm_state)
|
||||
|
||||
if (
|
||||
sensor_config[ATTR_ARM_ON_CLOSE]
|
||||
and alarm_state == AlarmControlPanelState.ARMING
|
||||
):
|
||||
# we are arming and sensor is configured to arm on closing
|
||||
if new_state == STATE_CLOSED:
|
||||
self.start_arm_timer(entity)
|
||||
else:
|
||||
self.stop_arm_timer(entity)
|
||||
|
||||
if res:
|
||||
# sensor state is OK,
|
||||
# but we still need to clean up group events for closed sensors
|
||||
# A sensor that has closed should not contribute to future group triggers
|
||||
# until it opens again
|
||||
# Clear closed sensors from group events to
|
||||
# prevent stale events from triggering groups later
|
||||
if new_state == STATE_CLOSED:
|
||||
for group_id in list(self._group_events.keys()):
|
||||
if entity in self._group_events[group_id]:
|
||||
del self._group_events[group_id][entity]
|
||||
# Clean up empty group entries
|
||||
if not self._group_events[group_id]:
|
||||
del self._group_events[group_id]
|
||||
self.update_ready_to_arm_status(sensor_config["area"])
|
||||
return
|
||||
|
||||
open_sensors = self.process_group_event(entity, new_state)
|
||||
if not open_sensors:
|
||||
# triggered sensor is part of a group and should be ignored
|
||||
self.update_ready_to_arm_status(sensor_config["area"])
|
||||
return
|
||||
|
||||
if sensor_config[ATTR_ALWAYS_ON]:
|
||||
# immediate trigger due to always on sensor
|
||||
_LOGGER.info(
|
||||
"Alarm is triggered due to an always-on sensor: %s",
|
||||
entity,
|
||||
)
|
||||
alarm_entity.async_trigger(entry_delay=0, open_sensors=open_sensors)
|
||||
|
||||
elif alarm_state == AlarmControlPanelState.ARMING:
|
||||
# sensor triggered while arming, abort arming
|
||||
_LOGGER.debug(
|
||||
"Arming was aborted due to a sensor being active: %s",
|
||||
entity,
|
||||
)
|
||||
alarm_entity.async_arm_failure(open_sensors)
|
||||
|
||||
elif alarm_state in const.ARM_MODES:
|
||||
# standard alarm trigger - calculate entry delay override
|
||||
_LOGGER.info(
|
||||
"Alarm is triggered due to sensor: %s",
|
||||
entity,
|
||||
)
|
||||
entry_delay = self.get_entry_delay_for_trigger(
|
||||
open_sensors, sensor_config["area"], alarm_entity.arm_mode
|
||||
)
|
||||
|
||||
if entry_delay == 0:
|
||||
# immediate trigger (no entry delay)
|
||||
alarm_entity.async_trigger(entry_delay=0, open_sensors=open_sensors)
|
||||
else:
|
||||
# use calculated delay (could be None for area default)
|
||||
alarm_entity.async_trigger(
|
||||
entry_delay=entry_delay, open_sensors=open_sensors
|
||||
)
|
||||
|
||||
elif alarm_state == AlarmControlPanelState.PENDING:
|
||||
# trigger while in pending state
|
||||
# calculate entry delay for possible timer shortening
|
||||
_LOGGER.info(
|
||||
"Alarm is triggered due to sensor: %s",
|
||||
entity,
|
||||
)
|
||||
entry_delay = self.get_entry_delay_for_trigger(
|
||||
open_sensors, sensor_config["area"], alarm_entity.arm_mode
|
||||
)
|
||||
|
||||
if entry_delay == 0:
|
||||
# immediate trigger
|
||||
alarm_entity.async_trigger(entry_delay=0, open_sensors=open_sensors)
|
||||
else:
|
||||
# use calculated delay for possible timer shortening
|
||||
alarm_entity.async_trigger(
|
||||
entry_delay=entry_delay, open_sensors=open_sensors
|
||||
)
|
||||
|
||||
self.update_ready_to_arm_status(sensor_config["area"])
|
||||
|
||||
def start_arm_timer(self, entity):
|
||||
"""Start timer for automatical arming."""
|
||||
|
||||
@callback
|
||||
def timer_finished(now):
|
||||
_LOGGER.debug("timer finished")
|
||||
sensor_config = self._config[entity]
|
||||
alarm_entity = self.hass.data[const.DOMAIN]["areas"][sensor_config["area"]]
|
||||
if alarm_entity.state == AlarmControlPanelState.ARMING:
|
||||
alarm_entity.async_arm(alarm_entity.arm_mode, skip_delay=True)
|
||||
|
||||
now = dt_util.utcnow()
|
||||
|
||||
if entity in self._arm_timers:
|
||||
self.stop_arm_timer(entity)
|
||||
|
||||
self._arm_timers[entity] = async_track_point_in_time(
|
||||
self.hass, timer_finished, now + const.SENSOR_ARM_TIME
|
||||
)
|
||||
|
||||
def stop_arm_timer(self, entity=None):
|
||||
"""Cancel timer(s) for automatical arming."""
|
||||
if entity and entity in self._arm_timers:
|
||||
self._arm_timers[entity]()
|
||||
elif not entity:
|
||||
for key in self._arm_timers.keys():
|
||||
self._arm_timers[key]()
|
||||
|
||||
def process_group_event(self, entity: str, state: str) -> dict:
|
||||
"""Check if sensor entity is member of a group to evaluate trigger."""
|
||||
group_id = None
|
||||
for group in self._groups.values():
|
||||
if entity in group[ATTR_ENTITIES]:
|
||||
group_id = group[ATTR_GROUP_ID]
|
||||
break
|
||||
|
||||
open_sensors = {entity: state}
|
||||
if group_id is None:
|
||||
return open_sensors
|
||||
|
||||
group = self._groups[group_id]
|
||||
group_events = (
|
||||
self._group_events[group_id]
|
||||
if group_id in self._group_events.keys()
|
||||
else {}
|
||||
)
|
||||
now = dt_util.now()
|
||||
group_events[entity] = {ATTR_STATE: state, ATTR_LAST_TRIP_TIME: now}
|
||||
self._group_events[group_id] = group_events
|
||||
recent_events = {
|
||||
entity: (now - event[ATTR_LAST_TRIP_TIME]).total_seconds()
|
||||
for (entity, event) in group_events.items()
|
||||
}
|
||||
recent_events = dict(
|
||||
filter(lambda el: el[1] <= group[ATTR_TIMEOUT], recent_events.items())
|
||||
)
|
||||
if len(recent_events.keys()) < group[ATTR_EVENT_COUNT]:
|
||||
_LOGGER.debug(
|
||||
"tripped sensor %s was ignored since it belongs to group %s",
|
||||
entity,
|
||||
group[ATTR_NAME],
|
||||
)
|
||||
return {}
|
||||
else:
|
||||
for key in recent_events.keys():
|
||||
open_sensors[key] = group_events[key][ATTR_STATE]
|
||||
|
||||
# Add group info for override delay calculation
|
||||
open_sensors[ATTR_GROUP_ID] = group_id
|
||||
_LOGGER.debug(
|
||||
"tripped sensor %s caused the triggering of group %s",
|
||||
entity,
|
||||
group[ATTR_NAME],
|
||||
)
|
||||
return open_sensors
|
||||
|
||||
def update_ready_to_arm_status(self, area_id):
|
||||
"""Calculate whether the system is ready for arming."""
|
||||
alarm_entity = self.hass.data[const.DOMAIN]["areas"][area_id]
|
||||
|
||||
arm_modes = [
|
||||
mode
|
||||
for (mode, config) in alarm_entity._config[const.ATTR_MODES].items()
|
||||
if config[const.ATTR_ENABLED]
|
||||
]
|
||||
|
||||
if alarm_entity.state in const.ARM_MODES or (
|
||||
alarm_entity.state == AlarmControlPanelState.ARMING
|
||||
and alarm_entity.arm_mode
|
||||
):
|
||||
arm_modes.remove(alarm_entity.arm_mode)
|
||||
|
||||
def arm_mode_is_ready(mode):
|
||||
(blocking_sensors, _bypassed_sensors) = self.validate_arming_event(
|
||||
area_id, mode
|
||||
)
|
||||
if alarm_entity.state == AlarmControlPanelState.DISARMED:
|
||||
# exclude motion sensors when determining readiness
|
||||
blocking_sensors = dict(
|
||||
filter(
|
||||
lambda el: self._config[el[0]]["type"] != SENSOR_TYPE_MOTION,
|
||||
blocking_sensors.items(),
|
||||
)
|
||||
)
|
||||
result = not (blocking_sensors)
|
||||
return result
|
||||
|
||||
arm_modes = list(filter(arm_mode_is_ready, arm_modes))
|
||||
prev_arm_modes = alarm_entity._ready_to_arm_modes
|
||||
|
||||
if arm_modes != prev_arm_modes:
|
||||
alarm_entity.update_ready_to_arm_modes(arm_modes)
|
||||
|
||||
    async def _async_evaluate_armed_state_on_startup(self, area_id):
        """Evaluate sensors when alarm is armed on startup and trigger if necessary.

        On startup, we don't know the actual previous state of sensors
        (they might have changed while HA was down).
        This method simulates state changes for all sensors currently in violation,
        allowing the standard async_sensor_state_changed logic to re-evaluate them
        with full group logic, entry delays, etc.
        """
        alarm_entity = self.hass.data[const.DOMAIN]["areas"][area_id]

        # Only evaluate if the alarm is in an armed state
        if alarm_entity.state not in const.ARM_MODES:
            return

        _LOGGER.debug(
            "Evaluating sensors on startup for area %s (state: %s)",
            area_id,
            alarm_entity.state,
        )

        # Get all active sensors for the current armed mode
        sensors_list = self.active_sensors_for_alarm_state(area_id)

        for entity_id in sensors_list:
            sensor_config = self._config[entity_id]
            state = self.hass.states.get(entity_id)
            sensor_state = parse_sensor_state(state)

            if sensor_state == STATE_UNKNOWN:
                # Skip unknown sensors - they'll be handled when they become known
                continue

            # Check if sensor state is allowed in current alarm state
            res = sensor_state_allowed(sensor_state, sensor_config, alarm_entity.state)

            if not res:
                # Sensor is in a violation state
                # (open or unavailable when it shouldn't be)
                # Simulate a state change to trigger standard processing
                _LOGGER.info(
                    "Sensor %s is %s on startup while alarm is %s - simulating state change for evaluation",  # noqa: E501
                    entity_id,
                    sensor_state,
                    alarm_entity.state,
                )

                # Create a synthetic event that mimics
                # a state change from closed to current state
                # We use STATE_CLOSED as old state
                # (not STATE_UNKNOWN which would trigger early return)
                # NOTE(review): SimpleNamespace is used here — confirm it is
                # imported from types at the top of this file.
                old_state = SimpleNamespace(state=STATE_CLOSED)

                # Create event with the structure expected by async_sensor_state_changed
                event = SimpleNamespace(
                    data={
                        "entity_id": entity_id,
                        "old_state": old_state,
                        "new_state": state,
                    }
                )

                # Process through the standard sensor state change handler
                # This will handle groups, entry delays, always-on sensors, etc.
                self.async_sensor_state_changed(event)
||||
77
custom_components/alarmo/services.yaml
Normal file
77
custom_components/alarmo/services.yaml
Normal file
@@ -0,0 +1,77 @@
|
||||
|
||||
arm:
|
||||
fields:
|
||||
entity_id:
|
||||
example: "alarm_control_panel.alarm"
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
integration: alarmo
|
||||
domain: alarm_control_panel
|
||||
code:
|
||||
example: "1234"
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
mode:
|
||||
example: "away"
|
||||
required: false
|
||||
default: away
|
||||
selector:
|
||||
select:
|
||||
translation_key: "arm_mode"
|
||||
options:
|
||||
- away
|
||||
- night
|
||||
- home
|
||||
- vacation
|
||||
- custom
|
||||
skip_delay:
|
||||
example: false
|
||||
required: false
|
||||
default: false
|
||||
selector:
|
||||
boolean:
|
||||
force:
|
||||
example: false
|
||||
required: false
|
||||
default: false
|
||||
selector:
|
||||
boolean:
|
||||
disarm:
|
||||
fields:
|
||||
entity_id:
|
||||
example: "alarm_control_panel.alarm"
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
integration: alarmo
|
||||
domain: alarm_control_panel
|
||||
code:
|
||||
example: "1234"
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
skip_delay:
|
||||
fields:
|
||||
entity_id:
|
||||
example: "alarm_control_panel.alarm"
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
integration: alarmo
|
||||
domain: alarm_control_panel
|
||||
enable_user:
|
||||
fields:
|
||||
name:
|
||||
example: "Frank"
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
disable_user:
|
||||
fields:
|
||||
name:
|
||||
example: "Frank"
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
721
custom_components/alarmo/store.py
Normal file
721
custom_components/alarmo/store.py
Normal file
@@ -0,0 +1,721 @@
|
||||
"""Storage handler for Alarmo integration."""
|
||||
|
||||
import time
|
||||
import logging
|
||||
from typing import cast
|
||||
from collections import OrderedDict
|
||||
from collections.abc import MutableMapping
|
||||
|
||||
import attr
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.components.alarm_control_panel import CodeFormat
|
||||
|
||||
from . import const
|
||||
from .helpers import omit
|
||||
from .sensors import (
|
||||
SENSOR_TYPE_OTHER,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_REGISTRY = f"{const.DOMAIN}_storage"
|
||||
STORAGE_KEY = f"{const.DOMAIN}.storage"
|
||||
STORAGE_VERSION_MAJOR = 6
|
||||
STORAGE_VERSION_MINOR = 3
|
||||
SAVE_DELAY = 10
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class ModeEntry:
    """Mode storage Entry."""

    # Whether this arm mode is enabled for the area.
    enabled = attr.ib(type=bool, default=False)
    # Delay (seconds) before arming completes; None means unset.
    exit_time = attr.ib(type=int, default=None)
    # Delay (seconds) before a trip triggers the alarm; None means unset.
    entry_time = attr.ib(type=int, default=None)
    # Duration (seconds) the alarm stays triggered; None means unset.
    trigger_time = attr.ib(type=int, default=None)
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class MqttConfig:
    """MQTT storage Entry."""

    enabled = attr.ib(type=bool, default=False)
    state_topic = attr.ib(type=str, default="alarmo/state")
    # attr.Factory gives every instance its own dict; a plain `default={}`
    # shares one mutable dict across all instances of the class.
    state_payload = attr.ib(type=dict, default=attr.Factory(dict))
    command_topic = attr.ib(type=str, default="alarmo/command")
    command_payload = attr.ib(type=dict, default=attr.Factory(dict))
    require_code = attr.ib(type=bool, default=True)
    event_topic = attr.ib(type=str, default="alarmo/event")
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class MasterConfig:
    """Master storage Entry."""

    # Whether the master alarm (spanning all areas) is enabled.
    enabled = attr.ib(type=bool, default=True)
    # Display name of the master alarm entity.
    name = attr.ib(type=str, default="master")
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class AreaEntry:
    """Area storage Entry."""

    area_id = attr.ib(type=str, default=None)
    name = attr.ib(type=str, default=None)
    # attr.Factory builds a fresh modes mapping per instance; the previous
    # plain dict default was a single mutable object shared by every
    # AreaEntry instance.
    # NOTE(review): `type=[str, ModeEntry]` is metadata only — attrs does
    # not validate it.
    modes = attr.ib(
        type=[str, ModeEntry],
        default=attr.Factory(
            lambda: {
                const.CONF_ALARM_ARMED_AWAY: ModeEntry(),
                const.CONF_ALARM_ARMED_HOME: ModeEntry(),
                const.CONF_ALARM_ARMED_NIGHT: ModeEntry(),
                const.CONF_ALARM_ARMED_CUSTOM_BYPASS: ModeEntry(),
                const.CONF_ALARM_ARMED_VACATION: ModeEntry(),
            }
        ),
    )
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class Config:
    """(General) Config storage Entry."""

    # Whether a code must be entered to arm the system.
    code_arm_required = attr.ib(type=bool, default=False)
    # Whether a code must be entered to switch between arm modes.
    code_mode_change_required = attr.ib(type=bool, default=False)
    # Whether a code must be entered to disarm the system.
    code_disarm_required = attr.ib(type=bool, default=False)
    code_format = attr.ib(type=str, default=CodeFormat.NUMBER)
    disarm_after_trigger = attr.ib(type=bool, default=False)
    ignore_blocking_sensors_after_trigger = attr.ib(type=bool, default=False)
    # Shared frozen defaults are safe here since MasterConfig/MqttConfig
    # instances are immutable.
    master = attr.ib(type=MasterConfig, default=MasterConfig())
    mqtt = attr.ib(type=MqttConfig, default=MqttConfig())
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class SensorEntry:
    """Sensor storage Entry."""

    entity_id = attr.ib(type=str, default=None)
    type = attr.ib(type=str, default=SENSOR_TYPE_OTHER)
    # attr.Factory gives every instance its own list; a plain `default=[]`
    # shares one mutable list across all instances of the class.
    modes = attr.ib(type=list, default=attr.Factory(list))
    use_exit_delay = attr.ib(type=bool, default=True)
    use_entry_delay = attr.ib(type=bool, default=True)
    always_on = attr.ib(type=bool, default=False)
    arm_on_close = attr.ib(type=bool, default=False)
    allow_open = attr.ib(type=bool, default=False)
    trigger_unavailable = attr.ib(type=bool, default=False)
    auto_bypass = attr.ib(type=bool, default=False)
    auto_bypass_modes = attr.ib(type=list, default=attr.Factory(list))
    area = attr.ib(type=str, default=None)
    enabled = attr.ib(type=bool, default=True)
    # Per-sensor entry delay override in seconds; None = use area default.
    entry_delay = attr.ib(type=int, default=None)
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class UserEntry:
    """User storage Entry."""

    user_id = attr.ib(type=str, default=None)
    name = attr.ib(type=str, default="")
    enabled = attr.ib(type=bool, default=True)
    code = attr.ib(type=str, default="")
    can_arm = attr.ib(type=bool, default=False)
    can_disarm = attr.ib(type=bool, default=False)
    is_override_code = attr.ib(type=bool, default=False)
    code_format = attr.ib(type=str, default="")
    code_length = attr.ib(type=int, default=0)
    # attr.Factory gives every instance its own list; a plain `default=[]`
    # shares one mutable list across all instances of the class.
    area_limit = attr.ib(type=list, default=attr.Factory(list))
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class AlarmoTriggerEntry:
    """Trigger storage Entry."""

    event = attr.ib(type=str, default="")
    area = attr.ib(type=str, default=None)
    # attr.Factory gives every instance its own list; a plain `default=[]`
    # shares one mutable list across all instances of the class.
    modes = attr.ib(type=list, default=attr.Factory(list))
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class EntityTriggerEntry:
    """Trigger storage Entry."""

    # Entity whose state is watched by the automation trigger.
    entity_id = attr.ib(type=str, default=None)
    # State value that fires the trigger.
    state = attr.ib(type=str, default=None)
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class ActionEntry:
    """Action storage Entry."""

    service = attr.ib(type=str, default="")
    entity_id = attr.ib(type=str, default=None)
    # attr.Factory gives every instance its own dict; a plain `default={}`
    # shares one mutable dict across all instances of the class.
    data = attr.ib(type=dict, default=attr.Factory(dict))
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class AutomationEntry:
    """Automation storage Entry."""

    automation_id = attr.ib(type=str, default=None)
    type = attr.ib(type=str, default=None)
    name = attr.ib(type=str, default="")
    # attr.Factory gives every instance its own list; a plain `default=[]`
    # shares one mutable list across all instances of the class.
    # NOTE(review): `type=[...]` is metadata only — attrs does not validate it.
    triggers = attr.ib(type=[AlarmoTriggerEntry], default=attr.Factory(list))
    actions = attr.ib(type=[ActionEntry], default=attr.Factory(list))
    enabled = attr.ib(type=bool, default=True)
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class SensorGroupEntry:
    """Sensor group storage Entry."""

    group_id = attr.ib(type=str, default=None)
    name = attr.ib(type=str, default="")
    # attr.Factory gives every instance its own list; a plain `default=[]`
    # shares one mutable list across all instances of the class.
    entities = attr.ib(type=list, default=attr.Factory(list))
    # Time window (seconds) within which member trips are counted together.
    timeout = attr.ib(type=int, default=0)
    # Number of distinct trips within the window needed to trigger the group.
    event_count = attr.ib(type=int, default=2)
||||
|
||||
|
||||
def parse_automation_entry(data: dict):
    """Parse automation entry from dict to proper types.

    Returns a dict containing only the keys present in *data*, with
    triggers/actions converted to their storage entry classes.
    """

    def create_trigger_entity(config: dict):
        # Alarm-state triggers carry an "event" key; entity triggers do not.
        entry_cls = AlarmoTriggerEntry if "event" in config else EntityTriggerEntry
        return entry_cls(**config)

    output = {}
    if "triggers" in data:
        output["triggers"] = [create_trigger_entity(t) for t in data["triggers"]]
    if "actions" in data:
        output["actions"] = [ActionEntry(**a) for a in data["actions"]]
    # Copy scalar fields through unchanged when present.
    for key in ("automation_id", "name", "type", "enabled"):
        if key in data:
            output[key] = data[key]
    return output
||||
|
||||
|
||||
class MigratableStore(Store):
    """Storage class that can migrate data between versions."""

    async def _async_migrate_func(
        self, old_major_version: int, old_minor_version: int, data: dict
    ):
        """Upgrade *data* written by an older schema to the current one.

        The version checks are cumulative: data saved by a very old
        version passes through every applicable step in order. Returns
        the migrated data dict.
        """

        def migrate_automation(data):
            # v2 and earlier stored the alarm "state" on the trigger and
            # the arm modes on the automation; fold both into the trigger.
            if old_major_version <= 2:
                data["triggers"] = [
                    {
                        "event": el["state"] if "state" in el else el["event"],
                        "area": el.get("area"),
                        "modes": data["modes"],
                    }
                    for el in data["triggers"]
                ]

                data["type"] = (
                    "notification" if data.get("is_notification") else "action"
                )

            # v5 and earlier used "service_data"; it is now called "data".
            if old_major_version <= 5:
                data["actions"] = [
                    {
                        "service": el.get("service"),
                        "entity_id": el.get("entity_id"),
                        "data": el.get("service_data"),
                    }
                    for el in data["actions"]
                ]

            # Round-trip through the entry class to normalize field set.
            return attr.asdict(AutomationEntry(**parse_automation_entry(data)))

        if old_major_version == 1:
            # v1 had a single implicit area; create an explicit one and
            # attach all existing sensors to it.
            area_id = str(int(time.time()))
            data["areas"] = [
                attr.asdict(
                    AreaEntry(
                        **{
                            "name": "Alarmo",
                            "modes": {
                                mode: attr.asdict(
                                    ModeEntry(
                                        enabled=bool(config["enabled"]),
                                        exit_time=int(config["leave_time"] or 0),
                                        entry_time=int(config["entry_time"] or 0),
                                        trigger_time=int(
                                            data["config"]["trigger_time"] or 0
                                        ),
                                    )
                                )
                                for (mode, config) in data["config"]["modes"].items()
                            },
                        },
                        area_id=area_id,
                    )
                )
            ]

            if "sensors" in data:
                for sensor in data["sensors"]:
                    sensor["area"] = area_id

        if old_major_version <= 3:
            # Derive the newer per-sensor delay/bypass flags from the old
            # "immediate" / "always_on" / "auto_bypass" booleans.
            data["sensors"] = [
                attr.asdict(
                    SensorEntry(
                        **{
                            **omit(sensor, ["immediate", "name"]),
                            "use_exit_delay": not sensor["immediate"]
                            and not sensor["always_on"],
                            "use_entry_delay": not sensor["immediate"]
                            and not sensor["always_on"],
                            "auto_bypass_modes": sensor["modes"]
                            if sensor.get("auto_bypass")
                            else [],
                        }
                    )
                )
                for sensor in data["sensors"]
            ]

        if old_major_version <= 4:
            # The sensor "name" field was dropped in v5.
            data["sensors"] = [
                attr.asdict(
                    SensorEntry(
                        **omit(sensor, ["name"]),
                    )
                )
                for sensor in data["sensors"]
            ]

        # Run unconditionally (not only for <=4): migrate_automation gates
        # its own steps internally and its <=5 actions rename must also be
        # applied to data saved by major version 5.
        data["automations"] = [
            migrate_automation(automation) for automation in data["automations"]
        ]

        if old_major_version <= 5 or (old_major_version == 6 and old_minor_version < 2):
            # 6.2 introduced code_mode_change_required; seed it from the
            # existing code_arm_required setting.
            data["config"] = attr.asdict(
                Config(
                    **omit(data["config"], ["code_mode_change_required"]),
                    code_mode_change_required=data["config"]["code_arm_required"],
                )
            )

        if old_major_version <= 5 or (old_major_version == 6 and old_minor_version < 3):
            # 6.3 deduplicates the entity list of each sensor group.
            data["sensor_groups"] = [
                attr.asdict(
                    SensorGroupEntry(
                        **{
                            **omit(sensorGroup, ["entities"]),
                            "entities": list(set(sensorGroup["entities"])),
                        }
                    )
                )
                for sensorGroup in data["sensor_groups"]
            ]

        return data
|
||||
|
||||
|
||||
class AlarmoStorage:
    """Class to hold alarmo configuration data.

    Keeps the full Alarmo configuration (general config, areas, sensors,
    users, automations, sensor groups) in memory and persists it through a
    version-migrating HA Store. Mutating helpers schedule a delayed save
    rather than writing immediately.
    """

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the storage."""
        self.hass = hass
        # In-memory mirrors of the persisted data, keyed by their ids.
        self.config: Config = Config()
        self.areas: MutableMapping[str, AreaEntry] = {}
        self.sensors: MutableMapping[str, SensorEntry] = {}
        self.users: MutableMapping[str, UserEntry] = {}
        self.automations: MutableMapping[str, AutomationEntry] = {}
        self.sensor_groups: MutableMapping[str, SensorGroupEntry] = {}
        self._store = MigratableStore(
            hass,
            STORAGE_VERSION_MAJOR,
            STORAGE_KEY,
            minor_version=STORAGE_VERSION_MINOR,
        )

    async def async_load(self) -> None:  # noqa: PLR0912
        """Load the registry of schedule entries.

        Rebuilds all in-memory entry objects from the stored dicts; seeds
        a factory-default area when no areas exist yet (fresh install).
        """
        data = await self._store.async_load()
        config: Config = Config()
        areas: OrderedDict[str, AreaEntry] = OrderedDict()
        sensors: OrderedDict[str, SensorEntry] = OrderedDict()
        users: OrderedDict[str, UserEntry] = OrderedDict()
        automations: OrderedDict[str, AutomationEntry] = OrderedDict()
        sensor_groups: OrderedDict[str, SensorGroupEntry] = OrderedDict()

        if data is not None:
            config = Config(
                code_arm_required=data["config"]["code_arm_required"],
                code_mode_change_required=data["config"]["code_mode_change_required"],
                code_disarm_required=data["config"]["code_disarm_required"],
                code_format=data["config"]["code_format"],
                disarm_after_trigger=data["config"]["disarm_after_trigger"],
                # Key may be missing in data written by older versions.
                ignore_blocking_sensors_after_trigger=data["config"].get(
                    "ignore_blocking_sensors_after_trigger", False
                ),
            )

            if "mqtt" in data["config"]:
                config = attr.evolve(
                    config,
                    **{
                        "mqtt": MqttConfig(**data["config"]["mqtt"]),
                    },
                )

            if "master" in data["config"]:
                config = attr.evolve(
                    config,
                    **{
                        "master": MasterConfig(**data["config"]["master"]),
                    },
                )

            if "areas" in data:
                for area in data["areas"]:
                    # NOTE(review): the comprehension's `config` is the
                    # per-mode settings dict and shadows the outer Config;
                    # the shadowing does not leak out of the comprehension.
                    modes = {
                        mode: ModeEntry(
                            enabled=config["enabled"],
                            exit_time=config["exit_time"],
                            entry_time=config["entry_time"],
                            trigger_time=config["trigger_time"],
                        )
                        for (mode, config) in area["modes"].items()
                    }
                    areas[area["area_id"]] = AreaEntry(
                        area_id=area["area_id"], name=area["name"], modes=modes
                    )

            if "sensors" in data:
                for sensor in data["sensors"]:
                    sensors[sensor["entity_id"]] = SensorEntry(**sensor)

            if "users" in data:
                for user in data["users"]:
                    # "is_admin" is a legacy field UserEntry no longer accepts.
                    users[user["user_id"]] = UserEntry(**omit(user, ["is_admin"]))

            if "automations" in data:
                for automation in data["automations"]:
                    automations[automation["automation_id"]] = AutomationEntry(
                        **parse_automation_entry(automation)
                    )

            if "sensor_groups" in data:
                for group in data["sensor_groups"]:
                    sensor_groups[group["group_id"]] = SensorGroupEntry(**group)

        self.config = config
        self.areas = areas
        self.sensors = sensors
        self.automations = automations
        self.users = users
        self.sensor_groups = sensor_groups

        # Fresh install: no areas stored yet, create the default one.
        if not areas:
            await self.async_factory_default()

    async def async_factory_default(self):
        """Reset to factory default configuration."""
        self.async_create_area(
            {
                "name": "Alarmo",
                "modes": {
                    const.CONF_ALARM_ARMED_AWAY: attr.asdict(
                        ModeEntry(
                            enabled=True, exit_time=60, entry_time=60, trigger_time=1800
                        )
                    ),
                    const.CONF_ALARM_ARMED_HOME: attr.asdict(
                        ModeEntry(enabled=True, trigger_time=1800)
                    ),
                },
            }
        )

    @callback
    def async_schedule_save(self) -> None:
        """Schedule saving the registry of alarmo."""
        self._store.async_delay_save(self._data_to_save, SAVE_DELAY)

    async def async_save(self) -> None:
        """Save the registry of alarmo."""
        await self._store.async_save(self._data_to_save())

    @callback
    def _data_to_save(self) -> dict:
        """Return data for the registry for alarmo to store in a file."""
        store_data = {
            "config": attr.asdict(self.config),
        }

        store_data["areas"] = [attr.asdict(entry) for entry in self.areas.values()]
        store_data["sensors"] = [attr.asdict(entry) for entry in self.sensors.values()]
        store_data["users"] = [attr.asdict(entry) for entry in self.users.values()]
        store_data["automations"] = [
            attr.asdict(entry) for entry in self.automations.values()
        ]
        store_data["sensor_groups"] = [
            attr.asdict(entry) for entry in self.sensor_groups.values()
        ]

        return store_data

    async def async_delete(self):
        """Delete config and restore the factory defaults."""
        _LOGGER.warning("Removing alarmo configuration data!")
        await self._store.async_remove()
        self.config = Config()
        self.areas = {}
        self.sensors = {}
        self.users = {}
        self.automations = {}
        self.sensor_groups = {}
        await self.async_factory_default()

    @callback
    def async_get_config(self):
        """Get current config as a plain dict."""
        return attr.asdict(self.config)

    @callback
    def async_update_config(self, changes: dict):
        """Update existing config; returns the new config as a dict."""
        old = self.config
        new = self.config = attr.evolve(old, **changes)
        self.async_schedule_save()
        return attr.asdict(new)

    @callback
    def async_update_mode_config(self, mode: str, changes: dict):
        """Update the settings of one arm mode; returns the new ModeEntry."""
        modes = self.config.modes
        old = self.config.modes[mode] if mode in self.config.modes else ModeEntry()
        new = attr.evolve(old, **changes)
        modes[mode] = new
        self.config = attr.evolve(self.config, **{"modes": modes})
        self.async_schedule_save()
        return new

    @callback
    def async_get_area(self, area_id) -> AreaEntry:
        """Get an existing AreaEntry by id.

        Returns the entry as a plain dict (attr.asdict), or None.
        """
        res = self.areas.get(area_id)
        return attr.asdict(res) if res else None

    @callback
    def async_get_areas(self):
        """Get all areas as a dict of area_id -> area dict."""
        res = {}
        for key, val in self.areas.items():
            res[key] = attr.asdict(val)
        return res

    @callback
    def async_create_area(self, data: dict) -> AreaEntry:
        """Create a new AreaEntry; returns it as a dict."""
        # NOTE(review): second-resolution timestamp ids can collide when
        # two entries are created within the same second — confirm callers
        # never do that.
        area_id = str(int(time.time()))
        new_area = AreaEntry(**data, area_id=area_id)
        self.areas[area_id] = new_area
        self.async_schedule_save()
        return attr.asdict(new_area)

    @callback
    def async_delete_area(self, area_id: str) -> bool:
        """Delete AreaEntry; returns True when something was removed."""
        if area_id in self.areas:
            del self.areas[area_id]
            self.async_schedule_save()
            return True
        return False

    @callback
    def async_update_area(self, area_id: str, changes: dict) -> AreaEntry:
        """Update an existing AreaEntry; returns the new entry as a dict."""
        old = self.areas[area_id]
        new = self.areas[area_id] = attr.evolve(old, **changes)
        self.async_schedule_save()
        return attr.asdict(new)

    @callback
    def async_get_sensor(self, entity_id) -> SensorEntry:
        """Get an existing SensorEntry by id, as a dict (or None)."""
        res = self.sensors.get(entity_id)
        return attr.asdict(res) if res else None

    @callback
    def async_get_sensors(self):
        """Get all sensors as a dict of entity_id -> sensor dict."""
        res = {}
        for key, val in self.sensors.items():
            res[key] = attr.asdict(val)
        return res

    @callback
    def async_create_sensor(self, entity_id: str, data: dict) -> SensorEntry:
        """Create a new SensorEntry; returns False if it already exists."""
        if entity_id in self.sensors:
            return False
        new_sensor = SensorEntry(**data, entity_id=entity_id)
        self.sensors[entity_id] = new_sensor
        self.async_schedule_save()
        return new_sensor

    @callback
    def async_delete_sensor(self, entity_id: str) -> bool:
        """Delete SensorEntry; returns True when something was removed."""
        if entity_id in self.sensors:
            del self.sensors[entity_id]
            self.async_schedule_save()
            return True
        return False

    @callback
    def async_update_sensor(self, entity_id: str, changes: dict) -> SensorEntry:
        """Update existing SensorEntry."""
        old = self.sensors[entity_id]
        new = self.sensors[entity_id] = attr.evolve(old, **changes)
        self.async_schedule_save()
        return new

    @callback
    def async_get_user(self, user_id) -> UserEntry:
        """Get an existing UserEntry by id, as a dict (or None)."""
        res = self.users.get(user_id)
        return attr.asdict(res) if res else None

    @callback
    def async_get_users(self):
        """Get all users as a dict of user_id -> user dict."""
        res = {}
        for key, val in self.users.items():
            res[key] = attr.asdict(val)
        return res

    @callback
    def async_create_user(self, data: dict) -> UserEntry:
        """Create a new UserEntry with a timestamp-derived id."""
        user_id = str(int(time.time()))
        new_user = UserEntry(**data, user_id=user_id)
        self.users[user_id] = new_user
        self.async_schedule_save()
        return new_user

    @callback
    def async_delete_user(self, user_id: str) -> bool:
        """Delete UserEntry; returns True when something was removed."""
        if user_id in self.users:
            del self.users[user_id]
            self.async_schedule_save()
            return True
        return False

    @callback
    def async_update_user(self, user_id: str, changes: dict) -> UserEntry:
        """Update existing UserEntry."""
        old = self.users[user_id]
        new = self.users[user_id] = attr.evolve(old, **changes)
        self.async_schedule_save()
        return new

    @callback
    def async_get_automations(self):
        """Get all automations as a dict of automation_id -> dict."""
        res = {}
        for key, val in self.automations.items():
            res[key] = attr.asdict(val)
        return res

    @callback
    def async_create_automation(self, data: dict) -> AutomationEntry:
        """Create a new AutomationEntry with a timestamp-derived id."""
        automation_id = str(int(time.time()))
        new_automation = AutomationEntry(
            **parse_automation_entry(data), automation_id=automation_id
        )
        self.automations[automation_id] = new_automation
        self.async_schedule_save()
        return new_automation

    @callback
    def async_delete_automation(self, automation_id: str) -> bool:
        """Delete AutomationEntry; returns True when something was removed."""
        if automation_id in self.automations:
            del self.automations[automation_id]
            self.async_schedule_save()
            return True
        return False

    @callback
    def async_update_automation(
        self, automation_id: str, changes: dict
    ) -> AutomationEntry:
        """Update existing AutomationEntry."""
        old = self.automations[automation_id]
        new = self.automations[automation_id] = attr.evolve(
            old, **parse_automation_entry(changes)
        )
        self.async_schedule_save()
        return new

    @callback
    def async_get_sensor_group(self, group_id) -> SensorGroupEntry:
        """Get an existing SensorGroupEntry by id, as a dict (or None)."""
        res = self.sensor_groups.get(group_id)
        return attr.asdict(res) if res else None

    @callback
    def async_get_sensor_groups(self):
        """Get all sensor groups as a dict of group_id -> group dict."""
        res = {}
        for key, val in self.sensor_groups.items():
            res[key] = attr.asdict(val)
        return res

    @callback
    def async_create_sensor_group(self, data: dict) -> SensorGroupEntry:
        """Create a new SensorGroupEntry; returns the new group's id."""
        group_id = str(int(time.time()))
        new_group = SensorGroupEntry(**data, group_id=group_id)
        self.sensor_groups[group_id] = new_group
        self.async_schedule_save()
        return group_id

    @callback
    def async_delete_sensor_group(self, group_id: str) -> bool:
        """Delete SensorGroupEntry; returns True when something was removed."""
        if group_id in self.sensor_groups:
            del self.sensor_groups[group_id]
            self.async_schedule_save()
            return True
        return False

    @callback
    def async_update_sensor_group(
        self, group_id: str, changes: dict
    ) -> SensorGroupEntry:
        """Update existing SensorGroupEntry."""
        old = self.sensor_groups[group_id]
        new = self.sensor_groups[group_id] = attr.evolve(old, **changes)
        self.async_schedule_save()
        return new
|
||||
|
||||
|
||||
async def async_get_registry(hass: HomeAssistant) -> AlarmoStorage:
    """Return the singleton AlarmoStorage, creating and loading it on first use."""
    task = hass.data.get(DATA_REGISTRY)
    if task is None:

        async def _create_and_load() -> AlarmoStorage:
            store = AlarmoStorage(hass)
            await store.async_load()
            return store

        # Cache the task (not the result) so concurrent callers all await
        # the same load instead of racing to build a second registry.
        task = hass.data[DATA_REGISTRY] = hass.async_create_task(_create_and_load())

    return cast(AlarmoStorage, await task)
|
||||
594
custom_components/alarmo/websockets.py
Normal file
594
custom_components/alarmo/websockets.py
Normal file
@@ -0,0 +1,594 @@
|
||||
"""WebSocket handler and registration for Alarmo configuration management."""
|
||||
|
||||
import voluptuous as vol
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.const import (
|
||||
ATTR_CODE,
|
||||
ATTR_NAME,
|
||||
ATTR_STATE,
|
||||
ATTR_SERVICE,
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_CODE_FORMAT,
|
||||
CONF_SERVICE_DATA,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.components.mqtt import (
|
||||
DOMAIN as ATTR_MQTT,
|
||||
)
|
||||
from homeassistant.components.mqtt import (
|
||||
CONF_STATE_TOPIC,
|
||||
CONF_COMMAND_TOPIC,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_send,
|
||||
async_dispatcher_connect,
|
||||
)
|
||||
from homeassistant.components.websocket_api import decorators, async_register_command
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
ATTR_CODE_ARM_REQUIRED,
|
||||
CodeFormat,
|
||||
)
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
|
||||
from . import const
|
||||
from .mqtt import (
|
||||
CONF_EVENT_TOPIC,
|
||||
)
|
||||
from .sensors import (
|
||||
ATTR_GROUP,
|
||||
ATTR_TIMEOUT,
|
||||
SENSOR_TYPES,
|
||||
ATTR_ENTITIES,
|
||||
ATTR_GROUP_ID,
|
||||
ATTR_ALWAYS_ON,
|
||||
ATTR_ALLOW_OPEN,
|
||||
ATTR_AUTO_BYPASS,
|
||||
ATTR_ENTRY_DELAY,
|
||||
ATTR_EVENT_COUNT,
|
||||
ATTR_ARM_ON_CLOSE,
|
||||
ATTR_NEW_ENTITY_ID,
|
||||
ATTR_USE_EXIT_DELAY,
|
||||
ATTR_USE_ENTRY_DELAY,
|
||||
ATTR_AUTO_BYPASS_MODES,
|
||||
ATTR_TRIGGER_UNAVAILABLE,
|
||||
)
|
||||
|
||||
|
||||
@callback
@decorators.websocket_command({vol.Required("type"): "alarmo_config_updated"})
@decorators.async_response
async def handle_subscribe_updates(hass, connection, msg):
    """Subscribe a websocket client to Alarmo frontend update events."""
    subscription_id = msg["id"]

    @callback
    def _forward_update():
        """Relay an internal update signal to the websocket client."""
        connection.send_message({"id": subscription_id, "type": "event"})

    # Store the dispatcher unsubscribe handle so it is cleaned up when the
    # websocket connection closes.
    connection.subscriptions[subscription_id] = async_dispatcher_connect(
        hass, "alarmo_update_frontend", _forward_update
    )
    connection.send_result(subscription_id)
|
||||
|
||||
|
||||
class AlarmoConfigView(HomeAssistantView):
    """HTTP endpoint for updating the Alarmo general configuration."""

    url = "/api/alarmo/config"
    name = "api:alarmo:config"

    @RequestDataValidator(
        vol.Schema(
            {
                vol.Optional(ATTR_CODE_ARM_REQUIRED): cv.boolean,
                vol.Optional(const.ATTR_CODE_DISARM_REQUIRED): cv.boolean,
                vol.Optional(
                    const.ATTR_IGNORE_BLOCKING_SENSORS_AFTER_TRIGGER
                ): cv.boolean,
                vol.Optional(const.ATTR_CODE_MODE_CHANGE_REQUIRED): cv.boolean,
                vol.Optional(ATTR_CODE_FORMAT): vol.In(
                    [CodeFormat.NUMBER, CodeFormat.TEXT]
                ),
                vol.Optional(const.ATTR_TRIGGER_TIME): cv.positive_int,
                vol.Optional(const.ATTR_DISARM_AFTER_TRIGGER): cv.boolean,
                # Optional MQTT bridge settings (topics and payload overrides).
                vol.Optional(ATTR_MQTT): vol.Schema(
                    {
                        vol.Required(const.ATTR_ENABLED): cv.boolean,
                        vol.Required(CONF_STATE_TOPIC): cv.string,
                        vol.Optional(const.ATTR_STATE_PAYLOAD): vol.Schema(
                            {
                                vol.Optional(const.CONF_ALARM_DISARMED): cv.string,
                                vol.Optional(const.CONF_ALARM_ARMED_HOME): cv.string,
                                vol.Optional(const.CONF_ALARM_ARMED_AWAY): cv.string,
                                vol.Optional(const.CONF_ALARM_ARMED_NIGHT): cv.string,
                                vol.Optional(
                                    const.CONF_ALARM_ARMED_CUSTOM_BYPASS
                                ): cv.string,
                                vol.Optional(
                                    const.CONF_ALARM_ARMED_VACATION
                                ): cv.string,
                                vol.Optional(const.CONF_ALARM_PENDING): cv.string,
                                vol.Optional(const.CONF_ALARM_ARMING): cv.string,
                                vol.Optional(const.CONF_ALARM_TRIGGERED): cv.string,
                            }
                        ),
                        vol.Required(CONF_COMMAND_TOPIC): cv.string,
                        vol.Optional(const.ATTR_COMMAND_PAYLOAD): vol.Schema(
                            {
                                vol.Optional(const.COMMAND_ARM_AWAY): cv.string,
                                vol.Optional(const.COMMAND_ARM_HOME): cv.string,
                                vol.Optional(const.COMMAND_ARM_NIGHT): cv.string,
                                vol.Optional(
                                    const.COMMAND_ARM_CUSTOM_BYPASS
                                ): cv.string,
                                vol.Optional(const.COMMAND_ARM_VACATION): cv.string,
                                vol.Optional(const.COMMAND_DISARM): cv.string,
                            }
                        ),
                        vol.Required(const.ATTR_REQUIRE_CODE): cv.boolean,
                        vol.Required(CONF_EVENT_TOPIC): cv.string,
                    }
                ),
                # Optional master alarm panel settings.
                vol.Optional(const.ATTR_MASTER): vol.Schema(
                    {
                        vol.Required(const.ATTR_ENABLED): cv.boolean,
                        vol.Optional(ATTR_NAME): cv.string,
                    }
                ),
            }
        )
    )
    async def post(self, request, data):
        """Handle config update request."""
        hass = request.app["hass"]
        coordinator = hass.data[const.DOMAIN]["coordinator"]
        await coordinator.async_update_config(data)
        # Tell any open frontends to re-fetch their data.
        async_dispatcher_send(hass, "alarmo_update_frontend")
        return self.json({"success": True})
|
||||
|
||||
|
||||
class AlarmoAreaView(HomeAssistantView):
    """HTTP endpoint for creating, updating and removing Alarmo areas."""

    url = "/api/alarmo/area"
    name = "api:alarmo:area"

    # Per-mode settings, shared by every arm mode of an area.
    mode_schema = vol.Schema(
        {
            vol.Required(const.ATTR_ENABLED): cv.boolean,
            vol.Required(const.ATTR_EXIT_TIME): vol.Any(cv.positive_int, None),
            vol.Required(const.ATTR_ENTRY_TIME): vol.Any(cv.positive_int, None),
            vol.Optional(const.ATTR_TRIGGER_TIME): vol.Any(cv.positive_int, None),
        }
    )

    @RequestDataValidator(
        vol.Schema(
            {
                vol.Optional("area_id"): cv.string,
                vol.Optional(ATTR_NAME): cv.string,
                vol.Optional(const.ATTR_REMOVE): cv.boolean,
                vol.Optional(const.ATTR_MODES): vol.Schema(
                    {
                        vol.Optional(const.CONF_ALARM_ARMED_AWAY): mode_schema,
                        vol.Optional(const.CONF_ALARM_ARMED_HOME): mode_schema,
                        vol.Optional(const.CONF_ALARM_ARMED_NIGHT): mode_schema,
                        vol.Optional(const.CONF_ALARM_ARMED_CUSTOM_BYPASS): mode_schema,
                        vol.Optional(const.CONF_ALARM_ARMED_VACATION): mode_schema,
                    }
                ),
            }
        )
    )
    async def post(self, request, data):
        """Handle config update request."""
        hass = request.app["hass"]
        coordinator = hass.data[const.DOMAIN]["coordinator"]
        # A missing area_id means "create a new area".
        if "area_id" in data:
            area = data["area_id"]
            del data["area_id"]
        else:
            area = None
        await coordinator.async_update_area_config(area, data)
        # Tell any open frontends to re-fetch their data.
        async_dispatcher_send(hass, "alarmo_update_frontend")
        return self.json({"success": True})
|
||||
|
||||
|
||||
class AlarmoSensorView(HomeAssistantView):
    """HTTP endpoint for updating the configuration of a single sensor."""

    url = "/api/alarmo/sensors"
    name = "api:alarmo:sensors"

    @RequestDataValidator(
        vol.Schema(
            {
                vol.Required(ATTR_ENTITY_ID): cv.entity_id,
                vol.Optional(const.ATTR_REMOVE): cv.boolean,
                vol.Optional(const.ATTR_TYPE): vol.In(SENSOR_TYPES),
                vol.Optional(const.ATTR_MODES): vol.All(
                    cv.ensure_list, [vol.In(const.ARM_MODES)]
                ),
                vol.Optional(ATTR_USE_EXIT_DELAY): cv.boolean,
                vol.Optional(ATTR_USE_ENTRY_DELAY): cv.boolean,
                vol.Optional(ATTR_ARM_ON_CLOSE): cv.boolean,
                vol.Optional(ATTR_ALLOW_OPEN): cv.boolean,
                vol.Optional(ATTR_ALWAYS_ON): cv.boolean,
                vol.Optional(ATTR_TRIGGER_UNAVAILABLE): cv.boolean,
                vol.Optional(ATTR_AUTO_BYPASS): cv.boolean,
                vol.Optional(ATTR_AUTO_BYPASS_MODES): vol.All(
                    cv.ensure_list, [vol.In(const.ARM_MODES)]
                ),
                vol.Optional(const.ATTR_AREA): cv.string,
                vol.Optional(const.ATTR_ENABLED): cv.boolean,
                vol.Optional(ATTR_GROUP): vol.Any(cv.string, None),
                vol.Optional(ATTR_ENTRY_DELAY): vol.Any(cv.positive_int, None),
                vol.Optional(ATTR_NEW_ENTITY_ID): cv.string,
            }
        )
    )
    async def post(self, request, data):
        """Handle config update request."""
        hass = request.app["hass"]
        coordinator = hass.data[const.DOMAIN]["coordinator"]
        entity = data[ATTR_ENTITY_ID]
        del data[ATTR_ENTITY_ID]
        # NOTE(review): not awaited here, unlike AlarmoConfigView.post —
        # presumably async_update_sensor_config is a sync callback; confirm.
        coordinator.async_update_sensor_config(entity, data)
        # Tell any open frontends to re-fetch their data.
        async_dispatcher_send(hass, "alarmo_update_frontend")
        return self.json({"success": True})
|
||||
|
||||
|
||||
class AlarmoUserView(HomeAssistantView):
    """HTTP endpoint for creating, updating and removing Alarmo users."""

    url = "/api/alarmo/users"
    name = "api:alarmo:users"

    @RequestDataValidator(
        vol.Schema(
            {
                vol.Optional(const.ATTR_USER_ID): cv.string,
                vol.Optional(const.ATTR_REMOVE): cv.boolean,
                vol.Optional(ATTR_NAME): cv.string,
                vol.Optional(const.ATTR_ENABLED): cv.boolean,
                vol.Optional(ATTR_CODE): cv.string,
                vol.Optional(const.ATTR_OLD_CODE): cv.string,
                vol.Optional(const.ATTR_CAN_ARM): cv.boolean,
                vol.Optional(const.ATTR_CAN_DISARM): cv.boolean,
                vol.Optional(const.ATTR_IS_OVERRIDE_CODE): cv.boolean,
                vol.Optional(const.ATTR_AREA_LIMIT): vol.All(
                    cv.ensure_list, [cv.string]
                ),
            }
        )
    )
    async def post(self, request, data):
        """Handle config update request."""
        hass = request.app["hass"]
        coordinator = hass.data[const.DOMAIN]["coordinator"]
        # A missing user_id means "create a new user".
        user_id = None
        if const.ATTR_USER_ID in data:
            user_id = data[const.ATTR_USER_ID]
            del data[const.ATTR_USER_ID]
        err = coordinator.async_update_user_config(user_id, data)
        # Tell any open frontends to re-fetch their data.
        async_dispatcher_send(hass, "alarmo_update_frontend")
        # A string result signals a validation error; success otherwise.
        return self.json({"success": not isinstance(err, str), "error": err})
|
||||
|
||||
|
||||
class AlarmoAutomationView(HomeAssistantView):
    """HTTP endpoint for creating, updating and removing Alarmo automations."""

    url = "/api/alarmo/automations"
    name = "api:alarmo:automations"

    @RequestDataValidator(
        vol.Schema(
            {
                vol.Optional(const.ATTR_AUTOMATION_ID): cv.string,
                vol.Optional(ATTR_NAME): cv.string,
                vol.Optional(const.ATTR_TYPE): cv.string,
                # Each trigger is either an alarm-event trigger (event/area/
                # modes) or an entity-state trigger (entity_id/state).
                vol.Optional(const.ATTR_TRIGGERS): vol.All(
                    cv.ensure_list,
                    [
                        vol.Any(
                            vol.Schema(
                                {
                                    vol.Required(const.ATTR_EVENT): cv.string,
                                    vol.Optional(const.ATTR_AREA): vol.Any(
                                        int,
                                        cv.string,
                                    ),
                                    vol.Optional(const.ATTR_MODES): vol.All(
                                        cv.ensure_list, [vol.In(const.ARM_MODES)]
                                    ),
                                }
                            ),
                            vol.Schema(
                                {
                                    vol.Required(ATTR_ENTITY_ID): cv.string,
                                    vol.Required(ATTR_STATE): cv.string,
                                }
                            ),
                        )
                    ],
                ),
                vol.Optional(const.ATTR_ACTIONS): vol.All(
                    cv.ensure_list,
                    [
                        vol.Schema(
                            {
                                vol.Optional(ATTR_ENTITY_ID): cv.string,
                                vol.Required(ATTR_SERVICE): cv.string,
                                vol.Optional(CONF_SERVICE_DATA): dict,
                            }
                        )
                    ],
                ),
                vol.Optional(const.ATTR_ENABLED): cv.boolean,
                vol.Optional(const.ATTR_REMOVE): cv.boolean,
            }
        )
    )
    async def post(self, request, data):
        """Handle config update request."""
        hass = request.app["hass"]
        coordinator = hass.data[const.DOMAIN]["coordinator"]
        # A missing automation_id means "create a new automation".
        automation_id = None
        if const.ATTR_AUTOMATION_ID in data:
            automation_id = data[const.ATTR_AUTOMATION_ID]
            del data[const.ATTR_AUTOMATION_ID]
        coordinator.async_update_automation_config(automation_id, data)
        # Tell any open frontends to re-fetch their data.
        async_dispatcher_send(hass, "alarmo_update_frontend")
        return self.json({"success": True})
|
||||
|
||||
|
||||
class AlarmoSensorGroupView(HomeAssistantView):
    """HTTP endpoint for creating, updating and removing sensor groups."""

    url = "/api/alarmo/sensor_groups"
    name = "api:alarmo:sensor_groups"

    @RequestDataValidator(
        vol.Schema(
            {
                vol.Optional(ATTR_GROUP_ID): cv.string,
                vol.Optional(ATTR_NAME): cv.string,
                vol.Optional(ATTR_ENTITIES): vol.All(
                    cv.ensure_list, vol.Unique(), [cv.string]
                ),
                vol.Optional(ATTR_TIMEOUT): cv.positive_int,
                vol.Optional(ATTR_EVENT_COUNT): cv.positive_int,
                vol.Optional(const.ATTR_REMOVE): cv.boolean,
            }
        )
    )
    async def post(self, request, data):
        """Handle config update request."""
        hass = request.app["hass"]
        coordinator = hass.data[const.DOMAIN]["coordinator"]
        # A missing group_id means "create a new sensor group".
        group_id = None
        if ATTR_GROUP_ID in data:
            group_id = data[ATTR_GROUP_ID]
            del data[ATTR_GROUP_ID]
        coordinator.async_update_sensor_group_config(group_id, data)
        # Tell any open frontends to re-fetch their data.
        async_dispatcher_send(hass, "alarmo_update_frontend")
        return self.json({"success": True})
|
||||
|
||||
|
||||
@callback
def websocket_get_config(hass, connection, msg):
    """Send the current Alarmo configuration to a websocket client."""
    store = hass.data[const.DOMAIN]["coordinator"].store
    connection.send_result(msg["id"], store.async_get_config())
|
||||
|
||||
|
||||
@callback
def websocket_get_areas(hass, connection, msg):
    """Send all configured Alarmo areas to a websocket client."""
    store = hass.data[const.DOMAIN]["coordinator"].store
    connection.send_result(msg["id"], store.async_get_areas())
|
||||
|
||||
|
||||
@callback
def websocket_get_sensors(hass, connection, msg):
    """Send all sensors, annotated with each sensor's group, to a client."""
    coordinator = hass.data[const.DOMAIN]["coordinator"]
    sensors = coordinator.store.async_get_sensors()
    # A dict iterates its keys directly; no need for .keys().
    for entity_id in sensors:
        sensors[entity_id]["group"] = coordinator.async_get_group_for_sensor(
            entity_id
        )
    connection.send_result(msg["id"], sensors)
|
||||
|
||||
|
||||
@callback
def websocket_get_users(hass, connection, msg):
    """Send all configured Alarmo users to a websocket client."""
    store = hass.data[const.DOMAIN]["coordinator"].store
    connection.send_result(msg["id"], store.async_get_users())
|
||||
|
||||
|
||||
@callback
def websocket_get_automations(hass, connection, msg):
    """Send all configured Alarmo automations to a websocket client."""
    store = hass.data[const.DOMAIN]["coordinator"].store
    connection.send_result(msg["id"], store.async_get_automations())
|
||||
|
||||
|
||||
@callback
def websocket_get_alarm_entities(hass, connection, msg):
    """Send the list of alarm entities (per-area plus optional master)."""
    domain_data = hass.data[const.DOMAIN]
    entities = []
    for area_id, entity in domain_data["areas"].items():
        entities.append({"entity_id": entity.entity_id, "area_id": area_id})
    master = domain_data["master"]
    if master:
        # The master panel is reported with the sentinel area_id 0.
        entities.append({"entity_id": master.entity_id, "area_id": 0})
    connection.send_result(msg["id"], entities)
|
||||
|
||||
|
||||
@callback
def websocket_get_sensor_groups(hass, connection, msg):
    """Send all configured Alarmo sensor groups to a websocket client."""
    store = hass.data[const.DOMAIN]["coordinator"].store
    connection.send_result(msg["id"], store.async_get_sensor_groups())
|
||||
|
||||
|
||||
@callback
def websocket_get_countdown(hass, connection, msg):
    """Send the configured delay and remaining countdown of an alarm entity."""
    entity_id = msg["entity_id"]
    domain_data = hass.data[const.DOMAIN]

    # Look the entity up among the per-area panels first.
    item = None
    for entity in domain_data["areas"].values():
        if entity.entity_id == entity_id:
            item = entity
            break
    # Fall back to the master panel when no area matched.
    master = domain_data["master"]
    if master and not item and master.entity_id == entity_id:
        item = master

    delay = item.delay if item else 0
    remaining = 0
    if item and item.expiration:
        remaining = round((item.expiration - dt_util.utcnow()).total_seconds(), 2)
    connection.send_result(msg["id"], {"delay": delay, "remaining": remaining})
|
||||
|
||||
|
||||
@callback
def websocket_get_ready_to_arm_modes(hass, connection, msg):
    """Publish the arm modes one alarm entity is currently ready to arm to."""
    entity_id = msg["entity_id"]
    domain_data = hass.data[const.DOMAIN]

    # Locate the alarm entity: first among the areas, then the master alarm.
    item = None
    for entity in domain_data["areas"].values():
        if entity.entity_id == entity_id:
            item = entity
            break
    master = domain_data["master"]
    if item is None and master and master.entity_id == entity_id:
        item = master

    # NOTE(review): reads a private attribute of the alarm entity.
    modes = item._ready_to_arm_modes if item else None
    connection.send_result(msg["id"], {"modes": modes})
|
||||
|
||||
|
||||
async def async_register_websockets(hass):
    """Register Alarmo's HTTP views and websocket commands.

    The HTTP views handle configuration mutations; the websocket commands
    expose read-only data to the frontend. The registrations were previously
    nine near-identical copy-pasted calls; they are now data-driven, which
    keeps the command name and its schema "type" key in sync by construction.
    Registration order is unchanged.
    """
    for view in (
        AlarmoConfigView,
        AlarmoSensorView,
        AlarmoUserView,
        AlarmoAutomationView,
        AlarmoAreaView,
        AlarmoSensorGroupView,
    ):
        hass.http.register_view(view)

    async_register_command(hass, handle_subscribe_updates)

    # Simple commands: the message carries only the command type.
    for command, handler in (
        ("alarmo/config", websocket_get_config),
        ("alarmo/areas", websocket_get_areas),
        ("alarmo/sensors", websocket_get_sensors),
        ("alarmo/users", websocket_get_users),
        ("alarmo/automations", websocket_get_automations),
        ("alarmo/entities", websocket_get_alarm_entities),
        ("alarmo/sensor_groups", websocket_get_sensor_groups),
    ):
        async_register_command(
            hass,
            command,
            handler,
            websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
                {vol.Required("type"): command}
            ),
        )

    # Entity-scoped commands: the message additionally carries an entity_id.
    for command, handler in (
        ("alarmo/countdown", websocket_get_countdown),
        ("alarmo/ready_to_arm_modes", websocket_get_ready_to_arm_modes),
    ):
        async_register_command(
            hass,
            command,
            handler,
            websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
                {
                    vol.Required("type"): command,
                    vol.Required("entity_id"): cv.entity_id,
                }
            ),
        )
|
||||
114
custom_components/govee/__init__.py
Normal file
114
custom_components/govee/__init__.py
Normal file
@@ -0,0 +1,114 @@
|
||||
"""The Govee integration."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from govee_api_laggat import Govee
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
|
||||
from .const import DOMAIN
|
||||
from .learning_storage import GoveeLearningStorage
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
# supported platforms
|
||||
PLATFORMS = ["light"]
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
"""This setup does nothing, we use the async setup."""
|
||||
hass.states.set("govee.state", "setup called")
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: dict):
    """Initialize the Govee component's shared storage in hass.data."""
    hass.data[DOMAIN] = {}
    hass.states.async_set("govee.state", "async_setup called")
    return True
|
||||
|
||||
|
||||
def is_online(online: bool):
    """Log a warning whenever the Govee API goes offline or comes back online."""
    _LOGGER.warning("API is back online." if online else "API is offline.")
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up Govee from a config entry."""
    # Options (set through the options flow) take precedence over config data.
    api_key = entry.options.get(CONF_API_KEY, entry.data.get(CONF_API_KEY, ""))

    # Connect to the Govee cloud; learned device info is persisted in the
    # Home Assistant config directory.
    learning_storage = GoveeLearningStorage(hass.config.config_dir)
    hub = await Govee.create(api_key, learning_storage=learning_storage)

    # Keep a reference so platforms and unload can reach the hub.
    hass.data[DOMAIN] = {"hub": hub}

    # Log whenever the API's availability flips.
    hub.events.online += is_online

    # Verify that the passed-in configuration actually works.
    _, err = await hub.get_devices()
    if err:
        _LOGGER.warning("Could not connect to Govee API: %s", err)
        await hub.rate_limit_delay()
        await async_unload_entry(hass, entry)
        raise PlatformNotReady()

    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry and close the hub when all platforms unloaded."""
    results = await asyncio.gather(
        *(_unload_component_entry(hass, entry, component) for component in PLATFORMS)
    )
    unload_ok = all(results)

    if unload_ok:
        hub = hass.data[DOMAIN].pop("hub")
        await hub.close()

    return unload_ok
|
||||
|
||||
|
||||
async def _unload_component_entry(
    hass: HomeAssistant, entry: ConfigEntry, component: str
) -> bool:
    """Unload this entry for a single component/platform.

    Returns True on success and False when the platform could not be
    unloaded; errors are logged rather than propagated so one failing
    platform does not abort unloading the others.

    Bug fix: the previous synchronous version assigned the *coroutine*
    returned by async_forward_entry_unload without awaiting it, so the
    except clauses could never fire (the error is raised at await-time
    inside asyncio.gather), and the exception paths handed a plain bool
    to gather, which rejects non-awaitables. Making the function a
    coroutine and awaiting inside the try fixes both; the caller already
    awaits it via asyncio.gather, so the interface stays compatible.
    """
    try:
        return await hass.config_entries.async_forward_entry_unload(entry, component)
    except ValueError:
        # Probably "Config entry was never loaded!" - nothing to unload.
        return False
    except Exception as ex:  # pylint: disable=broad-except
        _LOGGER.warning(
            "Continuing on exception when unloading %s component's entry: %s",
            component,
            ex,
        )
        return False
|
||||
BIN
custom_components/govee/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
custom_components/govee/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
BIN
custom_components/govee/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
custom_components/govee/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/govee/__pycache__/config_flow.cpython-312.pyc
Normal file
BIN
custom_components/govee/__pycache__/config_flow.cpython-312.pyc
Normal file
Binary file not shown.
BIN
custom_components/govee/__pycache__/config_flow.cpython-313.pyc
Normal file
BIN
custom_components/govee/__pycache__/config_flow.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/govee/__pycache__/const.cpython-312.pyc
Normal file
BIN
custom_components/govee/__pycache__/const.cpython-312.pyc
Normal file
Binary file not shown.
BIN
custom_components/govee/__pycache__/const.cpython-313.pyc
Normal file
BIN
custom_components/govee/__pycache__/const.cpython-313.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
custom_components/govee/__pycache__/light.cpython-312.pyc
Normal file
BIN
custom_components/govee/__pycache__/light.cpython-312.pyc
Normal file
Binary file not shown.
BIN
custom_components/govee/__pycache__/light.cpython-313.pyc
Normal file
BIN
custom_components/govee/__pycache__/light.cpython-313.pyc
Normal file
Binary file not shown.
210
custom_components/govee/config_flow.py
Normal file
210
custom_components/govee/config_flow.py
Normal file
@@ -0,0 +1,210 @@
|
||||
"""Config flow for Govee integration."""
|
||||
|
||||
import logging
|
||||
|
||||
from govee_api_laggat import Govee, GoveeNoLearningStorage, GoveeError
|
||||
|
||||
from homeassistant import config_entries, core, exceptions
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.const import CONF_API_KEY, CONF_DELAY
|
||||
from homeassistant.core import callback
|
||||
import voluptuous as vol
|
||||
|
||||
from .const import (
|
||||
CONF_DISABLE_ATTRIBUTE_UPDATES,
|
||||
CONF_OFFLINE_IS_OFF,
|
||||
CONF_USE_ASSUMED_STATE,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def validate_api_key(hass: core.HomeAssistant, user_input):
    """Check that the supplied API key can reach the Govee cloud.

    Raises CannotConnect when the device list cannot be fetched; otherwise
    returns the (unchanged) user input for storage in the config entry.
    """
    async with Govee(
        user_input[CONF_API_KEY], learning_storage=GoveeNoLearningStorage()
    ) as hub:
        _, error = await hub.get_devices()
        if error:
            raise CannotConnect(error)

    return user_input
|
||||
|
||||
|
||||
async def validate_disabled_attribute_updates(hass: core.HomeAssistant, user_input):
    """Validate the format of the disable-attribute-updates option string.

    An empty string is always valid. A non-empty string is handed to the
    Govee library's parser, which raises GoveeError on bad input. Returns
    the user input for storage in the config entry.
    """
    disable_str = user_input[CONF_DISABLE_ATTRIBUTE_UPDATES]
    if disable_str:
        # Connect without an API key purely to reuse the library's parser.
        async with Govee("", learning_storage=GoveeNoLearningStorage()) as hub:
            hub.ignore_device_attributes(disable_str)

    return user_input
|
||||
|
||||
|
||||
@config_entries.HANDLERS.register(DOMAIN)
class GoveeFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Govee."""

    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL

    async def async_step_user(self, user_input=None):
        """Handle the initial step: ask for (and validate) the API key."""
        errors = {}
        if user_input is not None:
            try:
                user_input = await validate_api_key(self.hass, user_input)
            except CannotConnect as conn_ex:
                _LOGGER.exception("Cannot connect: %s", conn_ex)
                errors[CONF_API_KEY] = "cannot_connect"
            except GoveeError as govee_ex:
                _LOGGER.exception("Govee library error: %s", govee_ex)
                errors["base"] = "govee_ex"
            except Exception as ex:  # pylint: disable=broad-except
                _LOGGER.exception("Unexpected exception: %s", ex)
                errors["base"] = "unknown"
            else:
                # Validation succeeded: create the entry straight away.
                return self.async_create_entry(title=DOMAIN, data=user_input)

        user_schema = vol.Schema(
            {
                vol.Required(CONF_API_KEY): cv.string,
                vol.Optional(CONF_DELAY, default=10): cv.positive_int,
            }
        )
        return self.async_show_form(
            step_id="user",
            data_schema=user_schema,
            errors=errors,
        )

    @staticmethod
    @callback
    def async_get_options_flow(config_entry):
        """Get the options flow."""
        return GoveeOptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class GoveeOptionsFlowHandler(config_entries.OptionsFlow):
    """Handle options for the Govee integration.

    Lets the user change the API key, poll interval, assumed-state and
    offline-is-off behavior, and the disable-attribute-updates string.
    """

    VERSION = 1

    def __init__(self, config_entry):
        """Initialize options flow with a mutable copy of current options."""
        self.config_entry = config_entry
        self.options = dict(config_entry.options)

    async def async_step_init(self, user_input=None):
        """Manage the options (entry point; delegates to the user step)."""
        return await self.async_step_user()

    async def async_step_user(self, user_input=None):
        """Manage the options."""
        # get the current value for API key for comparison and default value
        # (options take precedence over the original config data)
        old_api_key = self.config_entry.options.get(
            CONF_API_KEY, self.config_entry.data.get(CONF_API_KEY, "")
        )

        errors = {}
        if user_input is not None:
            # check if API Key changed and is valid
            # (unchanged keys are not re-validated, avoiding an API round trip)
            try:
                api_key = user_input[CONF_API_KEY]
                if old_api_key != api_key:
                    user_input = await validate_api_key(self.hass, user_input)

            except CannotConnect as conn_ex:
                _LOGGER.exception("Cannot connect: %s", conn_ex)
                errors[CONF_API_KEY] = "cannot_connect"
            except GoveeError as govee_ex:
                _LOGGER.exception("Govee library error: %s", govee_ex)
                errors["base"] = "govee_ex"
            except Exception as ex:  # pylint: disable=broad-except
                _LOGGER.exception("Unexpected exception: %s", ex)
                errors["base"] = "unknown"

            # check validate_disabled_attribute_updates
            # NOTE(review): this second validation runs even when the API-key
            # validation above already recorded an error - confirm intended.
            try:
                user_input = await validate_disabled_attribute_updates(
                    self.hass, user_input
                )

                # apply settings to the running instance
                # (takes effect immediately, without waiting for a reload)
                if DOMAIN in self.hass.data and "hub" in self.hass.data[DOMAIN]:
                    hub = self.hass.data[DOMAIN]["hub"]
                    if hub:
                        disable_str = user_input[CONF_DISABLE_ATTRIBUTE_UPDATES]
                        hub.ignore_device_attributes(disable_str)
            except GoveeError as govee_ex:
                _LOGGER.exception(
                    "Wrong input format for validate_disabled_attribute_updates: %s",
                    govee_ex,
                )
                errors[
                    CONF_DISABLE_ATTRIBUTE_UPDATES
                ] = "disabled_attribute_updates_wrong"

            if not errors:
                # update options flow values
                self.options.update(user_input)
                return await self._update_options()
            # for later - extend with options you don't want in config but option flow
            # return await self.async_step_options_2()

        # Form defaults come from current options, falling back to config data.
        options_schema = vol.Schema(
            {
                # to config flow
                vol.Required(
                    CONF_API_KEY,
                    default=old_api_key,
                ): cv.string,
                vol.Optional(
                    CONF_DELAY,
                    default=self.config_entry.options.get(
                        CONF_DELAY, self.config_entry.data.get(CONF_DELAY, 10)
                    ),
                ): cv.positive_int,
                # to options flow
                vol.Required(
                    CONF_USE_ASSUMED_STATE,
                    default=self.config_entry.options.get(CONF_USE_ASSUMED_STATE, True),
                ): cv.boolean,
                vol.Required(
                    CONF_OFFLINE_IS_OFF,
                    default=self.config_entry.options.get(CONF_OFFLINE_IS_OFF, False),
                ): cv.boolean,
                # TODO: validator doesn't work, change to list?
                vol.Optional(
                    CONF_DISABLE_ATTRIBUTE_UPDATES,
                    default=self.config_entry.options.get(
                        CONF_DISABLE_ATTRIBUTE_UPDATES, ""
                    ),
                ): cv.string,
            },
        )

        return self.async_show_form(
            step_id="user",
            data_schema=options_schema,
            errors=errors,
        )

    async def _update_options(self):
        """Update config entry options."""
        return self.async_create_entry(title=DOMAIN, data=self.options)
|
||||
|
||||
|
||||
class CannotConnect(exceptions.HomeAssistantError):
    """Error to indicate we cannot connect.

    Raised by validate_api_key when the Govee cloud rejects the key or
    is unreachable.
    """
|
||||
10
custom_components/govee/const.py
Normal file
10
custom_components/govee/const.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Constants for the Govee LED strips integration."""
|
||||
|
||||
DOMAIN = "govee"
|
||||
|
||||
CONF_DISABLE_ATTRIBUTE_UPDATES = "disable_attribute_updates"
|
||||
CONF_OFFLINE_IS_OFF = "offline_is_off"
|
||||
CONF_USE_ASSUMED_STATE = "use_assumed_state"
|
||||
|
||||
COLOR_TEMP_KELVIN_MIN = 2000
|
||||
COLOR_TEMP_KELVIN_MAX = 9000
|
||||
1
custom_components/govee/govee
Normal file
1
custom_components/govee/govee
Normal file
@@ -0,0 +1 @@
|
||||
/workspaces/hacs-govee/custom_components/govee
|
||||
66
custom_components/govee/learning_storage.py
Normal file
66
custom_components/govee/learning_storage.py
Normal file
@@ -0,0 +1,66 @@
|
||||
"""The Govee learned storage yaml file manager."""
|
||||
|
||||
from dataclasses import asdict
|
||||
import logging
|
||||
|
||||
import dacite
|
||||
from govee_api_laggat import GoveeAbstractLearningStorage, GoveeLearnedInfo
|
||||
import yaml
|
||||
|
||||
from homeassistant.util.yaml import load_yaml, save_yaml
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
LEARNING_STORAGE_YAML = "/govee_learning.yaml"
|
||||
|
||||
|
||||
class GoveeLearningStorage(GoveeAbstractLearningStorage):
    """Persist learned per-device info for govee_api_laggat as a yaml file."""

    def __init__(self, config_dir, *args, **kwargs):
        """Remember the Home Assistant config directory for the yaml file."""
        super().__init__(*args, **kwargs)
        self._config_dir = config_dir

    async def read(self):
        """Restore learned info from the yaml file.

        Returns an empty dict on a missing or unparseable file so learning
        simply starts from scratch.
        """
        yaml_file = self._config_dir + LEARNING_STORAGE_YAML
        learned_info = {}
        try:
            raw = load_yaml(yaml_file)
            learned_info = {
                device: dacite.from_dict(
                    data_class=GoveeLearnedInfo, data=raw[device]
                )
                for device in raw
            }
            _LOGGER.info(
                "Loaded learning information from %s.",
                yaml_file,
            )
        except FileNotFoundError:
            _LOGGER.warning(
                "There is no %s file containing learned information about your devices. "
                + "This is normal for first start of Govee integration.",
                yaml_file,
            )
        except (
            dacite.DaciteError,
            TypeError,
            UnicodeDecodeError,
            yaml.YAMLError,
        ) as ex:
            _LOGGER.warning(
                "The %s file containing learned information about your devices is invalid: %s. "
                + "Learning starts from scratch.",
                yaml_file,
                ex,
            )
        return learned_info

    async def write(self, learned_info):
        """Persist learned info to the yaml file."""
        yaml_file = self._config_dir + LEARNING_STORAGE_YAML
        serializable = {
            device: asdict(info) for device, info in learned_info.items()
        }
        save_yaml(yaml_file, serializable)
        _LOGGER.info(
            "Stored learning information to %s.",
            yaml_file,
        )
|
||||
325
custom_components/govee/light.py
Normal file
325
custom_components/govee/light.py
Normal file
@@ -0,0 +1,325 @@
|
||||
"""Govee platform."""
|
||||
|
||||
from datetime import timedelta, datetime
|
||||
import logging
|
||||
|
||||
from govee_api_laggat import Govee, GoveeDevice, GoveeError
|
||||
from govee_api_laggat.govee_dtos import GoveeSource
|
||||
|
||||
from homeassistant.components.light import (
|
||||
ATTR_BRIGHTNESS,
|
||||
ATTR_COLOR_TEMP,
|
||||
ATTR_HS_COLOR,
|
||||
SUPPORT_BRIGHTNESS,
|
||||
SUPPORT_COLOR,
|
||||
SUPPORT_COLOR_TEMP,
|
||||
LightEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_DELAY
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util import color
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
CONF_OFFLINE_IS_OFF,
|
||||
CONF_USE_ASSUMED_STATE,
|
||||
COLOR_TEMP_KELVIN_MIN,
|
||||
COLOR_TEMP_KELVIN_MAX,
|
||||
)
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up the Govee Light platform for a config entry.

    Creates the polling coordinator, registers a handler so devices the hub
    discovers later become entities too, and adds the already-known devices.
    (The superseded bulk add_entities block that was left commented out here
    has been removed.)
    """
    _LOGGER.debug("Setting up Govee lights")
    config = entry.data
    options = entry.options
    hub = hass.data[DOMAIN]["hub"]

    # Poll interval: options (options flow) override the original config data.
    update_interval = timedelta(
        seconds=options.get(CONF_DELAY, config.get(CONF_DELAY, 10))
    )
    coordinator = GoveeDataUpdateCoordinator(
        hass, _LOGGER, update_interval=update_interval, config_entry=entry
    )

    # Devices the hub discovers later are added as soon as they appear.
    hub.events.new_device += lambda device: add_entity(
        async_add_entities, hub, entry, coordinator, device
    )

    # Fetch initial data so we have data when entities subscribe.
    await coordinator.async_refresh()

    # Add the devices that are already known.
    for device in hub.devices:
        add_entity(async_add_entities, hub, entry, coordinator, device)
|
||||
|
||||
|
||||
def add_entity(async_add_entities, hub, entry, coordinator, device):
    """Create a GoveeLightEntity for *device* and register it with HA."""
    async_add_entities(
        [GoveeLightEntity(hub, entry.title, coordinator, device)],
        update_before_add=False,
    )
|
||||
|
||||
|
||||
class GoveeDataUpdateCoordinator(DataUpdateCoordinator):
    """Polls the Govee hub for device states on a fixed interval."""

    def __init__(self, hass, logger, update_interval=None, *, config_entry):
        """Initialize the global data updater for this config entry."""
        self._config_entry = config_entry

        super().__init__(
            hass,
            logger,
            name=DOMAIN,
            update_interval=update_interval,
            update_method=self._async_update,
        )

    @property
    def use_assumed_state(self):
        """Whether entities should report assumed state (two-button UI)."""
        return self._config_entry.options.get(CONF_USE_ASSUMED_STATE, True)

    @property
    def config_offline_is_off(self):
        """Interpret offline led's as off (global config)."""
        return self._config_entry.options.get(CONF_OFFLINE_IS_OFF, False)

    async def _async_update(self):
        """Fetch device states from the Govee API.

        Raises UpdateFailed when the hub is missing or the library errors;
        per-device errors are logged but do not fail the whole update.
        """
        self.logger.debug("_async_update")
        # Consistency fix: use the DOMAIN constant instead of the hard-coded
        # "govee" literal so this check stays in sync with the integration.
        if DOMAIN not in self.hass.data:
            raise UpdateFailed("Govee instance not available")
        try:
            hub = self.hass.data[DOMAIN]["hub"]

            if not hub.online:
                # when offline, check connection, this will set hub.online
                await hub.check_connection()

            if hub.online:
                # Push the global offline_is_off option into the library;
                # None allows the per-device learning info to override it.
                if self.config_offline_is_off:
                    hub.config_offline_is_off = True
                else:
                    hub.config_offline_is_off = None

                # govee will change this to a single request in 2021
                device_states = await hub.get_states()
                for device in device_states:
                    if device.error:
                        self.logger.warning(
                            "update failed for %s: %s", device.device, device.error
                        )
                return device_states
        except GoveeError as ex:
            raise UpdateFailed(f"Exception on getting states: {ex}") from ex
|
||||
|
||||
|
||||
class GoveeLightEntity(LightEntity):
    """Representation of a stateful Govee light entity."""

    def __init__(
        self,
        hub: Govee,
        title: str,
        coordinator: GoveeDataUpdateCoordinator,
        device: GoveeDevice,
    ):
        """Init a Govee light strip."""
        self._hub = hub
        self._title = title
        self._coordinator = coordinator
        self._device = device

    @property
    def entity_registry_enabled_default(self):
        """Return if the entity should be enabled when first added to the entity registry."""
        return True

    async def async_added_to_hass(self):
        """Connect to dispatcher listening for entity data notifications."""
        self._coordinator.async_add_listener(self.async_write_ha_state)

    @property
    def _state(self):
        """Lights internal state (the shared GoveeDevice object)."""
        return self._device

    @property
    def supported_features(self):
        """Flag supported features."""
        support_flags = 0
        if self._device.support_brightness:
            support_flags |= SUPPORT_BRIGHTNESS
        if self._device.support_color:
            support_flags |= SUPPORT_COLOR
        # NOTE: 'support_color_tem' (sic) is the attribute name used by the
        # govee_api_laggat library.
        if self._device.support_color_tem:
            support_flags |= SUPPORT_COLOR_TEMP
        return support_flags

    async def async_turn_on(self, **kwargs):
        """Turn device on, applying color/brightness/color-temp when given."""
        _LOGGER.debug(
            "async_turn_on for Govee light %s, kwargs: %s", self._device.device, kwargs
        )
        err = None

        just_turn_on = True
        if ATTR_HS_COLOR in kwargs:
            hs_color = kwargs.pop(ATTR_HS_COLOR)
            just_turn_on = False
            col = color.color_hs_to_RGB(hs_color[0], hs_color[1])
            _, err = await self._hub.set_color(self._device, col)
        if ATTR_BRIGHTNESS in kwargs:
            brightness = kwargs.pop(ATTR_BRIGHTNESS)
            just_turn_on = False
            # govee is reporting 0 to 254 - home assistant uses 1 to 255
            bright_set = brightness - 1
            _, err = await self._hub.set_brightness(self._device, bright_set)
        if ATTR_COLOR_TEMP in kwargs:
            color_temp = kwargs.pop(ATTR_COLOR_TEMP)
            just_turn_on = False
            color_temp_kelvin = color.color_temperature_mired_to_kelvin(color_temp)
            # clamp to the kelvin range the set_color_temp call accepts
            if color_temp_kelvin > COLOR_TEMP_KELVIN_MAX:
                color_temp_kelvin = COLOR_TEMP_KELVIN_MAX
            elif color_temp_kelvin < COLOR_TEMP_KELVIN_MIN:
                color_temp_kelvin = COLOR_TEMP_KELVIN_MIN
            _, err = await self._hub.set_color_temp(self._device, color_temp_kelvin)

        # if there is no known specific command - turn on
        if just_turn_on:
            _, err = await self._hub.turn_on(self._device)
        # debug log unknown commands
        if kwargs:
            _LOGGER.debug(
                "async_turn_on doesnt know how to handle kwargs: %s", repr(kwargs)
            )
        # warn on any error
        if err:
            _LOGGER.warning(
                "async_turn_on failed with '%s' for %s, kwargs: %s",
                err,
                self._device.device,
                kwargs,
            )

    async def async_turn_off(self, **kwargs):
        """Turn device off."""
        _LOGGER.debug("async_turn_off for Govee light %s", self._device.device)
        # Consistency fix: surface API errors like async_turn_on does instead
        # of silently dropping the error return.
        _, err = await self._hub.turn_off(self._device)
        if err:
            _LOGGER.warning(
                "async_turn_off failed with '%s' for %s",
                err,
                self._device.device,
            )

    @property
    def unique_id(self):
        """Return the unique ID."""
        return f"govee_{self._title}_{self._device.device}"

    @property
    def device_id(self):
        """Return the ID."""
        return self.unique_id

    @property
    def name(self):
        """Return the name."""
        return self._device.device_name

    @property
    def device_info(self):
        """Return the device info."""
        return {
            "identifiers": {(DOMAIN, self.device_id)},
            "name": self.name,
            "manufacturer": "Govee",
            "model": self._device.model,
            "via_device": (DOMAIN, "Govee API (cloud)"),
        }

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._device.power_state

    @property
    def assumed_state(self):
        """
        Return true if the state is assumed.

        This can be disabled in options.
        """
        return (
            self._coordinator.use_assumed_state
            and self._device.source == GoveeSource.HISTORY
        )

    @property
    def available(self):
        """Return if light is available."""
        return self._device.online

    @property
    def hs_color(self):
        """Return the hs color value."""
        return color.color_RGB_to_hs(
            self._device.color[0],
            self._device.color[1],
            self._device.color[2],
        )

    @property
    def rgb_color(self):
        """Return the rgb color value."""
        return [
            self._device.color[0],
            self._device.color[1],
            self._device.color[2],
        ]

    @property
    def brightness(self):
        """Return the brightness value."""
        # govee is reporting 0 to 254 - home assistant uses 1 to 255
        return self._device.brightness + 1

    @property
    def color_temp(self):
        """Return the color_temp of the light."""
        if not self._device.color_temp:
            return None
        return color.color_temperature_kelvin_to_mired(self._device.color_temp)

    @property
    def min_mireds(self):
        """Return the coldest color_temp that this light supports."""
        return color.color_temperature_kelvin_to_mired(COLOR_TEMP_KELVIN_MAX)

    @property
    def max_mireds(self):
        """Return the warmest color_temp that this light supports."""
        return color.color_temperature_kelvin_to_mired(COLOR_TEMP_KELVIN_MIN)

    @property
    def extra_state_attributes(self):
        """Return the device state attributes (API rate-limit info and device data)."""
        return {
            # rate limiting information on Govee API
            "rate_limit_total": self._hub.rate_limit_total,
            "rate_limit_remaining": self._hub.rate_limit_remaining,
            "rate_limit_reset_seconds": round(self._hub.rate_limit_reset_seconds, 2),
            "rate_limit_reset": datetime.fromtimestamp(
                self._hub.rate_limit_reset
            ).isoformat(),
            "rate_limit_on": self._hub.rate_limit_on,
            # general information
            "manufacturer": "Govee",
            "model": self._device.model,
        }
|
||||
15
custom_components/govee/manifest.json
Normal file
15
custom_components/govee/manifest.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"domain": "govee",
|
||||
"name": "Govee",
|
||||
"codeowners": ["@LaggAt"],
|
||||
"config_flow": true,
|
||||
"dependencies": [],
|
||||
"documentation": "https://github.com/LaggAt/hacs-govee/blob/master/README.md",
|
||||
"homekit": {},
|
||||
"iot_class": "cloud_polling",
|
||||
"issue_tracker": "https://github.com/LaggAt/hacs-govee/issues",
|
||||
"requirements": ["govee-api-laggat==0.2.2", "dacite==1.8.0"],
|
||||
"ssdp": [],
|
||||
"version": "2023.11.1",
|
||||
"zeroconf": []
|
||||
}
|
||||
42
custom_components/govee/strings.json
Normal file
42
custom_components/govee/strings.json
Normal file
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"title": "Govee",
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "Already configured. Only a single configuration possible."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Cannot connect. Is the API-Key correct and the internet connection working?",
|
||||
"unknown": "Unknown Error."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "API Key",
|
||||
"delay": "Poll Interval"
|
||||
},
|
||||
"title": "",
|
||||
"description": "Get your API Key from the Govee Home App. For Details see https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"error": {
|
||||
"cannot_connect": "Cannot connect. Is the API-Key correct and the internet connection working?",
|
||||
"unknown": "Unknown Error.",
|
||||
"disabled_attribute_updates_wrong": "Wrong format, see README above."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "API Key (requires restart)",
|
||||
"delay": "Poll Interval (requires restart)",
|
||||
"use_assumed_state": "Use 'assumed state' (two buttons). Default: True",
|
||||
"offline_is_off": "When a led is offline, show it as off (default doesn't change state). Default: False",
|
||||
"disable_attribute_updates": "DISABLE state updates. Space to disable. Read the README above!"
|
||||
},
|
||||
"title": "Options",
|
||||
"description": "Configure the Govee integration. For Details see https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
41
custom_components/govee/translations/de.json
Normal file
41
custom_components/govee/translations/de.json
Normal file
@@ -0,0 +1,41 @@
|
||||
{
|
||||
"title": "Govee",
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "Bereits eingerichtet. Es ist nur eine Konfiguration möglich."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Keine Verbindung möglich. Ist der API-Key richtig und die Internet Verbindung in Ordnung?",
|
||||
"unknown": "Unbekannter Fehler."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "API Key",
|
||||
"delay": "Abfrage-Intervall"
|
||||
},
|
||||
"description": "Den API Key bekommen Sie in der Govee Home App. Details dazu hier: https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"error": {
|
||||
"cannot_connect": "Keine Verbindung möglich. Ist der API-Key richtig und die Internet Verbindung in Ordnung?",
|
||||
"unknown": "Unbekannter Fehler.",
|
||||
"disabled_attribute_updates_wrong": "Format ist inkorrekt, bitte README lesen."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "API Key (benötigt Neustart)",
|
||||
"delay": "Abfrage-Intervall (benötigt Neustart)",
|
||||
"use_assumed_state": "Verwende 'angenommenen Zustand' (zwei Buttons). Standard: True",
|
||||
"offline_is_off": "Wenn eine LED offline ist, zeige sie als Aus (Standard ändert den Status nicht). Standard: False",
|
||||
"disable_attribute_updates": "Status updates verhindern. Leertaste zum ausschalten. Bitte das README oben dazu lesen."
|
||||
},
|
||||
"title": "Einstellungen",
|
||||
"description": "Einstellen der Govee Integration. Details dazu hier: https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
41
custom_components/govee/translations/en.json
Normal file
41
custom_components/govee/translations/en.json
Normal file
@@ -0,0 +1,41 @@
|
||||
{
|
||||
"title": "Govee",
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "Already configured. Only a single configuration possible."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Cannot connect. Is the API-Key correct and the internet connection working?",
|
||||
"unknown": "Unknown Error."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "API Key",
|
||||
"delay": "Poll Interval"
|
||||
},
|
||||
"description": "Get your API Key from the Govee Home App. For Details see https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"error": {
|
||||
"cannot_connect": "Cannot connect. Is the API-Key correct and the internet connection working?",
|
||||
"unknown": "Unknown Error.",
|
||||
"disabled_attribute_updates_wrong": "Wrong format, see README above."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "API Key (requires restart)",
|
||||
"delay": "Poll Interval (requires restart)",
|
||||
"use_assumed_state": "Use 'assumed state' (two buttons). Default: True",
|
||||
"offline_is_off": "When a led is offline, show it as off (default doesn't change state). Default: False",
|
||||
"disable_attribute_updates": "DISABLE state updates. Space to disable. Read the README above!"
|
||||
},
|
||||
"title": "Options",
|
||||
"description": "Configure the Govee integration. For Details see https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
41
custom_components/govee/translations/fr.json
Normal file
41
custom_components/govee/translations/fr.json
Normal file
@@ -0,0 +1,41 @@
|
||||
{
|
||||
"title": "Govee",
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "Déjà configuré. Une seule configuration possible."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Impossible de se connecter. La clé d'API est-elle correcte et la connexion Internet fonctionne-t-elle?",
|
||||
"unknown": "Erreure inconnue."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "clé d'API",
|
||||
"delay": "Intervalle d'interrogation"
|
||||
},
|
||||
"description": "Obtenez votre clé API à partir de l'application Govee Home. Pour plus de détails, visitez https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"error": {
|
||||
"cannot_connect": "Impossible de se connecter. La clé d'API est-elle correcte et la connexion Internet fonctionne-t-elle?",
|
||||
"unknown": "Erreure inconnue.",
|
||||
"disabled_attribute_updates_wrong": "Format incorrect, voir le 'lisez-moi' ci-dessus."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "Clé d'API (nécessite un redémarrage)",
|
||||
"delay": "Intervalle d'interrogation (nécessite un redémarrage)",
|
||||
"use_assumed_state": "Utiliser 'état supposé' (deux boutons). Par défaut : Vrai",
|
||||
"offline_is_off": "Lorsqu'une DEL est hors ligne, affichez-la comme éteinte (la valeur par défaut ne change pas d'état). Par défaut : Faux",
|
||||
"disable_attribute_updates": "DÉSACTIVER les mises à jour d'état. Espace pour désactiver. Lisez le 'lisez-moi' ci-dessus !"
|
||||
},
|
||||
"title": "Options",
|
||||
"description": "Configurez l'intégration Govee. Pour plus de détails, visitez https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
41
custom_components/govee/translations/pt-BR.json
Normal file
41
custom_components/govee/translations/pt-BR.json
Normal file
@@ -0,0 +1,41 @@
|
||||
{
|
||||
"title": "Govee",
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "Já configurado. Apenas uma única configuração é possível."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Não pode conectar. A API-Key está correta e a conexão com a Internet está funcionando?",
|
||||
"unknown": "Erro desconhecido."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "Chave de API",
|
||||
"delay": "Intervalo de escaneamento"
|
||||
},
|
||||
"description": "Obtenha sua chave de API do aplicativo Govee Home. Para detalhes consulte https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"error": {
|
||||
"cannot_connect": "Não pode conectar. A API-Key está correta e a conexão com a Internet está funcionando?",
|
||||
"unknown": "Erro desconhecido.",
|
||||
"disabled_attribute_updates_wrong": "Formato errado, veja README acima."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "Chave de API (requer reinicialização)",
|
||||
"delay": "Intervalo de escaneamento (requer reinicialização)",
|
||||
"use_assumed_state": "Use 'estado presumido' (dois botões). Padrão: true",
|
||||
"offline_is_off": "Quando um led estiver offline, mostre-o como desligado (o padrão não muda de estado). Padrão: False",
|
||||
"disable_attribute_updates": "DESATIVAR atualizações de estado. Espaço para desativar. Leia o README acima!"
|
||||
},
|
||||
"title": "Opções",
|
||||
"description": "Configure a integração do Govee. Para detalhes consulte https://github.com/LaggAt/hacs-govee/blob/master/README.md"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
229
custom_components/hacs/__init__.py
Normal file
229
custom_components/hacs/__init__.py
Normal file
@@ -0,0 +1,229 @@
|
||||
"""HACS gives you a powerful UI to handle downloads of all your custom needs.
|
||||
|
||||
For more details about this integration, please refer to the documentation at
|
||||
https://hacs.xyz/
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from aiogithubapi import AIOGitHubAPIException, GitHub, GitHubAPI
|
||||
from aiogithubapi.const import ACCEPT_HEADERS
|
||||
from awesomeversion import AwesomeVersion
|
||||
from homeassistant.components.frontend import async_remove_panel
|
||||
from homeassistant.components.lovelace.system_health import system_health_info
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import Platform, __version__ as HAVERSION
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity_registry import async_get as async_get_entity_registry
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.start import async_at_start
|
||||
from homeassistant.loader import async_get_integration
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import DOMAIN, HACS_SYSTEM_ID, MINIMUM_HA_VERSION, STARTUP
|
||||
from .data_client import HacsDataClient
|
||||
from .enums import HacsDisabledReason, HacsStage, LovelaceMode
|
||||
from .frontend import async_register_frontend
|
||||
from .utils.data import HacsData
|
||||
from .utils.queue_manager import QueueManager
|
||||
from .utils.version import version_left_higher_or_equal_then_right
|
||||
from .websocket import async_register_websocket_commands
|
||||
|
||||
PLATFORMS = [Platform.SWITCH, Platform.UPDATE]
|
||||
|
||||
|
||||
async def _async_initialize_integration(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
) -> bool:
|
||||
"""Initialize the integration"""
|
||||
hass.data[DOMAIN] = hacs = HacsBase()
|
||||
hacs.enable_hacs()
|
||||
|
||||
if config_entry.source == SOURCE_IMPORT:
|
||||
# Import is not supported
|
||||
hass.async_create_task(hass.config_entries.async_remove(config_entry.entry_id))
|
||||
return False
|
||||
|
||||
hacs.configuration.update_from_dict(
|
||||
{
|
||||
"config_entry": config_entry,
|
||||
**config_entry.data,
|
||||
**config_entry.options,
|
||||
},
|
||||
)
|
||||
|
||||
integration = await async_get_integration(hass, DOMAIN)
|
||||
|
||||
hacs.set_stage(None)
|
||||
|
||||
hacs.log.info(STARTUP, integration.version)
|
||||
|
||||
clientsession = async_get_clientsession(hass)
|
||||
|
||||
hacs.integration = integration
|
||||
hacs.version = integration.version
|
||||
hacs.configuration.dev = integration.version == "0.0.0"
|
||||
hacs.hass = hass
|
||||
hacs.queue = QueueManager(hass=hass)
|
||||
hacs.data = HacsData(hacs=hacs)
|
||||
hacs.data_client = HacsDataClient(
|
||||
session=clientsession,
|
||||
client_name=f"HACS/{integration.version}",
|
||||
)
|
||||
hacs.system.running = True
|
||||
hacs.session = clientsession
|
||||
|
||||
hacs.core.lovelace_mode = LovelaceMode.YAML
|
||||
try:
|
||||
lovelace_info = await system_health_info(hacs.hass)
|
||||
hacs.core.lovelace_mode = LovelaceMode(lovelace_info.get("mode", "yaml"))
|
||||
except BaseException: # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||
# If this happens, the users YAML is not valid, we assume YAML mode
|
||||
pass
|
||||
hacs.core.config_path = hacs.hass.config.path()
|
||||
|
||||
if hacs.core.ha_version is None:
|
||||
hacs.core.ha_version = AwesomeVersion(HAVERSION)
|
||||
|
||||
## Legacy GitHub client
|
||||
hacs.github = GitHub(
|
||||
hacs.configuration.token,
|
||||
clientsession,
|
||||
headers={
|
||||
"User-Agent": f"HACS/{hacs.version}",
|
||||
"Accept": ACCEPT_HEADERS["preview"],
|
||||
},
|
||||
)
|
||||
|
||||
## New GitHub client
|
||||
hacs.githubapi = GitHubAPI(
|
||||
token=hacs.configuration.token,
|
||||
session=clientsession,
|
||||
**{"client_name": f"HACS/{hacs.version}"},
|
||||
)
|
||||
|
||||
async def async_startup():
|
||||
"""HACS startup tasks."""
|
||||
hacs.enable_hacs()
|
||||
|
||||
try:
|
||||
import custom_components.custom_updater
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
hacs.log.critical(
|
||||
"HACS cannot be used with custom_updater. "
|
||||
"To use HACS you need to remove custom_updater from `custom_components`",
|
||||
)
|
||||
|
||||
hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
|
||||
return False
|
||||
|
||||
if not version_left_higher_or_equal_then_right(
|
||||
hacs.core.ha_version.string,
|
||||
MINIMUM_HA_VERSION,
|
||||
):
|
||||
hacs.log.critical(
|
||||
"You need HA version %s or newer to use this integration.",
|
||||
MINIMUM_HA_VERSION,
|
||||
)
|
||||
hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
|
||||
return False
|
||||
|
||||
if not await hacs.data.restore():
|
||||
hacs.disable_hacs(HacsDisabledReason.RESTORE)
|
||||
return False
|
||||
|
||||
hacs.set_active_categories()
|
||||
|
||||
async_register_websocket_commands(hass)
|
||||
await async_register_frontend(hass, hacs)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
|
||||
|
||||
hacs.set_stage(HacsStage.SETUP)
|
||||
if hacs.system.disabled:
|
||||
return False
|
||||
|
||||
hacs.set_stage(HacsStage.WAITING)
|
||||
hacs.log.info("Setup complete, waiting for Home Assistant before startup tasks starts")
|
||||
|
||||
# Schedule startup tasks
|
||||
async_at_start(hass=hass, at_start_cb=hacs.startup_tasks)
|
||||
|
||||
return not hacs.system.disabled
|
||||
|
||||
async def async_try_startup(_=None):
|
||||
"""Startup wrapper for yaml config."""
|
||||
try:
|
||||
startup_result = await async_startup()
|
||||
except AIOGitHubAPIException:
|
||||
startup_result = False
|
||||
if not startup_result:
|
||||
if hacs.system.disabled_reason != HacsDisabledReason.INVALID_TOKEN:
|
||||
hacs.log.info("Could not setup HACS, trying again in 15 min")
|
||||
async_call_later(hass, 900, async_try_startup)
|
||||
return
|
||||
hacs.enable_hacs()
|
||||
|
||||
await async_try_startup()
|
||||
|
||||
# Remove old (v0-v1) sensor if it exists, can be removed in v3
|
||||
er = async_get_entity_registry(hass)
|
||||
if old_sensor := er.async_get_entity_id("sensor", DOMAIN, HACS_SYSTEM_ID):
|
||||
er.async_remove(old_sensor)
|
||||
|
||||
# Mischief managed!
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Set up this integration using UI."""
|
||||
config_entry.async_on_unload(config_entry.add_update_listener(async_reload_entry))
|
||||
setup_result = await _async_initialize_integration(hass=hass, config_entry=config_entry)
|
||||
hacs: HacsBase = hass.data[DOMAIN]
|
||||
return setup_result and not hacs.system.disabled
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Handle removal of an entry."""
|
||||
hacs: HacsBase = hass.data[DOMAIN]
|
||||
|
||||
if hacs.queue.has_pending_tasks:
|
||||
hacs.log.warning("Pending tasks, can not unload, try again later.")
|
||||
return False
|
||||
|
||||
# Clear out pending queue
|
||||
hacs.queue.clear()
|
||||
|
||||
for task in hacs.recurring_tasks:
|
||||
# Cancel all pending tasks
|
||||
task()
|
||||
|
||||
# Store data
|
||||
await hacs.data.async_write(force=True)
|
||||
|
||||
try:
|
||||
if hass.data.get("frontend_panels", {}).get("hacs"):
|
||||
hacs.log.info("Removing sidepanel")
|
||||
async_remove_panel(hass, "hacs")
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
|
||||
|
||||
hacs.set_stage(None)
|
||||
hacs.disable_hacs(HacsDisabledReason.REMOVED)
|
||||
|
||||
hass.data.pop(DOMAIN, None)
|
||||
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def async_reload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
"""Reload the HACS config entry."""
|
||||
if not await async_unload_entry(hass, config_entry):
|
||||
return
|
||||
await async_setup_entry(hass, config_entry)
|
||||
BIN
custom_components/hacs/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/base.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/base.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/config_flow.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/config_flow.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/const.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/const.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/coordinator.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/coordinator.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/data_client.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/data_client.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/diagnostics.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/diagnostics.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/entity.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/entity.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/enums.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/enums.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/exceptions.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/exceptions.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/frontend.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/frontend.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/repairs.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/repairs.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/switch.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/switch.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/system_health.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/system_health.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/types.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/types.cpython-313.pyc
Normal file
Binary file not shown.
BIN
custom_components/hacs/__pycache__/update.cpython-313.pyc
Normal file
BIN
custom_components/hacs/__pycache__/update.cpython-313.pyc
Normal file
Binary file not shown.
1110
custom_components/hacs/base.py
Normal file
1110
custom_components/hacs/base.py
Normal file
File diff suppressed because it is too large
Load Diff
225
custom_components/hacs/config_flow.py
Normal file
225
custom_components/hacs/config_flow.py
Normal file
@@ -0,0 +1,225 @@
|
||||
"""Adds config flow for HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from aiogithubapi import (
|
||||
GitHubDeviceAPI,
|
||||
GitHubException,
|
||||
GitHubLoginDeviceModel,
|
||||
GitHubLoginOauthModel,
|
||||
)
|
||||
from aiogithubapi.common.const import OAUTH_USER_LOGIN
|
||||
from awesomeversion import AwesomeVersion
|
||||
from homeassistant.config_entries import ConfigFlow, OptionsFlow
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import UnknownFlow
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.loader import async_get_integration
|
||||
import voluptuous as vol
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import CLIENT_ID, DOMAIN, LOCALE, MINIMUM_HA_VERSION
|
||||
from .utils.configuration_schema import (
|
||||
APPDAEMON,
|
||||
COUNTRY,
|
||||
SIDEPANEL_ICON,
|
||||
SIDEPANEL_TITLE,
|
||||
)
|
||||
from .utils.logger import LOGGER
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
|
||||
class HacsFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for HACS."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
hass: HomeAssistant
|
||||
activation_task: asyncio.Task | None = None
|
||||
device: GitHubDeviceAPI | None = None
|
||||
|
||||
_registration: GitHubLoginDeviceModel | None = None
|
||||
_activation: GitHubLoginOauthModel | None = None
|
||||
_reauth: bool = False
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize."""
|
||||
self._errors = {}
|
||||
self._user_input = {}
|
||||
|
||||
async def async_step_user(self, user_input):
|
||||
"""Handle a flow initialized by the user."""
|
||||
self._errors = {}
|
||||
if self._async_current_entries():
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
if self.hass.data.get(DOMAIN):
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
|
||||
if user_input:
|
||||
if [x for x in user_input if x.startswith("acc_") and not user_input[x]]:
|
||||
self._errors["base"] = "acc"
|
||||
return await self._show_config_form(user_input)
|
||||
|
||||
self._user_input = user_input
|
||||
|
||||
return await self.async_step_device(user_input)
|
||||
|
||||
# Initial form
|
||||
return await self._show_config_form(user_input)
|
||||
|
||||
async def async_step_device(self, _user_input):
|
||||
"""Handle device steps."""
|
||||
|
||||
async def _wait_for_activation() -> None:
|
||||
try:
|
||||
response = await self.device.activation(device_code=self._registration.device_code)
|
||||
self._activation = response.data
|
||||
finally:
|
||||
|
||||
async def _progress():
|
||||
with suppress(UnknownFlow):
|
||||
await self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
|
||||
|
||||
if not self.device:
|
||||
integration = await async_get_integration(self.hass, DOMAIN)
|
||||
self.device = GitHubDeviceAPI(
|
||||
client_id=CLIENT_ID,
|
||||
session=aiohttp_client.async_get_clientsession(self.hass),
|
||||
**{"client_name": f"HACS/{integration.version}"},
|
||||
)
|
||||
try:
|
||||
response = await self.device.register()
|
||||
self._registration = response.data
|
||||
except GitHubException as exception:
|
||||
LOGGER.exception(exception)
|
||||
return self.async_abort(reason="could_not_register")
|
||||
|
||||
if self.activation_task is None:
|
||||
self.activation_task = self.hass.async_create_task(_wait_for_activation())
|
||||
|
||||
if self.activation_task.done():
|
||||
if (exception := self.activation_task.exception()) is not None:
|
||||
LOGGER.exception(exception)
|
||||
return self.async_show_progress_done(next_step_id="could_not_register")
|
||||
return self.async_show_progress_done(next_step_id="device_done")
|
||||
|
||||
show_progress_kwargs = {
|
||||
"step_id": "device",
|
||||
"progress_action": "wait_for_device",
|
||||
"description_placeholders": {
|
||||
"url": OAUTH_USER_LOGIN,
|
||||
"code": self._registration.user_code,
|
||||
},
|
||||
"progress_task": self.activation_task,
|
||||
}
|
||||
return self.async_show_progress(**show_progress_kwargs)
|
||||
|
||||
async def _show_config_form(self, user_input):
|
||||
"""Show the configuration form to edit location data."""
|
||||
|
||||
if not user_input:
|
||||
user_input = {}
|
||||
|
||||
if AwesomeVersion(HAVERSION) < MINIMUM_HA_VERSION:
|
||||
return self.async_abort(
|
||||
reason="min_ha_version",
|
||||
description_placeholders={"version": MINIMUM_HA_VERSION},
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required("acc_logs", default=user_input.get("acc_logs", False)): bool,
|
||||
vol.Required("acc_addons", default=user_input.get("acc_addons", False)): bool,
|
||||
vol.Required(
|
||||
"acc_untested", default=user_input.get("acc_untested", False)
|
||||
): bool,
|
||||
vol.Required("acc_disable", default=user_input.get("acc_disable", False)): bool,
|
||||
}
|
||||
),
|
||||
errors=self._errors,
|
||||
)
|
||||
|
||||
async def async_step_device_done(self, user_input: dict[str, bool] | None = None):
|
||||
"""Handle device steps"""
|
||||
if self._reauth:
|
||||
existing_entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
|
||||
self.hass.config_entries.async_update_entry(
|
||||
existing_entry, data={**existing_entry.data, "token": self._activation.access_token}
|
||||
)
|
||||
await self.hass.config_entries.async_reload(existing_entry.entry_id)
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
|
||||
return self.async_create_entry(
|
||||
title="",
|
||||
data={
|
||||
"token": self._activation.access_token,
|
||||
},
|
||||
options={
|
||||
"experimental": True,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_could_not_register(self, _user_input=None):
|
||||
"""Handle issues that need transition await from progress step."""
|
||||
return self.async_abort(reason="could_not_register")
|
||||
|
||||
async def async_step_reauth(self, _user_input=None):
|
||||
"""Perform reauth upon an API authentication error."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(self, user_input=None):
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema({}),
|
||||
)
|
||||
self._reauth = True
|
||||
return await self.async_step_device(None)
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry):
|
||||
return HacsOptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class HacsOptionsFlowHandler(OptionsFlow):
|
||||
"""HACS config flow options handler."""
|
||||
|
||||
def __init__(self, config_entry):
|
||||
"""Initialize HACS options flow."""
|
||||
if AwesomeVersion(HAVERSION) < "2024.11.99":
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(self, _user_input=None):
|
||||
"""Manage the options."""
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_step_user(self, user_input=None):
|
||||
"""Handle a flow initialized by the user."""
|
||||
hacs: HacsBase = self.hass.data.get(DOMAIN)
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(title="", data={**user_input, "experimental": True})
|
||||
|
||||
if hacs is None or hacs.configuration is None:
|
||||
return self.async_abort(reason="not_setup")
|
||||
|
||||
if hacs.queue.has_pending_tasks:
|
||||
return self.async_abort(reason="pending_tasks")
|
||||
|
||||
schema = {
|
||||
vol.Optional(SIDEPANEL_TITLE, default=hacs.configuration.sidepanel_title): str,
|
||||
vol.Optional(SIDEPANEL_ICON, default=hacs.configuration.sidepanel_icon): str,
|
||||
vol.Optional(COUNTRY, default=hacs.configuration.country): vol.In(LOCALE),
|
||||
vol.Optional(APPDAEMON, default=hacs.configuration.appdaemon): bool,
|
||||
}
|
||||
|
||||
return self.async_show_form(step_id="user", data_schema=vol.Schema(schema))
|
||||
294
custom_components/hacs/const.py
Normal file
294
custom_components/hacs/const.py
Normal file
@@ -0,0 +1,294 @@
|
||||
"""Constants for HACS"""
|
||||
|
||||
from typing import TypeVar
|
||||
|
||||
from aiogithubapi.common.const import ACCEPT_HEADERS
|
||||
|
||||
NAME_SHORT = "HACS"
|
||||
DOMAIN = "hacs"
|
||||
CLIENT_ID = "395a8e669c5de9f7c6e8"
|
||||
MINIMUM_HA_VERSION = "2024.4.1"
|
||||
|
||||
URL_BASE = "/hacsfiles"
|
||||
|
||||
TV = TypeVar("TV")
|
||||
|
||||
PACKAGE_NAME = "custom_components.hacs"
|
||||
|
||||
DEFAULT_CONCURRENT_TASKS = 15
|
||||
DEFAULT_CONCURRENT_BACKOFF_TIME = 1
|
||||
|
||||
HACS_REPOSITORY_ID = "172733314"
|
||||
|
||||
HACS_ACTION_GITHUB_API_HEADERS = {
|
||||
"User-Agent": "HACS/action",
|
||||
"Accept": ACCEPT_HEADERS["preview"],
|
||||
}
|
||||
|
||||
VERSION_STORAGE = "6"
|
||||
STORENAME = "hacs"
|
||||
|
||||
HACS_SYSTEM_ID = "0717a0cd-745c-48fd-9b16-c8534c9704f9-bc944b0f-fd42-4a58-a072-ade38d1444cd"
|
||||
|
||||
STARTUP = """
|
||||
-------------------------------------------------------------------
|
||||
HACS (Home Assistant Community Store)
|
||||
|
||||
Version: %s
|
||||
This is a custom integration
|
||||
If you have any issues with this you need to open an issue here:
|
||||
https://github.com/hacs/integration/issues
|
||||
-------------------------------------------------------------------
|
||||
"""
|
||||
|
||||
LOCALE = [
|
||||
"ALL",
|
||||
"AF",
|
||||
"AL",
|
||||
"DZ",
|
||||
"AS",
|
||||
"AD",
|
||||
"AO",
|
||||
"AI",
|
||||
"AQ",
|
||||
"AG",
|
||||
"AR",
|
||||
"AM",
|
||||
"AW",
|
||||
"AU",
|
||||
"AT",
|
||||
"AZ",
|
||||
"BS",
|
||||
"BH",
|
||||
"BD",
|
||||
"BB",
|
||||
"BY",
|
||||
"BE",
|
||||
"BZ",
|
||||
"BJ",
|
||||
"BM",
|
||||
"BT",
|
||||
"BO",
|
||||
"BQ",
|
||||
"BA",
|
||||
"BW",
|
||||
"BV",
|
||||
"BR",
|
||||
"IO",
|
||||
"BN",
|
||||
"BG",
|
||||
"BF",
|
||||
"BI",
|
||||
"KH",
|
||||
"CM",
|
||||
"CA",
|
||||
"CV",
|
||||
"KY",
|
||||
"CF",
|
||||
"TD",
|
||||
"CL",
|
||||
"CN",
|
||||
"CX",
|
||||
"CC",
|
||||
"CO",
|
||||
"KM",
|
||||
"CG",
|
||||
"CD",
|
||||
"CK",
|
||||
"CR",
|
||||
"HR",
|
||||
"CU",
|
||||
"CW",
|
||||
"CY",
|
||||
"CZ",
|
||||
"CI",
|
||||
"DK",
|
||||
"DJ",
|
||||
"DM",
|
||||
"DO",
|
||||
"EC",
|
||||
"EG",
|
||||
"SV",
|
||||
"GQ",
|
||||
"ER",
|
||||
"EE",
|
||||
"ET",
|
||||
"FK",
|
||||
"FO",
|
||||
"FJ",
|
||||
"FI",
|
||||
"FR",
|
||||
"GF",
|
||||
"PF",
|
||||
"TF",
|
||||
"GA",
|
||||
"GM",
|
||||
"GE",
|
||||
"DE",
|
||||
"GH",
|
||||
"GI",
|
||||
"GR",
|
||||
"GL",
|
||||
"GD",
|
||||
"GP",
|
||||
"GU",
|
||||
"GT",
|
||||
"GG",
|
||||
"GN",
|
||||
"GW",
|
||||
"GY",
|
||||
"HT",
|
||||
"HM",
|
||||
"VA",
|
||||
"HN",
|
||||
"HK",
|
||||
"HU",
|
||||
"IS",
|
||||
"IN",
|
||||
"ID",
|
||||
"IR",
|
||||
"IQ",
|
||||
"IE",
|
||||
"IM",
|
||||
"IL",
|
||||
"IT",
|
||||
"JM",
|
||||
"JP",
|
||||
"JE",
|
||||
"JO",
|
||||
"KZ",
|
||||
"KE",
|
||||
"KI",
|
||||
"KP",
|
||||
"KR",
|
||||
"KW",
|
||||
"KG",
|
||||
"LA",
|
||||
"LV",
|
||||
"LB",
|
||||
"LS",
|
||||
"LR",
|
||||
"LY",
|
||||
"LI",
|
||||
"LT",
|
||||
"LU",
|
||||
"MO",
|
||||
"MK",
|
||||
"MG",
|
||||
"MW",
|
||||
"MY",
|
||||
"MV",
|
||||
"ML",
|
||||
"MT",
|
||||
"MH",
|
||||
"MQ",
|
||||
"MR",
|
||||
"MU",
|
||||
"YT",
|
||||
"MX",
|
||||
"FM",
|
||||
"MD",
|
||||
"MC",
|
||||
"MN",
|
||||
"ME",
|
||||
"MS",
|
||||
"MA",
|
||||
"MZ",
|
||||
"MM",
|
||||
"NA",
|
||||
"NR",
|
||||
"NP",
|
||||
"NL",
|
||||
"NC",
|
||||
"NZ",
|
||||
"NI",
|
||||
"NE",
|
||||
"NG",
|
||||
"NU",
|
||||
"NF",
|
||||
"MP",
|
||||
"NO",
|
||||
"OM",
|
||||
"PK",
|
||||
"PW",
|
||||
"PS",
|
||||
"PA",
|
||||
"PG",
|
||||
"PY",
|
||||
"PE",
|
||||
"PH",
|
||||
"PN",
|
||||
"PL",
|
||||
"PT",
|
||||
"PR",
|
||||
"QA",
|
||||
"RO",
|
||||
"RU",
|
||||
"RW",
|
||||
"RE",
|
||||
"BL",
|
||||
"SH",
|
||||
"KN",
|
||||
"LC",
|
||||
"MF",
|
||||
"PM",
|
||||
"VC",
|
||||
"WS",
|
||||
"SM",
|
||||
"ST",
|
||||
"SA",
|
||||
"SN",
|
||||
"RS",
|
||||
"SC",
|
||||
"SL",
|
||||
"SG",
|
||||
"SX",
|
||||
"SK",
|
||||
"SI",
|
||||
"SB",
|
||||
"SO",
|
||||
"ZA",
|
||||
"GS",
|
||||
"SS",
|
||||
"ES",
|
||||
"LK",
|
||||
"SD",
|
||||
"SR",
|
||||
"SJ",
|
||||
"SZ",
|
||||
"SE",
|
||||
"CH",
|
||||
"SY",
|
||||
"TW",
|
||||
"TJ",
|
||||
"TZ",
|
||||
"TH",
|
||||
"TL",
|
||||
"TG",
|
||||
"TK",
|
||||
"TO",
|
||||
"TT",
|
||||
"TN",
|
||||
"TR",
|
||||
"TM",
|
||||
"TC",
|
||||
"TV",
|
||||
"UG",
|
||||
"UA",
|
||||
"AE",
|
||||
"GB",
|
||||
"US",
|
||||
"UM",
|
||||
"UY",
|
||||
"UZ",
|
||||
"VU",
|
||||
"VE",
|
||||
"VN",
|
||||
"VG",
|
||||
"VI",
|
||||
"WF",
|
||||
"EH",
|
||||
"YE",
|
||||
"ZM",
|
||||
"ZW",
|
||||
]
|
||||
38
custom_components/hacs/coordinator.py
Normal file
38
custom_components/hacs/coordinator.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Coordinator to trigger entity updates."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.core import CALLBACK_TYPE, callback
|
||||
from homeassistant.helpers.update_coordinator import BaseDataUpdateCoordinatorProtocol
|
||||
|
||||
|
||||
class HacsUpdateCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
"""Dispatch updates to update entities."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize."""
|
||||
self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}
|
||||
|
||||
@callback
|
||||
def async_add_listener(
|
||||
self, update_callback: CALLBACK_TYPE, context: Any = None
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for data updates."""
|
||||
|
||||
@callback
|
||||
def remove_listener() -> None:
|
||||
"""Remove update listener."""
|
||||
self._listeners.pop(remove_listener)
|
||||
|
||||
self._listeners[remove_listener] = (update_callback, context)
|
||||
|
||||
return remove_listener
|
||||
|
||||
@callback
|
||||
def async_update_listeners(self) -> None:
|
||||
"""Update all registered listeners."""
|
||||
for update_callback, _ in list(self._listeners.values()):
|
||||
update_callback()
|
||||
98
custom_components/hacs/data_client.py
Normal file
98
custom_components/hacs/data_client.py
Normal file
@@ -0,0 +1,98 @@
|
||||
"""HACS Data client."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientSession, ClientTimeout
|
||||
import voluptuous as vol
|
||||
|
||||
from .exceptions import HacsException, HacsNotModifiedException
|
||||
from .utils.logger import LOGGER
|
||||
from .utils.validate import (
|
||||
VALIDATE_FETCHED_V2_CRITICAL_REPO_SCHEMA,
|
||||
VALIDATE_FETCHED_V2_REMOVED_REPO_SCHEMA,
|
||||
VALIDATE_FETCHED_V2_REPO_DATA,
|
||||
)
|
||||
|
||||
CRITICAL_REMOVED_VALIDATORS = {
|
||||
"critical": VALIDATE_FETCHED_V2_CRITICAL_REPO_SCHEMA,
|
||||
"removed": VALIDATE_FETCHED_V2_REMOVED_REPO_SCHEMA,
|
||||
}
|
||||
|
||||
|
||||
class HacsDataClient:
|
||||
"""HACS Data client."""
|
||||
|
||||
def __init__(self, session: ClientSession, client_name: str) -> None:
|
||||
"""Initialize."""
|
||||
self._client_name = client_name
|
||||
self._etags = {}
|
||||
self._session = session
|
||||
|
||||
async def _do_request(
|
||||
self,
|
||||
filename: str,
|
||||
section: str | None = None,
|
||||
) -> dict[str, dict[str, Any]] | list[str]:
|
||||
"""Do request."""
|
||||
endpoint = "/".join([v for v in [section, filename] if v is not None])
|
||||
try:
|
||||
response = await self._session.get(
|
||||
f"https://data-v2.hacs.xyz/{endpoint}",
|
||||
timeout=ClientTimeout(total=60),
|
||||
headers={
|
||||
"User-Agent": self._client_name,
|
||||
"If-None-Match": self._etags.get(endpoint, ""),
|
||||
},
|
||||
)
|
||||
if response.status == 304:
|
||||
raise HacsNotModifiedException() from None
|
||||
response.raise_for_status()
|
||||
except HacsNotModifiedException:
|
||||
raise
|
||||
except TimeoutError:
|
||||
raise HacsException("Timeout of 60s reached") from None
|
||||
except Exception as exception:
|
||||
raise HacsException(f"Error fetching data from HACS: {exception}") from exception
|
||||
|
||||
self._etags[endpoint] = response.headers.get("etag")
|
||||
|
||||
return await response.json()
|
||||
|
||||
async def get_data(self, section: str | None, *, validate: bool) -> dict[str, dict[str, Any]]:
|
||||
"""Get data."""
|
||||
data = await self._do_request(filename="data.json", section=section)
|
||||
if not validate:
|
||||
return data
|
||||
|
||||
if section in VALIDATE_FETCHED_V2_REPO_DATA:
|
||||
validated = {}
|
||||
for key, repo_data in data.items():
|
||||
try:
|
||||
validated[key] = VALIDATE_FETCHED_V2_REPO_DATA[section](repo_data)
|
||||
except vol.Invalid as exception:
|
||||
LOGGER.info(
|
||||
"Got invalid data for %s (%s)", repo_data.get("full_name", key), exception
|
||||
)
|
||||
continue
|
||||
|
||||
return validated
|
||||
|
||||
if not (validator := CRITICAL_REMOVED_VALIDATORS.get(section)):
|
||||
raise ValueError(f"Do not know how to validate {section}")
|
||||
|
||||
validated = []
|
||||
for repo_data in data:
|
||||
try:
|
||||
validated.append(validator(repo_data))
|
||||
except vol.Invalid as exception:
|
||||
LOGGER.info("Got invalid data for %s (%s)", section, exception)
|
||||
continue
|
||||
|
||||
return validated
|
||||
|
||||
async def get_repositories(self, section: str) -> list[str]:
|
||||
"""Get repositories."""
|
||||
return await self._do_request(filename="repositories.json", section=section)
|
||||
80
custom_components/hacs/diagnostics.py
Normal file
80
custom_components/hacs/diagnostics.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""Diagnostics support for HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aiogithubapi import GitHubException
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
hacs: HacsBase = hass.data[DOMAIN]
|
||||
|
||||
data = {
|
||||
"entry": entry.as_dict(),
|
||||
"hacs": {
|
||||
"stage": hacs.stage,
|
||||
"version": hacs.version,
|
||||
"disabled_reason": hacs.system.disabled_reason,
|
||||
"new": hacs.status.new,
|
||||
"startup": hacs.status.startup,
|
||||
"categories": hacs.common.categories,
|
||||
"renamed_repositories": hacs.common.renamed_repositories,
|
||||
"archived_repositories": hacs.common.archived_repositories,
|
||||
"ignored_repositories": hacs.common.ignored_repositories,
|
||||
"lovelace_mode": hacs.core.lovelace_mode,
|
||||
"configuration": {},
|
||||
},
|
||||
"custom_repositories": [
|
||||
repo.data.full_name
|
||||
for repo in hacs.repositories.list_all
|
||||
if not hacs.repositories.is_default(str(repo.data.id))
|
||||
],
|
||||
"repositories": [],
|
||||
}
|
||||
|
||||
for key in (
|
||||
"appdaemon",
|
||||
"country",
|
||||
"debug",
|
||||
"dev",
|
||||
"python_script",
|
||||
"release_limit",
|
||||
"theme",
|
||||
):
|
||||
data["hacs"]["configuration"][key] = getattr(hacs.configuration, key, None)
|
||||
|
||||
for repository in hacs.repositories.list_downloaded:
|
||||
data["repositories"].append(
|
||||
{
|
||||
"data": repository.data.to_json(),
|
||||
"integration_manifest": repository.integration_manifest,
|
||||
"repository_manifest": repository.repository_manifest.to_dict(),
|
||||
"ref": repository.ref,
|
||||
"paths": {
|
||||
"localpath": repository.localpath.replace(hacs.core.config_path, "/config"),
|
||||
"local": repository.content.path.local.replace(
|
||||
hacs.core.config_path, "/config"
|
||||
),
|
||||
"remote": repository.content.path.remote,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
rate_limit_response = await hacs.githubapi.rate_limit()
|
||||
data["rate_limit"] = rate_limit_response.data.as_dict
|
||||
except GitHubException as exception:
|
||||
data["rate_limit"] = str(exception)
|
||||
|
||||
return async_redact_data(data, ("token",))
|
||||
143
custom_components/hacs/entity.py
Normal file
143
custom_components/hacs/entity.py
Normal file
@@ -0,0 +1,143 @@
|
||||
"""HACS Base entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.update_coordinator import BaseCoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, HACS_SYSTEM_ID, NAME_SHORT
|
||||
from .coordinator import HacsUpdateCoordinator
|
||||
from .enums import HacsDispatchEvent, HacsGitHubRepo
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .base import HacsBase
|
||||
from .repositories.base import HacsRepository
|
||||
|
||||
|
||||
def system_info(hacs: HacsBase) -> dict:
|
||||
"""Return system info."""
|
||||
return {
|
||||
"identifiers": {(DOMAIN, HACS_SYSTEM_ID)},
|
||||
"name": NAME_SHORT,
|
||||
"manufacturer": "hacs.xyz",
|
||||
"model": "",
|
||||
"sw_version": str(hacs.version),
|
||||
"configuration_url": "homeassistant://hacs",
|
||||
"entry_type": DeviceEntryType.SERVICE,
|
||||
}
|
||||
|
||||
|
||||
class HacsBaseEntity(Entity):
|
||||
"""Base HACS entity."""
|
||||
|
||||
repository: HacsRepository | None = None
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, hacs: HacsBase) -> None:
|
||||
"""Initialize."""
|
||||
self.hacs = hacs
|
||||
|
||||
|
||||
class HacsDispatcherEntity(HacsBaseEntity):
|
||||
"""Base HACS entity listening to dispatcher signals."""
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register for status events."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
HacsDispatchEvent.REPOSITORY,
|
||||
self._update_and_write_state,
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update(self) -> None:
|
||||
"""Update the sensor."""
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Manual updates of the sensor."""
|
||||
self._update()
|
||||
|
||||
@callback
|
||||
def _update_and_write_state(self, _: Any) -> None:
|
||||
"""Update the entity and write state."""
|
||||
self._update()
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class HacsSystemEntity(HacsDispatcherEntity):
|
||||
"""Base system entity."""
|
||||
|
||||
_attr_icon = "hacs:hacs"
|
||||
_attr_unique_id = HACS_SYSTEM_ID
|
||||
|
||||
@property
|
||||
def device_info(self) -> dict[str, any]:
|
||||
"""Return device information about HACS."""
|
||||
return system_info(self.hacs)
|
||||
|
||||
|
||||
class HacsRepositoryEntity(BaseCoordinatorEntity[HacsUpdateCoordinator], HacsBaseEntity):
|
||||
"""Base repository entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hacs: HacsBase,
|
||||
repository: HacsRepository,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
BaseCoordinatorEntity.__init__(self, hacs.coordinators[repository.data.category])
|
||||
HacsBaseEntity.__init__(self, hacs=hacs)
|
||||
self.repository = repository
|
||||
self._attr_unique_id = str(repository.data.id)
|
||||
self._repo_last_fetched = repository.data.last_fetched
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return self.hacs.repositories.is_downloaded(repository_id=str(self.repository.data.id))
|
||||
|
||||
@property
|
||||
def device_info(self) -> dict[str, any]:
|
||||
"""Return device information about HACS."""
|
||||
if self.repository.data.full_name == HacsGitHubRepo.INTEGRATION:
|
||||
return system_info(self.hacs)
|
||||
|
||||
def _manufacturer():
|
||||
if authors := self.repository.data.authors:
|
||||
return ", ".join(author.replace("@", "") for author in authors)
|
||||
return self.repository.data.full_name.split("/")[0]
|
||||
|
||||
return {
|
||||
"identifiers": {(DOMAIN, str(self.repository.data.id))},
|
||||
"name": self.repository.display_name,
|
||||
"model": self.repository.data.category,
|
||||
"manufacturer": _manufacturer(),
|
||||
"configuration_url": f"homeassistant://hacs/repository/{self.repository.data.id}",
|
||||
"entry_type": DeviceEntryType.SERVICE,
|
||||
}
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
if (
|
||||
self._repo_last_fetched is not None
|
||||
and self.repository.data.last_fetched is not None
|
||||
and self._repo_last_fetched >= self.repository.data.last_fetched
|
||||
):
|
||||
return
|
||||
|
||||
self._repo_last_fetched = self.repository.data.last_fetched
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the entity.
|
||||
|
||||
Only used by the generic entity update service.
|
||||
"""
|
||||
71
custom_components/hacs/enums.py
Normal file
71
custom_components/hacs/enums.py
Normal file
@@ -0,0 +1,71 @@
|
||||
"""Helper constants."""
|
||||
|
||||
# pylint: disable=missing-class-docstring
|
||||
from enum import StrEnum
|
||||
|
||||
|
||||
class HacsGitHubRepo(StrEnum):
|
||||
"""HacsGitHubRepo."""
|
||||
|
||||
DEFAULT = "hacs/default"
|
||||
INTEGRATION = "hacs/integration"
|
||||
|
||||
|
||||
class HacsCategory(StrEnum):
|
||||
APPDAEMON = "appdaemon"
|
||||
INTEGRATION = "integration"
|
||||
LOVELACE = "lovelace"
|
||||
PLUGIN = "plugin" # Kept for legacy purposes
|
||||
PYTHON_SCRIPT = "python_script"
|
||||
TEMPLATE = "template"
|
||||
THEME = "theme"
|
||||
REMOVED = "removed"
|
||||
|
||||
def __str__(self):
|
||||
return str(self.value)
|
||||
|
||||
|
||||
class HacsDispatchEvent(StrEnum):
|
||||
"""HacsDispatchEvent."""
|
||||
|
||||
CONFIG = "hacs_dispatch_config"
|
||||
ERROR = "hacs_dispatch_error"
|
||||
RELOAD = "hacs_dispatch_reload"
|
||||
REPOSITORY = "hacs_dispatch_repository"
|
||||
REPOSITORY_DOWNLOAD_PROGRESS = "hacs_dispatch_repository_download_progress"
|
||||
STAGE = "hacs_dispatch_stage"
|
||||
STARTUP = "hacs_dispatch_startup"
|
||||
STATUS = "hacs_dispatch_status"
|
||||
|
||||
|
||||
class RepositoryFile(StrEnum):
|
||||
"""Repository file names."""
|
||||
|
||||
HACS_JSON = "hacs.json"
|
||||
MAINIFEST_JSON = "manifest.json"
|
||||
|
||||
|
||||
class LovelaceMode(StrEnum):
|
||||
"""Lovelace Modes."""
|
||||
|
||||
STORAGE = "storage"
|
||||
AUTO = "auto"
|
||||
AUTO_GEN = "auto-gen"
|
||||
YAML = "yaml"
|
||||
|
||||
|
||||
class HacsStage(StrEnum):
|
||||
SETUP = "setup"
|
||||
STARTUP = "startup"
|
||||
WAITING = "waiting"
|
||||
RUNNING = "running"
|
||||
BACKGROUND = "background"
|
||||
|
||||
|
||||
class HacsDisabledReason(StrEnum):
|
||||
RATE_LIMIT = "rate_limit"
|
||||
REMOVED = "removed"
|
||||
INVALID_TOKEN = "invalid_token"
|
||||
CONSTRAINS = "constrains"
|
||||
LOAD_HACS = "load_hacs"
|
||||
RESTORE = "restore"
|
||||
49
custom_components/hacs/exceptions.py
Normal file
49
custom_components/hacs/exceptions.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""Custom Exceptions for HACS."""
|
||||
|
||||
|
||||
class HacsException(Exception):
|
||||
"""Super basic."""
|
||||
|
||||
|
||||
class HacsRepositoryArchivedException(HacsException):
|
||||
"""For repositories that are archived."""
|
||||
|
||||
|
||||
class HacsNotModifiedException(HacsException):
|
||||
"""For responses that are not modified."""
|
||||
|
||||
|
||||
class HacsExpectedException(HacsException):
|
||||
"""For stuff that are expected."""
|
||||
|
||||
|
||||
class HacsRepositoryExistException(HacsException):
|
||||
"""For repositories that are already exist."""
|
||||
|
||||
|
||||
class HacsExecutionStillInProgress(HacsException):
|
||||
"""Exception to raise if execution is still in progress."""
|
||||
|
||||
|
||||
class AddonRepositoryException(HacsException):
|
||||
"""Exception to raise when user tries to add add-on repository."""
|
||||
|
||||
exception_message = (
|
||||
"The repository does not seem to be a integration, "
|
||||
"but an add-on repository. HACS does not manage add-ons."
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(self.exception_message)
|
||||
|
||||
|
||||
class HomeAssistantCoreRepositoryException(HacsException):
|
||||
"""Exception to raise when user tries to add the home-assistant/core repository."""
|
||||
|
||||
exception_message = (
|
||||
"You can not add homeassistant/core, to use core integrations "
|
||||
"check the Home Assistant documentation for how to add them."
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(self.exception_message)
|
||||
67
custom_components/hacs/frontend.py
Normal file
67
custom_components/hacs/frontend.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Starting setup task: Frontend."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.components.frontend import (
|
||||
add_extra_js_url,
|
||||
async_register_built_in_panel,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, URL_BASE
|
||||
from .hacs_frontend import VERSION as FE_VERSION, locate_dir
|
||||
from .utils.workarounds import async_register_static_path
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .base import HacsBase
|
||||
|
||||
|
||||
async def async_register_frontend(hass: HomeAssistant, hacs: HacsBase) -> None:
|
||||
"""Register the frontend."""
|
||||
|
||||
# Register frontend
|
||||
if hacs.configuration.dev and (frontend_path := os.getenv("HACS_FRONTEND_DIR")):
|
||||
hacs.log.warning(
|
||||
"<HacsFrontend> Frontend development mode enabled. Do not run in production!"
|
||||
)
|
||||
await async_register_static_path(
|
||||
hass, f"{URL_BASE}/frontend", f"{frontend_path}/hacs_frontend", cache_headers=False
|
||||
)
|
||||
hacs.frontend_version = "dev"
|
||||
else:
|
||||
await async_register_static_path(
|
||||
hass, f"{URL_BASE}/frontend", locate_dir(), cache_headers=False
|
||||
)
|
||||
hacs.frontend_version = FE_VERSION
|
||||
|
||||
# Custom iconset
|
||||
await async_register_static_path(
|
||||
hass, f"{URL_BASE}/iconset.js", str(hacs.integration_dir / "iconset.js")
|
||||
)
|
||||
add_extra_js_url(hass, f"{URL_BASE}/iconset.js")
|
||||
|
||||
# Add to sidepanel if needed
|
||||
if DOMAIN not in hass.data.get("frontend_panels", {}):
|
||||
async_register_built_in_panel(
|
||||
hass,
|
||||
component_name="custom",
|
||||
sidebar_title=hacs.configuration.sidepanel_title,
|
||||
sidebar_icon=hacs.configuration.sidepanel_icon,
|
||||
frontend_url_path=DOMAIN,
|
||||
config={
|
||||
"_panel_custom": {
|
||||
"name": "hacs-frontend",
|
||||
"embed_iframe": True,
|
||||
"trust_external": False,
|
||||
"js_url": f"/hacsfiles/frontend/entrypoint.js?hacstag={hacs.frontend_version}",
|
||||
}
|
||||
},
|
||||
require_admin=True,
|
||||
)
|
||||
|
||||
# Setup plugin endpoint if needed
|
||||
await hacs.async_setup_frontend_endpoint_plugin()
|
||||
5
custom_components/hacs/hacs_frontend/__init__.py
Normal file
5
custom_components/hacs/hacs_frontend/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""HACS Frontend"""
|
||||
from .version import VERSION
|
||||
|
||||
def locate_dir():
|
||||
return __path__[0]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
1
custom_components/hacs/hacs_frontend/entrypoint.js
Normal file
1
custom_components/hacs/hacs_frontend/entrypoint.js
Normal file
@@ -0,0 +1 @@
|
||||
!function(){function n(n){var e=document.createElement("script");e.src=n,document.body.appendChild(e)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/hacsfiles/frontend/frontend_es5/entrypoint.c180d0b256f9b6d0.js");else try{new Function("import('/hacsfiles/frontend/frontend_latest/entrypoint.bb9d28f38e9fba76.js')")()}catch(e){n("/hacsfiles/frontend/frontend_es5/entrypoint.c180d0b256f9b6d0.js")}}()
|
||||
1
custom_components/hacs/hacs_frontend/extra.js
Normal file
1
custom_components/hacs/hacs_frontend/extra.js
Normal file
@@ -0,0 +1 @@
|
||||
!function(){function e(e){var n=document.createElement("script");n.src=e,document.body.appendChild(n)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))e("/hacsfiles/frontend/frontend_es5/extra.5b474fd28ce35f7e.js");else try{new Function("import('/hacsfiles/frontend/frontend_latest/extra.fb9760592efef202.js')")()}catch(n){e("/hacsfiles/frontend/frontend_es5/extra.5b474fd28ce35f7e.js")}}()
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,22 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2018 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2020 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2021 Google LLC
|
||||
* SPDX-LIcense-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* Copyright 2020 The Pennsylvania State University
|
||||
* @license Apache-2.0, see License.md for full text.
|
||||
*/
|
||||
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -0,0 +1,2 @@
|
||||
(self.webpackChunkhacs_frontend=self.webpackChunkhacs_frontend||[]).push([["1236"],{4121:function(){Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var a=String(e).split("."),l=!a[1],t=Number(a[0])==e,o=t&&a[0].slice(-1),r=t&&a[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&l?"one":"other"}},locale:"en"})}}]);
|
||||
//# sourceMappingURL=1236.7495ccc08957b0ec.js.map
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user