This commit is contained in:
2026-01-30 23:31:00 -06:00
commit a39095b3de
2665 changed files with 263970 additions and 0 deletions

View File

@@ -0,0 +1 @@
"""Initialize HACS utils."""

View File

@@ -0,0 +1,110 @@
"""Backup."""
from __future__ import annotations
import os
import shutil
import tempfile
from time import sleep
from typing import TYPE_CHECKING
from .path import is_safe
if TYPE_CHECKING:
from ..base import HacsBase
from ..repositories.base import HacsRepository
DEFAULT_BACKUP_PATH = f"{tempfile.gettempdir()}/hacs_backup/"
class Backup:
    """Create, restore, and clean up a temporary filesystem backup of a HACS-managed path."""

    def __init__(
        self,
        hacs: HacsBase,
        local_path: str | None = None,
        backup_path: str = DEFAULT_BACKUP_PATH,
        repository: HacsRepository | None = None,
    ) -> None:
        """Initialize.

        NOTE(review): if both local_path and repository are None this raises
        AttributeError on `repository.content` — presumably callers always
        provide one of the two; confirm.
        """
        self.hacs = hacs
        self.repository = repository
        self.local_path = local_path or repository.content.path.local
        self.backup_path = backup_path
        if repository:
            # Persistent per-repository backup location, e.g.
            # <tmp>/hacs_persistent_<category>/<name>
            self.backup_path = (
                tempfile.gettempdir()
                + f"/hacs_persistent_{repository.data.category}/"
                + repository.data.name
            )
        # NOTE(review): plain concatenation — relies on backup_path ending in
        # "/" (true for DEFAULT_BACKUP_PATH, but not for the repository branch
        # above, which yields ".../<name><basename>"); confirm intended.
        self.backup_path_full = f"{self.backup_path}{self.local_path.split('/')[-1]}"

    def _init_backup_dir(self) -> bool:
        """Prepare an empty backup directory; return False when a backup must not be made."""
        if not os.path.exists(self.local_path):
            return False
        # Refuse to back up (and therefore later delete) protected base dirs.
        if not is_safe(self.hacs, self.local_path):
            return False
        if os.path.exists(self.backup_path):
            shutil.rmtree(self.backup_path)

            # Wait for the folder to be removed
            while os.path.exists(self.backup_path):
                sleep(0.1)
        os.makedirs(self.backup_path, exist_ok=True)
        return True

    def create(self) -> None:
        """Create a backup in /tmp"""
        if not self._init_backup_dir():
            return
        try:
            if os.path.isfile(self.local_path):
                # Single file: copy into the backup dir, then remove original.
                shutil.copyfile(self.local_path, self.backup_path_full)
                os.remove(self.local_path)
            else:
                # Directory: copy the whole tree, then remove the original.
                shutil.copytree(self.local_path, self.backup_path_full)
                shutil.rmtree(self.local_path)

                # Wait for the folder to be removed
                while os.path.exists(self.local_path):
                    sleep(0.1)
            self.hacs.log.debug(
                "Backup for %s, created in %s",
                self.local_path,
                self.backup_path_full,
            )
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            # Best effort: a failed backup is logged, never raised.
            self.hacs.log.warning("Could not create backup: %s", exception)

    def restore(self) -> None:
        """Restore the local path from the backup, replacing whatever is there."""
        if not os.path.exists(self.backup_path_full):
            return

        if os.path.isfile(self.backup_path_full):
            if os.path.exists(self.local_path):
                os.remove(self.local_path)
            shutil.copyfile(self.backup_path_full, self.local_path)
        else:
            if os.path.exists(self.local_path):
                shutil.rmtree(self.local_path)

                # Wait for the folder to be removed
                while os.path.exists(self.local_path):
                    sleep(0.1)
            shutil.copytree(self.backup_path_full, self.local_path)

        self.hacs.log.debug("Restored %s, from backup %s", self.local_path, self.backup_path_full)

    def cleanup(self) -> None:
        """Cleanup backup files."""
        if not os.path.exists(self.backup_path):
            return
        shutil.rmtree(self.backup_path)

        # Wait for the folder to be removed
        while os.path.exists(self.backup_path):
            sleep(0.1)
        self.hacs.log.debug("Backup dir %s cleared", self.backup_path)

View File

@@ -0,0 +1,9 @@
"""HACS Configuration Schemas."""
# Configuration:
SIDEPANEL_TITLE = "sidepanel_title"
SIDEPANEL_ICON = "sidepanel_icon"
APPDAEMON = "appdaemon"
# Options:
COUNTRY = "country"

View File

@@ -0,0 +1,323 @@
"""Data handler for HACS."""
from __future__ import annotations
import asyncio
from datetime import UTC, datetime
from typing import Any
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from ..base import HacsBase
from ..const import HACS_REPOSITORY_ID
from ..enums import HacsDisabledReason, HacsDispatchEvent
from ..repositories.base import TOPIC_FILTER, HacsManifest, HacsRepository
from .logger import LOGGER
from .path import is_safe
from .store import async_load_from_store, async_save_to_store
# (attribute name, default) pairs persisted for every known repository.
# When exporting, attributes still equal to their default are skipped to
# keep the store file small.
EXPORTED_BASE_DATA = (
    ("new", False),
    ("full_name", ""),
)

# Additional keys persisted for repositories that are known but not
# downloaded. ("new", False) removed here: it is already part of
# EXPORTED_BASE_DATA and was exported twice.
EXPORTED_REPOSITORY_DATA = EXPORTED_BASE_DATA + (
    ("authors", []),
    ("category", ""),
    ("description", ""),
    ("domain", None),
    ("downloads", 0),
    ("etag_repository", None),
    ("hide", False),
    ("last_updated", 0),
    ("stargazers_count", 0),
    ("topics", []),
)

# Additional keys persisted only for downloaded repositories.
EXPORTED_DOWNLOADED_REPOSITORY_DATA = EXPORTED_REPOSITORY_DATA + (
    ("archived", False),
    ("config_flow", False),
    ("default_branch", None),
    ("first_install", False),
    ("installed_commit", None),
    ("installed", False),
    ("last_commit", None),
    ("last_version", None),
    ("manifest_name", None),
    ("open_issues", 0),
    ("prerelease", None),
    ("published_tags", []),
    ("releases", False),
    ("selected_tag", None),
    ("show_beta", False),
)
class HacsData:
    """Persistence layer for HACS: exports repository state to HA storage and restores it."""

    def __init__(self, hacs: HacsBase):
        """Initialize."""
        self.logger = LOGGER
        self.hacs = hacs
        # Scratch buffer rebuilt on every write; maps repository id (or
        # category, in the experimental format) to exported data.
        self.content = {}

    async def async_force_write(self, _=None):
        """Write data to the store even while HACS is disabled."""
        await self.async_write(force=True)

    async def async_write(self, force: bool = False) -> None:
        """Write content to the store files."""
        if not force and self.hacs.system.disabled:
            return
        self.logger.debug("<HacsData async_write> Saving data")

        # Hacs
        await async_save_to_store(
            self.hacs.hass,
            "hacs",
            {
                "archived_repositories": self.hacs.common.archived_repositories,
                "renamed_repositories": self.hacs.common.renamed_repositories,
                "ignored_repositories": self.hacs.common.ignored_repositories,
            },
        )
        await self._async_store_experimental_content_and_repos()
        await self._async_store_content_and_repos()

    async def _async_store_content_and_repos(self, _=None):  # bb: ignore
        """Store the main repos file and each repo that is out of date."""
        # Repositories
        self.content = {}
        for repository in self.hacs.repositories.list_all:
            if repository.data.category in self.hacs.common.categories:
                self.async_store_repository_data(repository)

        await async_save_to_store(self.hacs.hass, "repositories", self.content)
        # Notify the frontend that stored data changed.
        for event in (HacsDispatchEvent.REPOSITORY, HacsDispatchEvent.CONFIG):
            self.hacs.async_dispatch(event, {})

    async def _async_store_experimental_content_and_repos(self, _=None):
        """Store the experimental-format data file (repositories grouped by category)."""
        # Repositories
        self.content = {}
        for repository in self.hacs.repositories.list_all:
            if repository.data.category in self.hacs.common.categories:
                self.async_store_experimental_repository_data(repository)

        await async_save_to_store(self.hacs.hass, "data", {"repositories": self.content})

    @callback
    def async_store_repository_data(self, repository: HacsRepository) -> None:
        """Add one repository's exported data to self.content, keyed by repository id."""
        data = {"repository_manifest": repository.repository_manifest.manifest}

        # Only export attributes that differ from their default.
        for key, default in (
            EXPORTED_DOWNLOADED_REPOSITORY_DATA
            if repository.data.installed
            else EXPORTED_REPOSITORY_DATA
        ):
            if (value := getattr(repository.data, key, default)) != default:
                data[key] = value

        if repository.data.installed_version:
            data["version_installed"] = repository.data.installed_version
        if repository.data.last_fetched:
            data["last_fetched"] = repository.data.last_fetched.timestamp()

        self.content[str(repository.data.id)] = data

    @callback
    def async_store_experimental_repository_data(self, repository: HacsRepository) -> None:
        """Store the experimental repository data for non downloaded repositories."""
        data = {}
        self.content.setdefault(repository.data.category, [])

        if repository.data.installed:
            # Downloaded repositories keep the full exported data set.
            data["repository_manifest"] = repository.repository_manifest.manifest
            for key, default in EXPORTED_DOWNLOADED_REPOSITORY_DATA:
                if (value := getattr(repository.data, key, default)) != default:
                    data[key] = value
            if repository.data.installed_version:
                data["version_installed"] = repository.data.installed_version
            if repository.data.last_fetched:
                data["last_fetched"] = repository.data.last_fetched.timestamp()
        else:
            # Non-downloaded repositories only keep the minimal base data.
            for key, default in EXPORTED_BASE_DATA:
                if (value := getattr(repository.data, key, default)) != default:
                    data[key] = value

        self.content[repository.data.category].append({"id": str(repository.data.id), **data})

    async def restore(self):
        """Restore saved data.

        Returns True on success (including a fresh install with no stores),
        False when the stores cannot be read or restoring fails.
        """
        self.hacs.status.new = False
        repositories = {}
        hacs = {}

        # The "hacs" store is optional; ignore read failures.
        try:
            hacs = await async_load_from_store(self.hacs.hass, "hacs") or {}
        except HomeAssistantError:
            pass

        try:
            repositories = await async_load_from_store(self.hacs.hass, "repositories")
            # Fall back to the experimental "data" store layout when the
            # legacy "repositories" store is empty.
            if not repositories and (data := await async_load_from_store(self.hacs.hass, "data")):
                for category, entries in data.get("repositories", {}).items():
                    for repository in entries:
                        repositories[repository["id"]] = {"category": category, **repository}
        except HomeAssistantError as exception:
            self.hacs.log.error(
                "Could not read %s, restore the file from a backup - %s",
                self.hacs.hass.config.path(".storage/hacs.data"),
                exception,
            )
            self.hacs.disable_hacs(HacsDisabledReason.RESTORE)
            return False

        if not hacs and not repositories:
            # Assume new install
            self.hacs.status.new = True
            return True

        self.logger.info("<HacsData restore> Restore started")

        # Hacs
        self.hacs.common.archived_repositories = set()
        self.hacs.common.ignored_repositories = set()
        self.hacs.common.renamed_repositories = {}

        # Clear out double renamed values
        renamed = hacs.get("renamed_repositories", {})
        for entry in renamed:
            value = renamed.get(entry)
            if value not in renamed:
                self.hacs.common.renamed_repositories[entry] = value

        # Clear out double archived values
        for entry in hacs.get("archived_repositories", set()):
            if entry not in self.hacs.common.archived_repositories:
                self.hacs.common.archived_repositories.add(entry)

        # Clear out double ignored values
        for entry in hacs.get("ignored_repositories", set()):
            if entry not in self.hacs.common.ignored_repositories:
                self.hacs.common.ignored_repositories.add(entry)

        try:
            await self.register_unknown_repositories(repositories)

            for entry, repo_data in repositories.items():
                if entry == "0":
                    # Ignore repositories with ID 0
                    self.logger.debug(
                        "<HacsData restore> Found repository with ID %s - %s", entry, repo_data
                    )
                    continue
                self.async_restore_repository(entry, repo_data)

            self.logger.info("<HacsData restore> Restore done")
        except (
            # lgtm [py/catch-base-exception] pylint: disable=broad-except
            BaseException
        ) as exception:
            self.logger.critical(
                "<HacsData restore> [%s] Restore Failed!", exception, exc_info=exception
            )
            return False

        return True

    async def register_unknown_repositories(
        self, repositories: dict[str, dict[str, Any]], category: str | None = None
    ):
        """Register any unknown repositories."""
        for repo_idx, (entry, repo_data) in enumerate(repositories.items()):
            # async_register_repository is awaited in a loop
            # since its unlikely to ever suspend at startup
            if (
                entry == "0"
                or repo_data.get("category", category) is None
                or self.hacs.repositories.is_registered(repository_id=entry)
            ):
                continue
            await self.hacs.async_register_repository(
                repository_full_name=repo_data["full_name"],
                category=repo_data.get("category", category),
                check=False,
                repository_id=entry,
            )
            if repo_idx % 100 == 0:
                # yield to avoid blocking the event loop
                await asyncio.sleep(0)

    @callback
    def async_restore_repository(self, entry: str, repository_data: dict[str, Any]):
        """Restore a single repository's attributes from stored data."""
        repository: HacsRepository | None = None
        # Prefer lookup by full_name (survives ID changes), fall back to ID.
        if full_name := repository_data.get("full_name"):
            repository = self.hacs.repositories.get_by_full_name(full_name)
        if not repository:
            repository = self.hacs.repositories.get_by_id(entry)
        if not repository:
            return
        try:
            self.hacs.repositories.set_repository_id(repository, entry)
        except ValueError as exception:
            self.logger.warning("<HacsData async_restore_repository> duplicate IDs %s", exception)
            return

        # Restore repository attributes
        repository.data.authors = repository_data.get("authors", [])
        repository.data.description = repository_data.get("description", "")
        repository.data.downloads = repository_data.get("downloads", 0)
        repository.data.last_updated = repository_data.get("last_updated", 0)
        if self.hacs.system.generator:
            # Extra fields only tracked when generating the default data set.
            repository.data.etag_releases = repository_data.get("etag_releases")
            repository.data.open_issues = repository_data.get("open_issues", 0)
        repository.data.etag_repository = repository_data.get("etag_repository")
        repository.data.topics = [
            topic for topic in repository_data.get("topics", []) if topic not in TOPIC_FILTER
        ]
        repository.data.domain = repository_data.get("domain")
        # "stars" is the legacy key for "stargazers_count".
        repository.data.stargazers_count = repository_data.get(
            "stargazers_count"
        ) or repository_data.get("stars", 0)
        repository.releases.last_release = repository_data.get("last_release_tag")
        repository.data.releases = repository_data.get("releases", False)
        repository.data.installed = repository_data.get("installed", False)
        repository.data.new = repository_data.get("new", False)
        repository.data.selected_tag = repository_data.get("selected_tag")
        repository.data.show_beta = repository_data.get("show_beta", False)
        repository.data.last_version = repository_data.get("last_version")
        repository.data.prerelease = repository_data.get("prerelease")
        repository.data.last_commit = repository_data.get("last_commit")
        repository.data.installed_version = repository_data.get("version_installed")
        repository.data.installed_commit = repository_data.get("installed_commit")
        repository.data.manifest_name = repository_data.get("manifest_name")

        if last_fetched := repository_data.get("last_fetched"):
            repository.data.last_fetched = datetime.fromtimestamp(last_fetched, UTC)

        # "manifest" is the legacy key for "repository_manifest".
        repository.repository_manifest = HacsManifest.from_dict(
            repository_data.get("manifest") or repository_data.get("repository_manifest") or {}
        )

        # A prerelease equal to the latest release carries no information.
        if repository.data.prerelease == repository.data.last_version:
            repository.data.prerelease = None

        if repository.localpath is not None and is_safe(self.hacs, repository.localpath):
            # Set local path
            repository.content.path.local = repository.localpath

        if repository.data.installed:
            repository.data.first_install = False

        if entry == HACS_REPOSITORY_ID:
            # HACS itself: the running version is authoritative.
            repository.data.installed_version = self.hacs.version
            repository.data.installed = True

View File

@@ -0,0 +1,8 @@
"""Util to decode content from the github API."""
from base64 import b64decode
def decode_content(content: str) -> str:
    """Return the base64 encoded GitHub API content decoded to a string."""
    raw = content.encode("utf-8")
    return b64decode(raw).decode()

View File

@@ -0,0 +1,43 @@
"""HACS Decorators."""
from __future__ import annotations
import asyncio
from collections.abc import Coroutine
from functools import wraps
from typing import TYPE_CHECKING, Any
from ..const import DEFAULT_CONCURRENT_BACKOFF_TIME, DEFAULT_CONCURRENT_TASKS
if TYPE_CHECKING:
from ..base import HacsBase
def concurrent(
    concurrenttasks: int = DEFAULT_CONCURRENT_TASKS,
    backoff_time: int = DEFAULT_CONCURRENT_BACKOFF_TIME,
) -> Coroutine[Any, Any, None]:
    """Decorator factory limiting how many calls of a coroutine run concurrently.

    concurrenttasks: maximum simultaneous executions (shared semaphore).
    backoff_time: seconds slept after a call, throttling successive runs.
    """
    max_concurrent = asyncio.Semaphore(concurrenttasks)

    def inner_function(function) -> Coroutine[Any, Any, None]:
        @wraps(function)
        async def wrapper(*args, **kwargs) -> Any:
            # args[0] is the bound instance; its .hacs (if any) is used to
            # decide whether the backoff sleep can be skipped.
            hacs: HacsBase = getattr(args[0], "hacs", None)
            async with max_concurrent:
                result = await function(*args, **kwargs)
                # Backoff is skipped only for "update" methods once the queue
                # has drained.
                # NOTE(review): this also sleeps when hacs/queue is
                # unavailable — confirm that is the intended default.
                if (
                    hacs is None
                    or hacs.queue is None
                    or hacs.queue.has_pending_tasks
                    or "update" not in function.__name__
                ):
                    await asyncio.sleep(backoff_time)
                return result

        return wrapper

    return inner_function

View File

@@ -0,0 +1,42 @@
"""File system functions."""
from __future__ import annotations
import os
import shutil
from typing import TypeAlias
from homeassistant.core import HomeAssistant
# From typeshed
# Any value accepted by os path functions: a path string/bytes or PathLike.
StrOrBytesPath: TypeAlias = str | bytes | os.PathLike[str] | os.PathLike[bytes]
# Same, plus an already-open OS-level file descriptor.
FileDescriptorOrPath: TypeAlias = int | StrOrBytesPath
async def async_exists(hass: HomeAssistant, path: FileDescriptorOrPath) -> bool:
    """Test whether a path exists without blocking the event loop."""
    exists: bool = await hass.async_add_executor_job(os.path.exists, path)
    return exists
async def async_remove(
    hass: HomeAssistant, path: StrOrBytesPath, *, missing_ok: bool = False
) -> None:
    """Remove a file without blocking the event loop.

    Raises FileNotFoundError when the path is absent unless missing_ok is set.
    """
    try:
        await hass.async_add_executor_job(os.remove, path)
    except FileNotFoundError:
        if not missing_ok:
            raise
async def async_remove_directory(
    hass: HomeAssistant, path: StrOrBytesPath, *, missing_ok: bool = False
) -> None:
    """Recursively remove a directory without blocking the event loop.

    Raises FileNotFoundError when the path is absent unless missing_ok is set.
    """
    try:
        await hass.async_add_executor_job(shutil.rmtree, path)
    except FileNotFoundError:
        if not missing_ok:
            raise

View File

@@ -0,0 +1,47 @@
"""Filter functions."""
from __future__ import annotations
from typing import Any
def filter_content_return_one_of_type(
    content: list[str | Any],
    namestartswith: str,
    filterfiltype: str,
    attr: str = "name",
) -> list[str]:
    """Only match 1 of the filter.

    Keep every entry whose name starts with ``namestartswith``, but of the
    entries that also end with ``.filterfiltype`` keep only the first one.
    Entries may be plain strings or objects carrying the name in ``attr``.
    """
    suffix = f".{filterfiltype}"
    selected = []
    matched_filetype = False
    for item in content:
        name = item if isinstance(item, str) else getattr(item, attr)
        if not name.startswith(namestartswith):
            continue
        if name.endswith(suffix):
            # Only the first entry of the filtered file type survives.
            if not matched_filetype:
                selected.append(item)
                matched_filetype = True
        else:
            selected.append(item)
    return selected
def get_first_directory_in_directory(content: list[str | Any], dirname: str) -> str | None:
    """Return the filename of the first directory found under dirname, or None."""
    for entry in content:
        inside = entry.full_path.startswith(dirname) and entry.full_path != dirname
        if inside and entry.is_directory:
            return entry.filename
    return None

View File

@@ -0,0 +1,19 @@
"""GitHub GraphQL Queries."""
GET_REPOSITORY_RELEASES = """
query ($owner: String!, $name: String!, $first: Int!) {
rateLimit {
cost
}
repository(owner: $owner, name: $name) {
releases(first: $first, orderBy: {field: CREATED_AT, direction: DESC}) {
nodes {
tagName
name
isPrerelease
publishedAt
}
}
}
}
"""

View File

@@ -0,0 +1,5 @@
"""JSON utils."""
from homeassistant.util.json import json_loads
__all__ = ["json_loads"]

View File

@@ -0,0 +1,7 @@
"""Custom logger for HACS."""
import logging
from ..const import PACKAGE_NAME
LOGGER: logging.Logger = logging.getLogger(PACKAGE_NAME)

View File

@@ -0,0 +1,41 @@
"""Path utils"""
from __future__ import annotations
from functools import lru_cache
from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ..base import HacsBase
@lru_cache(maxsize=1)
def _get_safe_paths(
config_path: str,
appdaemon_path: str,
plugin_path: str,
python_script_path: str,
theme_path: str,
) -> set[str]:
"""Get safe paths."""
return {
Path(f"{config_path}/{appdaemon_path}").as_posix(),
Path(f"{config_path}/{plugin_path}").as_posix(),
Path(f"{config_path}/{python_script_path}").as_posix(),
Path(f"{config_path}/{theme_path}").as_posix(),
Path(f"{config_path}/custom_components/").as_posix(),
Path(f"{config_path}/custom_templates/").as_posix(),
}
def is_safe(hacs: HacsBase, path: str | Path) -> bool:
    """Return True if path may be removed.

    A path is unsafe when it is one of the HACS-managed base directories
    themselves (appdaemon, plugin, python_script, theme, custom_components,
    custom_templates) — deleting those would wipe unrelated content.
    """
    configuration = hacs.configuration
    return Path(path).as_posix() not in _get_safe_paths(
        hacs.core.config_path,
        configuration.appdaemon_path,
        configuration.plugin_path,
        configuration.python_script_path,
        configuration.theme_path,
    )

View File

@@ -0,0 +1,82 @@
"""The QueueManager class."""
from __future__ import annotations
import asyncio
from collections.abc import Coroutine
import time
from homeassistant.core import HomeAssistant
from ..exceptions import HacsExecutionStillInProgress
from .logger import LOGGER
_LOGGER = LOGGER
class QueueManager:
    """Collects coroutines and executes them together via asyncio.gather."""

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the queue manager."""
        self.hass = hass
        # Pending coroutine objects, awaited (and removed) by execute().
        self.queue: list[Coroutine] = []
        # True while execute() is running; guards against concurrent runs.
        self.running = False

    @property
    def pending_tasks(self) -> int:
        """Return a count of pending tasks in the queue."""
        return len(self.queue)

    @property
    def has_pending_tasks(self) -> bool:
        """Return True when there are pending tasks in the queue."""
        return self.pending_tasks != 0

    def clear(self) -> None:
        """Clear the queue."""
        self.queue = []

    def add(self, task: Coroutine) -> None:
        """Add a task to the queue."""
        self.queue.append(task)

    async def execute(self, number_of_tasks: int | None = None) -> None:
        """Execute tasks in the queue (all, or the first number_of_tasks).

        Raises HacsExecutionStillInProgress when already running. Exceptions
        from individual tasks are logged, not raised.
        """
        if self.running:
            _LOGGER.debug("<QueueManager> Execution is already running")
            raise HacsExecutionStillInProgress
        if len(self.queue) == 0:
            _LOGGER.debug("<QueueManager> The queue is empty")
            return

        # NOTE(review): running is not reset in a finally block; an unexpected
        # exception below would leave the manager locked — confirm acceptable.
        self.running = True

        _LOGGER.debug("<QueueManager> Checking out tasks to execute")
        local_queue = []

        if number_of_tasks:
            for task in self.queue[:number_of_tasks]:
                local_queue.append(task)
        else:
            for task in self.queue:
                local_queue.append(task)

        _LOGGER.debug("<QueueManager> Starting queue execution for %s tasks", len(local_queue))

        start = time.time()
        # return_exceptions=True: a failing task does not cancel the others.
        result = await asyncio.gather(*local_queue, return_exceptions=True)
        for entry in result:
            if isinstance(entry, Exception):
                _LOGGER.error("<QueueManager> %s", entry)
        end = time.time() - start

        # Drop the executed tasks from the shared queue.
        for task in local_queue:
            self.queue.remove(task)

        _LOGGER.debug(
            "<QueueManager> Queue execution finished for %s tasks finished in %.2f seconds",
            len(local_queue),
            end,
        )

        if self.has_pending_tasks:
            _LOGGER.debug("<QueueManager> %s tasks remaining in the queue", len(self.queue))
        self.running = False

View File

@@ -0,0 +1,17 @@
"""Regex utils"""
from __future__ import annotations
import re
# Captures the "owner/repo" slug from a bare slug or any github.com URL,
# dropping an optional ".git" suffix or trailing path segments.
RE_REPOSITORY = re.compile(
    r"(?:(?:.*github.com.)|^)([A-Za-z0-9-]+\/[\w.-]+?)(?:(?:\.git)?|(?:[^\w.-].*)?)$"
)


def extract_repository_from_url(url: str) -> str | None:
    """Extract the lower-cased owner/repo part from a URL, or None if absent."""
    if (match := RE_REPOSITORY.match(url)) is None:
        return None
    return match.group(1).lower()

View File

@@ -0,0 +1,79 @@
"""Storage handers."""
from homeassistant.helpers.json import JSONEncoder
from homeassistant.helpers.storage import Store
from homeassistant.util import json as json_util
from ..const import VERSION_STORAGE
from ..exceptions import HacsException
from .logger import LOGGER
_LOGGER = LOGGER
class HACSStore(Store):
    """A subclass of Store that allows multiple loads in the executor."""

    def load(self):
        """Load the data from disk if version matches.

        Returns the stored "data" payload, or None when the file is empty or
        was written with a different storage version. Raises HacsException
        when the file cannot be read or parsed.
        """
        try:
            data = json_util.load_json(self.path)
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            _LOGGER.critical(
                "Could not load '%s', restore it from a backup or delete the file: %s",
                self.path,
                exception,
            )
            raise HacsException(exception) from exception
        # NOTE(review): assumes any non-empty payload carries a "version" key;
        # a dict without it would raise KeyError — confirm upstream guarantees.
        if data == {} or data["version"] != self.version:
            return None
        return data["data"]
def get_store_key(key):
    """Return the storage key used with homeassistant.helpers.storage.Store."""
    # Keys containing "/" are already fully qualified; plain keys are
    # namespaced under the "hacs." prefix.
    if "/" in key:
        return key
    return f"hacs.{key}"
def _get_store_for_key(hass, key, encoder):
    """Create a HACSStore for key using the given JSON encoder."""
    # atomic_writes avoids corrupt store files if HA stops mid-write.
    return HACSStore(hass, VERSION_STORAGE, get_store_key(key), encoder=encoder, atomic_writes=True)
def get_store_for_key(hass, key):
    """Create a Store object for the key, using HA's default JSONEncoder."""
    return _get_store_for_key(hass, key, JSONEncoder)
async def async_load_from_store(hass, key):
    """Load retained data from the store and return the de-serialized content."""
    store = get_store_for_key(hass, key)
    data = await store.async_load()
    return data or {}
async def async_save_to_store(hass, key, data):
    """Save data to the filesystem only when it differs from the stored content.

    Reading the existing content first means unchanged data costs a single
    executor job while changed data costs two (read + write).
    """
    current = await async_load_from_store(hass, key)
    if current is not None and current == data:
        _LOGGER.debug(
            "<HACSStore async_save_to_store> Did not store data for '%s'. Content did not change",
            get_store_key(key),
        )
        return
    await get_store_for_key(hass, key).async_save(data)
async def async_remove_store(hass, key):
    """Remove a store element that should no longer be used."""
    # Only fully qualified keys (containing "/") are eligible for removal.
    if "/" in key:
        await get_store_for_key(hass, key).async_remove()

View File

@@ -0,0 +1,30 @@
"""Various URL utils for HACS."""
import re
from typing import Literal
# Matches a full 40-character hexadecimal git commit SHA.
GIT_SHA = re.compile(r"^[a-fA-F0-9]{40}$")


def github_release_asset(
    *,
    repository: str,
    version: str,
    filename: str,
    **_,
) -> str:
    """Generate a download URL for a release asset.

    repository: the "owner/repo" slug.
    version: the release tag.
    filename: the asset file name within that release.
    """
    # Fix: the URL previously ended in a hard-coded placeholder instead of
    # the filename parameter, which was otherwise unused.
    return f"https://github.com/{repository}/releases/download/{version}/{filename}"


def github_archive(
    *,
    repository: str,
    version: str,
    variant: Literal["heads", "tags"] = "heads",
    **_,
) -> str:
    """Generate a download URL for a repository zip.

    version may be a branch name (variant="heads"), a tag (variant="tags"),
    or a full 40-char commit SHA, which uses the commit archive form.
    """
    if GIT_SHA.match(version):
        return f"https://github.com/{repository}/archive/{version}.zip"
    return f"https://github.com/{repository}/archive/refs/{variant}/{version}.zip"

View File

@@ -0,0 +1,215 @@
"""Validation utilities."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass, field
from typing import Any
from awesomeversion import AwesomeVersion
from homeassistant.helpers.config_validation import url as url_validator
import voluptuous as vol
from ..const import LOCALE
@dataclass
class Validate:
    """Holds the outcome of a validation run."""

    # Collected human-readable error messages; empty means success.
    errors: list[str] = field(default_factory=list)

    @property
    def success(self) -> bool:
        """Return True when no errors were collected."""
        return not self.errors
def _country_validator(values) -> list[str]:
    """Validate a country code (or list of codes) against the known locales.

    Returns the upper-cased codes; raises vol.Invalid for wrong types or
    unknown countries.
    """
    if isinstance(values, str):
        countries = [values.upper()]
    elif isinstance(values, list):
        countries = [value.upper() for value in values]
    else:
        raise vol.Invalid(f"Value '{values}' is not a string or list.", path=["country"])

    for country in countries:
        if country not in LOCALE:
            raise vol.Invalid(f"Value '{country}' is not in {LOCALE}.", path=["country"])
    return countries
# Schema for the hacs.json manifest shipped in each repository.
# PREVENT_EXTRA: unknown keys make validation fail.
HACS_MANIFEST_JSON_SCHEMA = vol.Schema(
    {
        vol.Optional("content_in_root"): bool,
        vol.Optional("country"): _country_validator,
        vol.Optional("filename"): str,
        vol.Optional("hacs"): str,
        vol.Optional("hide_default_branch"): bool,
        vol.Optional("homeassistant"): str,
        vol.Optional("persistent_directory"): str,
        vol.Optional("render_readme"): bool,
        vol.Optional("zip_release"): bool,
        vol.Required("name"): str,
    },
    extra=vol.PREVENT_EXTRA,
)

# Schema for a Home Assistant integration's manifest.json.
# ALLOW_EXTRA: integrations may carry additional keys.
INTEGRATION_MANIFEST_JSON_SCHEMA = vol.Schema(
    {
        vol.Required("codeowners"): list,
        vol.Required("documentation"): url_validator,
        vol.Required("domain"): str,
        vol.Required("issue_tracker"): url_validator,
        vol.Required("name"): str,
        vol.Required("version"): vol.Coerce(AwesomeVersion),
    },
    extra=vol.ALLOW_EXTRA,
)
def validate_repo_data(schema: dict[str, Any], extra: int) -> Callable[[Any], Any]:
    """Return a validator for repo data.

    Unlike vol.All, this always runs both the repo schema and the
    validate_version check, so a single pass reports errors from both.
    """
    compiled = vol.Schema(schema, extra=extra)

    def _validator(data: Any) -> Any:
        """Validate repo data against the schema and the version requirement."""
        collected: vol.MultipleInvalid | None = None
        try:
            compiled(data)
        except vol.MultipleInvalid as err:
            collected = err
        try:
            validate_version(data)
        except vol.Invalid as err:
            if collected is None:
                raise
            collected.add(err)
        if collected is not None:
            raise collected
        return data

    return _validator
def validate_version(data: Any) -> Any:
    """Ensure at least one of last_commit or last_version is present."""
    if "last_commit" in data or "last_version" in data:
        return data
    raise vol.Invalid("Expected at least one of [`last_commit`, `last_version`], got none")
# Keys shared by all categories in the generated V2 data files.
V2_COMMON_DATA_JSON_SCHEMA = {
    vol.Required("description"): vol.Any(str, None),
    vol.Optional("downloads"): int,
    vol.Optional("etag_releases"): str,
    vol.Required("etag_repository"): str,
    vol.Required("full_name"): str,
    vol.Optional("last_commit"): str,
    vol.Required("last_fetched"): vol.Any(int, float),
    vol.Required("last_updated"): str,
    vol.Optional("last_version"): str,
    vol.Optional("prerelease"): str,
    vol.Required("manifest"): {
        vol.Optional("country"): vol.Any([str], False),
        vol.Optional("name"): str,
    },
    vol.Optional("open_issues"): int,
    vol.Optional("stargazers_count"): int,
    vol.Optional("topics"): [str],
}

# Integrations additionally require the HA domain and manifest name.
V2_INTEGRATION_DATA_JSON_SCHEMA = {
    **V2_COMMON_DATA_JSON_SCHEMA,
    vol.Required("domain"): str,
    vol.Required("manifest_name"): str,
}

# Per-category repo schema, used to build the validator maps below.
_V2_REPO_SCHEMAS = {
    "appdaemon": V2_COMMON_DATA_JSON_SCHEMA,
    "integration": V2_INTEGRATION_DATA_JSON_SCHEMA,
    "plugin": V2_COMMON_DATA_JSON_SCHEMA,
    "python_script": V2_COMMON_DATA_JSON_SCHEMA,
    "template": V2_COMMON_DATA_JSON_SCHEMA,
    "theme": V2_COMMON_DATA_JSON_SCHEMA,
}

# Used when validating repos in the hacs integration, discards extra keys
VALIDATE_FETCHED_V2_REPO_DATA = {
    category: validate_repo_data(schema, vol.REMOVE_EXTRA)
    for category, schema in _V2_REPO_SCHEMAS.items()
}

# Used when validating repos when generating data, fails on extra keys
VALIDATE_GENERATED_V2_REPO_DATA = {
    category: vol.Schema({str: validate_repo_data(schema, vol.PREVENT_EXTRA)})
    for category, schema in _V2_REPO_SCHEMAS.items()
}
# Schema for one entry in the critical-repositories data file.
V2_CRITICAL_REPO_DATA_SCHEMA = {
    vol.Required("link"): str,
    vol.Required("reason"): str,
    vol.Required("repository"): str,
}

# Used when validating critical repos in the hacs integration, discards extra keys
VALIDATE_FETCHED_V2_CRITICAL_REPO_SCHEMA = vol.Schema(
    V2_CRITICAL_REPO_DATA_SCHEMA,
    extra=vol.REMOVE_EXTRA,
)

# Used when validating critical repos when generating data, fails on extra keys
VALIDATE_GENERATED_V2_CRITICAL_REPO_SCHEMA = vol.Schema(
    [
        vol.Schema(
            V2_CRITICAL_REPO_DATA_SCHEMA,
            extra=vol.PREVENT_EXTRA,
        )
    ]
)

# Schema for one entry in the removed-repositories data file.
# removal_type is restricted to the historical set of reasons in use.
V2_REMOVED_REPO_DATA_SCHEMA = {
    vol.Optional("link"): str,
    vol.Optional("reason"): str,
    vol.Required("removal_type"): vol.In(
        [
            "Integration is missing a version, and is abandoned.",
            "Remove",
            "archived",
            "blacklist",
            "critical",
            "deprecated",
            "removal",
            "remove",
            "removed",
            "replaced",
            "repository",
        ]
    ),
    vol.Required("repository"): str,
}

# Used when validating removed repos in the hacs integration, discards extra keys
VALIDATE_FETCHED_V2_REMOVED_REPO_SCHEMA = vol.Schema(
    V2_REMOVED_REPO_DATA_SCHEMA,
    extra=vol.REMOVE_EXTRA,
)

# Used when validating removed repos when generating data, fails on extra keys
VALIDATE_GENERATED_V2_REMOVED_REPO_SCHEMA = vol.Schema(
    [
        vol.Schema(
            V2_REMOVED_REPO_DATA_SCHEMA,
            extra=vol.PREVENT_EXTRA,
        )
    ]
)

View File

@@ -0,0 +1,36 @@
"""Version utils."""
from __future__ import annotations
from functools import lru_cache
from awesomeversion import (
AwesomeVersion,
AwesomeVersionException,
AwesomeVersionStrategy,
)
@lru_cache(maxsize=1024)
def version_left_higher_then_right(left: str, right: str) -> bool | None:
    """Return True when left is newer than right, False when not, None when undecidable.

    Results are cached because the same version pairs are compared repeatedly.
    """
    try:
        lhs = AwesomeVersion(left)
        rhs = AwesomeVersion(right)
        unknown = AwesomeVersionStrategy.UNKNOWN
        if lhs.strategy != unknown and rhs.strategy != unknown:
            return lhs > rhs
    except (AwesomeVersionException, AttributeError, KeyError):
        pass
    return None
def version_left_higher_or_equal_then_right(left: str, right: str) -> bool:
    """Return True when left is newer than, or identical to, right."""
    return True if left == right else version_left_higher_then_right(left, right)

View File

@@ -0,0 +1,37 @@
"""Workarounds."""
from homeassistant.core import HomeAssistant
# Map of repository full_name -> integration domain, for repositories whose
# domain cannot be derived from the repository itself.
DOMAIN_OVERRIDES = {
    # https://github.com/hacs/integration/issues/2465
    "custom-components/sensor.custom_aftership": "custom_aftership"
}
try:
    # Home Assistant 2024.7+ exposes StaticPathConfig and the async
    # registration API; prefer it when available.
    from homeassistant.components.http import StaticPathConfig

    async def async_register_static_path(
        hass: HomeAssistant,
        url_path: str,
        path: str,
        cache_headers: bool = True,
    ) -> None:
        """Register a static path with the HTTP component."""
        await hass.http.async_register_static_paths(
            [StaticPathConfig(url_path, path, cache_headers)]
        )

except ImportError:

    async def async_register_static_path(
        hass: HomeAssistant,
        url_path: str,
        path: str,
        cache_headers: bool = True,
    ) -> None:
        """Register a static path with the HTTP component.

        Legacy: Can be removed when min version is 2024.7
        https://developers.home-assistant.io/blog/2024/06/18/async_register_static_paths/
        """
        hass.http.register_static_path(url_path, path, cache_headers)