Compare commits
2 Commits: 5731827224 ... e2f4adb22e

| Author | SHA1 | Date |
|---|---|---|
| | e2f4adb22e | |
| | 1fec8313d4 | |
@ -1,7 +1,33 @@
|
|||
"""The Sencor SWS 12500 Weather Station integration."""
|
||||
"""Sencor SWS 12500 Weather Station integration (push/webhook based).
|
||||
|
||||
Architecture overview
|
||||
---------------------
|
||||
This integration is *push-based*: the weather station calls our HTTP endpoint and we
|
||||
receive a query payload. We do not poll the station.
|
||||
|
||||
Key building blocks:
|
||||
- `WeatherDataUpdateCoordinator` acts as an in-memory "data bus" for the latest payload.
|
||||
On each webhook request we call `async_set_updated_data(...)` and all `CoordinatorEntity`
|
||||
sensors get notified and update their states.
|
||||
- `hass.data[DOMAIN][entry_id]` is a per-entry *dict* that stores runtime state
|
||||
(coordinator instance, options snapshot, and sensor platform callbacks). Keeping this
|
||||
structure consistent is critical; mixing different value types under the same key can
|
||||
break listener wiring and make the UI appear "frozen".
|
||||
|
||||
Auto-discovery
|
||||
--------------
|
||||
When the station starts sending a new field, we:
|
||||
1) persist the new sensor key into options (`SENSORS_TO_LOAD`)
|
||||
2) dynamically add the new entity through the sensor platform (without reloading)
|
||||
|
||||
Why avoid reload?
|
||||
Reloading a config entry unloads platforms temporarily, which removes coordinator listeners.
|
||||
With a high-frequency push source (webhook), a reload at the wrong moment can lead to a
|
||||
period where no entities are subscribed, causing stale states until another full reload/restart.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
import aiohttp.web
|
||||
from aiohttp.web_exceptions import HTTPUnauthorized
|
||||
|
|
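For illustration only (not part of the diff): the module docstring above describes the coordinator as an in-memory "data bus" that fans webhook payloads out to subscribed entities. Below is a minimal, plain-Python sketch of that mechanism; `MiniCoordinator` and `MiniSensor` are illustrative names, not code from this PR, and no Home Assistant imports are used.

```python
from typing import Any, Callable


class MiniCoordinator:
    """Minimal stand-in for DataUpdateCoordinator used as a push 'data bus'."""

    def __init__(self) -> None:
        self.data: dict[str, Any] = {}
        self._listeners: list[Callable[[], None]] = []

    def async_add_listener(self, listener: Callable[[], None]) -> None:
        # Entities subscribe here; they are notified on every push.
        self._listeners.append(listener)

    def async_set_updated_data(self, data: dict[str, Any]) -> None:
        # Called by the webhook handler for each incoming payload.
        self.data = data
        for listener in self._listeners:
            listener()


class MiniSensor:
    """Entity-like object that re-reads coordinator.data when notified."""

    def __init__(self, coordinator: MiniCoordinator, key: str) -> None:
        self.coordinator = coordinator
        self.key = key
        self.state: Any = None
        coordinator.async_add_listener(self._handle_coordinator_update)

    def _handle_coordinator_update(self) -> None:
        self.state = self.coordinator.data.get(self.key)


coordinator = MiniCoordinator()
sensor = MiniSensor(coordinator, "outside_temp")
coordinator.async_set_updated_data({"outside_temp": "21.5"})
print(sensor.state)  # prints: 21.5
```

If a reload drops the listeners while pushes keep arriving, `async_set_updated_data` still runs but nothing reacts, which is exactly the "frozen UI" window the docstring warns about.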
@ -21,7 +47,6 @@ from .const import (
|
|||
API_ID,
|
||||
API_KEY,
|
||||
DEFAULT_URL,
|
||||
DEV_DBG,
|
||||
DOMAIN,
|
||||
POCASI_CZ_ENABLED,
|
||||
SENSORS_TO_LOAD,
|
||||
|
|
@ -29,6 +54,7 @@ from .const import (
|
|||
WSLINK,
|
||||
WSLINK_URL,
|
||||
)
|
||||
from .data import ENTRY_COORDINATOR, ENTRY_LAST_OPTIONS
|
||||
from .pocasti_cz import PocasiPush
|
||||
from .routes import Routes
|
||||
from .utils import (
|
||||
|
|
@ -51,25 +77,54 @@ class IncorrectDataError(InvalidStateError):
|
|||
"""Invalid exception."""
|
||||
|
||||
|
||||
# NOTE:
|
||||
# We intentionally avoid importing the sensor platform module at import-time here.
|
||||
# Home Assistant can import modules in different orders; keeping imports acyclic
|
||||
# prevents "partially initialized module" failures (circular imports / partially initialized modules).
|
||||
#
|
||||
# When we need to dynamically add sensors, we do a local import inside the webhook handler.
|
||||
|
||||
|
||||
class WeatherDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Manage fetched data."""
|
||||
"""Coordinator for push updates.
|
||||
|
||||
Even though Home Assistant's `DataUpdateCoordinator` is often used for polling,
|
||||
it also works well as a "fan-out" mechanism for push integrations:
|
||||
- webhook handler updates `self.data` via `async_set_updated_data`
|
||||
- all `CoordinatorEntity` instances subscribed to this coordinator update themselves
|
||||
"""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigEntry) -> None:
|
||||
"""Init global updater."""
|
||||
"""Initialize the coordinator.
|
||||
|
||||
`config` is the config entry for this integration instance. We store it because
|
||||
the webhook handler needs access to options (auth data, enabled features, etc.).
|
||||
"""
|
||||
self.hass: HomeAssistant = hass
|
||||
self.config: ConfigEntry = config
|
||||
self.windy: WindyPush = WindyPush(hass, config)
|
||||
self.pocasi: PocasiPush = PocasiPush(hass, config)
|
||||
super().__init__(hass, _LOGGER, name=DOMAIN)
|
||||
|
||||
async def recieved_data(self, webdata: aiohttp.web.Request) -> aiohttp.web.Response:
|
||||
"""Handle incoming data query."""
|
||||
async def received_data(self, webdata: aiohttp.web.Request) -> aiohttp.web.Response:
|
||||
"""Handle incoming webhook payload from the station.
|
||||
|
||||
This method:
|
||||
- validates authentication (different keys for WU vs WSLink)
|
||||
- optionally forwards data to third-party services (Windy / Pocasi)
|
||||
- remaps payload keys to internal sensor keys
|
||||
- auto-discovers new sensor fields and adds entities dynamically
|
||||
- updates coordinator data so existing entities refresh immediately
|
||||
"""
|
||||
|
||||
# WSLink uses different auth and payload field naming than the legacy endpoint.
|
||||
_wslink: bool = checked_or(self.config.options.get(WSLINK), bool, False)
|
||||
|
||||
# Incoming station payload is delivered as query params.
|
||||
# We copy it to a plain dict so it can be passed around safely.
|
||||
data: dict[str, Any] = dict(webdata.query)
|
||||
|
||||
# Check if station is sending auth data
|
||||
# Validate auth keys (different parameter names depending on endpoint mode).
|
||||
if not _wslink and ("ID" not in data or "PASSWORD" not in data):
|
||||
_LOGGER.error("Invalid request. No security data provided!")
|
||||
raise HTTPUnauthorized
|
||||
|
|
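For context, here is a self-contained aiohttp sketch of the kind of query-parameter auth check performed in `received_data`. The endpoint path, port, and credentials are made up for illustration; the real integration registers its routes on Home Assistant's shared aiohttp app rather than running its own server.

```python
import aiohttp.web as web

EXPECTED_ID = "my-station-id"    # illustrative; the integration reads these from entry options
EXPECTED_KEY = "my-station-key"  # illustrative


async def received_data(request: web.Request) -> web.Response:
    # The station delivers its payload as GET query parameters.
    data = dict(request.query)

    if "ID" not in data or "PASSWORD" not in data:
        raise web.HTTPUnauthorized(text="No security data provided")

    if data["ID"] != EXPECTED_ID or data["PASSWORD"] != EXPECTED_KEY:
        raise web.HTTPUnauthorized(text="Unauthorised access")

    # ... remap keys, update the coordinator, forward to third parties ...
    return web.Response(text="OK", status=200)


app = web.Application()
app.add_routes([web.get("/weatherstation/update", received_data)])  # path is illustrative

if __name__ == "__main__":
    web.run_app(app, port=8080)
```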
@ -88,7 +143,8 @@ class WeatherDataUpdateCoordinator(DataUpdateCoordinator):
|
|||
id_data = data.get("ID", "")
|
||||
key_data = data.get("PASSWORD", "")
|
||||
|
||||
# Check if we have valid auth data in the integration
|
||||
# Validate credentials against the integration's configured options.
|
||||
# If auth doesn't match, we reject the request (prevents random pushes from the LAN/Internet).
|
||||
|
||||
if (_id := checked(self.config.options.get(API_ID), str)) is None:
|
||||
_LOGGER.error("We don't have API ID set! Update your config!")
|
||||
|
|
@ -102,16 +158,21 @@ class WeatherDataUpdateCoordinator(DataUpdateCoordinator):
|
|||
_LOGGER.error("Unauthorised access!")
|
||||
raise HTTPUnauthorized
|
||||
|
||||
# Optional forwarding to external services. This is kept here (in the webhook handler)
|
||||
# to avoid additional background polling tasks.
|
||||
if self.config.options.get(WINDY_ENABLED, False):
|
||||
await self.windy.push_data_to_windy(data)
|
||||
|
||||
if self.config.options.get(POCASI_CZ_ENABLED, False):
|
||||
await self.pocasi.push_data_to_server(data, "WSLINK" if _wslink else "WU")
|
||||
|
||||
# Convert raw payload keys to our internal sensor keys (stable identifiers).
|
||||
remaped_items: dict[str, str] = (
|
||||
remap_wslink_items(data) if _wslink else remap_items(data)
|
||||
)
|
||||
|
||||
# Auto-discovery: if payload contains keys that are not enabled/loaded yet,
|
||||
# add them to the option list and create entities dynamically.
|
||||
if sensors := check_disabled(remaped_items, self.config):
|
||||
if (
|
||||
translate_sensors := checked(
|
||||
|
|
@ -146,14 +207,36 @@ class WeatherDataUpdateCoordinator(DataUpdateCoordinator):
|
|||
"added",
|
||||
{"added_sensors": f"{human_readable}\n"},
|
||||
)
|
||||
if _loaded_sensors := loaded_sensors(self.config_entry):
|
||||
sensors.extend(_loaded_sensors)
|
||||
await update_options(self.hass, self.config_entry, SENSORS_TO_LOAD, sensors)
|
||||
# await self.hass.config_entries.async_reload(self.config.entry_id)
|
||||
|
||||
# Persist newly discovered sensor keys to options (so they remain enabled after restart).
|
||||
newly_discovered = list(sensors)
|
||||
|
||||
if _loaded_sensors := loaded_sensors(self.config):
|
||||
sensors.extend(_loaded_sensors)
|
||||
await update_options(self.hass, self.config, SENSORS_TO_LOAD, sensors)
|
||||
|
||||
# Dynamically add newly discovered sensors *without* reloading the entry.
|
||||
#
|
||||
# Why: Reloading a config entry unloads platforms temporarily. That removes coordinator
|
||||
# listeners; with frequent webhook pushes the UI can appear "frozen" until the listeners
|
||||
# are re-established. Dynamic adds avoid this window completely.
|
||||
#
|
||||
# We do a local import to avoid circular imports at module import time.
|
||||
#
|
||||
# NOTE: Some linters prefer top-level imports. In this case the local import is
|
||||
# intentional and prevents "partially initialized module" errors.
|
||||
|
||||
from .sensor import ( # noqa: PLC0415 (local import is intentional)
|
||||
add_new_sensors,
|
||||
)
|
||||
|
||||
add_new_sensors(self.hass, self.config, newly_discovered)
|
||||
|
||||
# Fan-out update: notify all subscribed entities.
|
||||
self.async_set_updated_data(remaped_items)
|
||||
|
||||
if self.config_entry.options.get(DEV_DBG):
|
||||
# Optional dev logging (keep it lightweight to avoid log spam under high-frequency updates).
|
||||
if self.config.options.get("dev_debug_checkbox"):
|
||||
_LOGGER.info("Dev log: %s", anonymize(data))
|
||||
|
||||
return aiohttp.web.Response(body="OK", status=200)
|
||||
|
|
@ -164,7 +247,12 @@ def register_path(
|
|||
coordinator: WeatherDataUpdateCoordinator,
|
||||
config: ConfigEntry,
|
||||
) -> bool:
|
||||
"""Register paths to webhook."""
|
||||
"""Register webhook paths.
|
||||
|
||||
We register both possible endpoints and use an internal dispatcher (`Routes`) to
|
||||
enable exactly one of them. This lets us toggle WSLink mode without re-registering
|
||||
routes on the aiohttp router.
|
||||
"""
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
if (hass_data := checked(hass.data[DOMAIN], dict[str, Any])) is None:
|
||||
|
|
@ -174,13 +262,13 @@ def register_path(
|
|||
|
||||
# Create internal route dispatcher with provided urls
|
||||
routes: Routes = Routes()
|
||||
routes.add_route(DEFAULT_URL, coordinator.recieved_data, enabled=not _wslink)
|
||||
routes.add_route(WSLINK_URL, coordinator.recieved_data, enabled=_wslink)
|
||||
routes.add_route(DEFAULT_URL, coordinator.received_data, enabled=not _wslink)
|
||||
routes.add_route(WSLINK_URL, coordinator.received_data, enabled=_wslink)
|
||||
|
||||
# Register webhooks in HomeAssistant with dispatcher
|
||||
try:
|
||||
_ = hass.http.app.router.add_get(DEFAULT_URL, routes.dispatch)
|
||||
_ = hass.http.app.router.add_get(WSLINK_URL, routes.dispatch)
|
||||
_ = hass.http.app.router.add_post(WSLINK_URL, routes.dispatch)
|
||||
|
||||
# Save initialised routes
|
||||
hass_data["routes"] = routes
|
||||
|
|
@ -195,15 +283,53 @@ def register_path(
|
|||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up the config entry for my device."""
|
||||
"""Set up a config entry.
|
||||
|
||||
coordinator = WeatherDataUpdateCoordinator(hass, entry)
|
||||
Important:
|
||||
- We store per-entry runtime state under `hass.data[DOMAIN][entry_id]` as a dict.
|
||||
- We reuse the same coordinator instance across reloads so that:
|
||||
- the webhook handler keeps updating the same coordinator
|
||||
- already-created entities remain subscribed
|
||||
|
||||
hass_data = hass.data.setdefault(DOMAIN, {})
|
||||
hass_data[entry.entry_id] = coordinator
|
||||
"""
|
||||
|
||||
hass_data_any = hass.data.setdefault(DOMAIN, {})
|
||||
hass_data = cast("dict[str, Any]", hass_data_any)
|
||||
|
||||
# Per-entry runtime storage:
|
||||
# hass.data[DOMAIN][entry_id] is always a dict (never the coordinator itself).
|
||||
# Mixing types here (sometimes dict, sometimes coordinator) is a common source of hard-to-debug
|
||||
# issues where entities stop receiving updates.
|
||||
entry_data_any = hass_data.get(entry.entry_id)
|
||||
if not isinstance(entry_data_any, dict):
|
||||
entry_data_any = {}
|
||||
hass_data[entry.entry_id] = entry_data_any
|
||||
entry_data = cast("dict[str, Any]", entry_data_any)
|
||||
|
||||
# Reuse the existing coordinator across reloads so webhook handlers and entities
|
||||
# remain connected to the same coordinator instance.
|
||||
#
|
||||
# Note: Routes store a bound method (`coordinator.received_data`). If we replaced the coordinator
|
||||
# instance on reload, the dispatcher could keep calling the old instance while entities listen
|
||||
# to the new one, causing updates to "disappear".
|
||||
coordinator_any = entry_data.get(ENTRY_COORDINATOR)
|
||||
if isinstance(coordinator_any, WeatherDataUpdateCoordinator):
|
||||
coordinator_any.config = entry
|
||||
|
||||
# Recreate helper instances so they pick up updated options safely.
|
||||
coordinator_any.windy = WindyPush(hass, entry)
|
||||
coordinator_any.pocasi = PocasiPush(hass, entry)
|
||||
coordinator = coordinator_any
|
||||
else:
|
||||
coordinator = WeatherDataUpdateCoordinator(hass, entry)
|
||||
entry_data[ENTRY_COORDINATOR] = coordinator
|
||||
|
||||
routes: Routes | None = hass_data.get("routes", None)
|
||||
|
||||
# Keep an options snapshot so update_listener can skip reloads when only `SENSORS_TO_LOAD` changes.
|
||||
# Auto-discovery updates this option frequently and we do not want to reload for that case.
|
||||
entry_data[ENTRY_LAST_OPTIONS] = dict(entry.options)
|
||||
|
||||
_wslink = checked_or(entry.options.get(WSLINK), bool, False)
|
||||
|
||||
_LOGGER.debug("WS Link is %s", "enbled" if _wslink else "disabled")
|
||||
|
|
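A plain-Python sketch of the `hass.data` layout and the coordinator-reuse check described above. Plain dictionaries stand in for `hass.data[DOMAIN]`, and `FakeCoordinator` is an illustrative stand-in; only the key names mirror `data.py`.

```python
from typing import Any

ENTRY_COORDINATOR = "coordinator"
ENTRY_LAST_OPTIONS = "last_options"


class FakeCoordinator:
    def __init__(self, options: dict[str, Any]) -> None:
        self.config_options = options


def setup_entry(hass_data: dict[str, Any], entry_id: str,
                options: dict[str, Any]) -> FakeCoordinator:
    # hass.data[DOMAIN][entry_id] is always a dict, never the coordinator itself.
    entry_data = hass_data.setdefault(entry_id, {})
    if not isinstance(entry_data, dict):  # defensive: normalise a wrong type
        entry_data = {}
        hass_data[entry_id] = entry_data

    # Reuse the coordinator across reloads so bound webhook handlers and
    # already-subscribed entities keep pointing at the same instance.
    coordinator = entry_data.get(ENTRY_COORDINATOR)
    if isinstance(coordinator, FakeCoordinator):
        coordinator.config_options = options  # refresh options on reuse
    else:
        coordinator = FakeCoordinator(options)
        entry_data[ENTRY_COORDINATOR] = coordinator

    entry_data[ENTRY_LAST_OPTIONS] = dict(options)  # snapshot for update_listener
    return coordinator


domain_data: dict[str, Any] = {}
first = setup_entry(domain_data, "entry-1", {"ws_link": False})
second = setup_entry(domain_data, "entry-1", {"ws_link": True})  # simulated reload
assert first is second  # the same coordinator instance survives the reload
```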
@ -227,10 +353,46 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry):
|
||||
"""Update setup listener."""
|
||||
"""Handle config entry option updates.
|
||||
|
||||
We skip reloading when only `SENSORS_TO_LOAD` changes.
|
||||
|
||||
Why:
|
||||
- Auto-discovery updates `SENSORS_TO_LOAD` as new payload fields appear.
|
||||
- Reloading a push-based integration temporarily unloads platforms and removes
|
||||
coordinator listeners, which can make the UI appear "stuck" until restart.
|
||||
"""
|
||||
hass_data_any = hass.data.get(DOMAIN)
|
||||
if isinstance(hass_data_any, dict):
|
||||
hass_data = cast("dict[str, Any]", hass_data_any)
|
||||
entry_data_any = hass_data.get(entry.entry_id)
|
||||
if isinstance(entry_data_any, dict):
|
||||
entry_data = cast("dict[str, Any]", entry_data_any)
|
||||
|
||||
old_options_any = entry_data.get(ENTRY_LAST_OPTIONS)
|
||||
if isinstance(old_options_any, dict):
|
||||
old_options = cast("dict[str, Any]", old_options_any)
|
||||
new_options = dict(entry.options)
|
||||
|
||||
changed_keys = {
|
||||
k
|
||||
for k in set(old_options.keys()) | set(new_options.keys())
|
||||
if old_options.get(k) != new_options.get(k)
|
||||
}
|
||||
|
||||
# Update snapshot early for the next comparison.
|
||||
entry_data[ENTRY_LAST_OPTIONS] = new_options
|
||||
|
||||
if changed_keys == {SENSORS_TO_LOAD}:
|
||||
_LOGGER.debug(
|
||||
"Options updated (%s); skipping reload.", SENSORS_TO_LOAD
|
||||
)
|
||||
return
|
||||
else:
|
||||
# No/invalid snapshot: store current options for next comparison.
|
||||
entry_data[ENTRY_LAST_OPTIONS] = dict(entry.options)
|
||||
|
||||
_ = await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
_LOGGER.info("Settings updated")
|
||||
|
||||
|
||||
|
|
|
|||
|
|
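The skip-reload logic in `update_listener` boils down to a set comparison of the old and new options. A minimal standalone sketch of just that comparison, assuming `SENSORS_TO_LOAD` is the only key whose change should not trigger a reload:

```python
from typing import Any

SENSORS_TO_LOAD = "sensors_to_load"


def should_reload(old: dict[str, Any], new: dict[str, Any]) -> bool:
    changed = {
        key
        for key in old.keys() | new.keys()
        if old.get(key) != new.get(key)
    }
    # Auto-discovery only touches SENSORS_TO_LOAD; reloading for that would
    # needlessly drop coordinator listeners on a push-based integration.
    return bool(changed) and changed != {SENSORS_TO_LOAD}


assert should_reload({"ws_link": False}, {"ws_link": True}) is True
assert should_reload(
    {SENSORS_TO_LOAD: ["outside_temp"]},
    {SENSORS_TO_LOAD: ["outside_temp", "outside_humidity"]},
) is False
```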
@ -249,7 +249,7 @@ class UnitOfBat(StrEnum):
|
|||
|
||||
LOW = "low"
|
||||
NORMAL = "normal"
|
||||
UNKNOWN = "unknown"
|
||||
UNKNOWN = "drained"
|
||||
|
||||
|
||||
BATTERY_LEVEL: list[UnitOfBat] = [
|
||||
|
|
|
|||
|
|
@ -0,0 +1,19 @@
|
|||
"""Shared keys for storing integration runtime state in `hass.data`.
|
||||
|
||||
This integration stores runtime state under:
|
||||
|
||||
hass.data[DOMAIN][entry_id] -> dict
|
||||
|
||||
Keeping keys in a dedicated module prevents subtle bugs where different modules
|
||||
store different types under the same key.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Final
|
||||
|
||||
# Per-entry dict keys stored under hass.data[DOMAIN][entry_id]
|
||||
ENTRY_COORDINATOR: Final[str] = "coordinator"
|
||||
ENTRY_ADD_ENTITIES: Final[str] = "async_add_entities"
|
||||
ENTRY_DESCRIPTIONS: Final[str] = "sensor_descriptions"
|
||||
ENTRY_LAST_OPTIONS: Final[str] = "last_options"
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
{
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"indoor_battery": {
|
||||
"default": "mdi:battery-unknown",
|
||||
"state": {
|
||||
"low": "mdi:battery-low",
|
||||
"normal": "mdi:battery",
|
||||
"drained": "mdi:battery-alert"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,4 +1,19 @@
|
|||
"""Routes implementation."""
|
||||
"""Routes implementation.
|
||||
|
||||
Why this dispatcher exists
|
||||
--------------------------
|
||||
Home Assistant registers aiohttp routes on startup. Re-registering or removing routes at runtime
|
||||
is awkward and error-prone (and can raise if routes already exist). This integration supports two
|
||||
different push endpoints (legacy WU-style vs WSLink). To allow switching between them without
|
||||
touching the aiohttp router, we register both routes once and use this in-process dispatcher to
|
||||
decide which one is currently enabled.
|
||||
|
||||
Important note:
|
||||
- Each route stores a *bound method* handler (e.g. `coordinator.received_data`). That means the
|
||||
route points to a specific coordinator instance. When the integration reloads, we must keep the
|
||||
same coordinator instance or update the stored handler accordingly. Otherwise requests may go to
|
||||
an old coordinator while entities listen to a new one (result: UI appears "frozen").
|
||||
"""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass, field
|
||||
|
|
@ -13,7 +28,11 @@ Handler = Callable[[Request], Awaitable[Response]]
|
|||
|
||||
@dataclass
|
||||
class RouteInfo:
|
||||
"""Route struct."""
|
||||
"""Route definition held by the dispatcher.
|
||||
|
||||
- `handler` is the real webhook handler (bound method).
|
||||
- `fallback` is used when the route exists but is currently disabled.
|
||||
"""
|
||||
|
||||
url_path: str
|
||||
handler: Handler
|
||||
|
|
@ -22,14 +41,19 @@ class RouteInfo:
|
|||
|
||||
|
||||
class Routes:
|
||||
"""Routes class."""
|
||||
"""Simple route dispatcher.
|
||||
|
||||
We register aiohttp routes once and direct traffic to the currently enabled endpoint
|
||||
using `switch_route`. This keeps route registration stable while still allowing the
|
||||
integration to support multiple incoming push formats.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Init."""
|
||||
"""Initialize dispatcher storage."""
|
||||
self.routes: dict[str, RouteInfo] = {}
|
||||
|
||||
async def dispatch(self, request: Request) -> Response:
|
||||
"""Dispatch."""
|
||||
"""Dispatch incoming request to either the enabled handler or a fallback."""
|
||||
info = self.routes.get(request.path)
|
||||
if not info:
|
||||
_LOGGER.debug("Route %s is not registered!", request.path)
|
||||
|
|
@ -38,20 +62,27 @@ class Routes:
|
|||
return await handler(request)
|
||||
|
||||
def switch_route(self, url_path: str) -> None:
|
||||
"""Switch route to new handler."""
|
||||
"""Enable exactly one route and disable all others.
|
||||
|
||||
This is called when options change (e.g. WSLink toggle). The aiohttp router stays
|
||||
untouched; we only flip which internal handler is active.
|
||||
"""
|
||||
for path, info in self.routes.items():
|
||||
info.enabled = path == url_path
|
||||
|
||||
def add_route(
|
||||
self, url_path: str, handler: Handler, *, enabled: bool = False
|
||||
) -> None:
|
||||
"""Add route to dispatcher."""
|
||||
"""Register a route in the dispatcher.
|
||||
|
||||
This does not register anything in aiohttp. It only stores routing metadata that
|
||||
`dispatch` uses after aiohttp has routed the request by path.
|
||||
"""
|
||||
self.routes[url_path] = RouteInfo(url_path, handler, enabled=enabled)
|
||||
_LOGGER.debug("Registered dispatcher for route %s", url_path)
|
||||
|
||||
def show_enabled(self) -> str:
|
||||
"""Show info of enabled route."""
|
||||
"""Return a human-readable description of the currently enabled route."""
|
||||
for url, route in self.routes.items():
|
||||
if route.enabled:
|
||||
return (
|
||||
|
|
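A stripped-down model of the dispatcher pattern implemented by `Routes`. Strings stand in for aiohttp requests and responses; the point is that the aiohttp router stays fixed while the active handler is flipped in-process. Names here are illustrative, not the PR's exact code.

```python
from dataclasses import dataclass
from typing import Callable, Dict

Handler = Callable[[str], str]


@dataclass
class RouteInfo:
    url_path: str
    handler: Handler
    enabled: bool = False


class MiniRoutes:
    def __init__(self) -> None:
        self.routes: Dict[str, RouteInfo] = {}

    def add_route(self, url_path: str, handler: Handler, *, enabled: bool = False) -> None:
        self.routes[url_path] = RouteInfo(url_path, handler, enabled)

    def switch_route(self, url_path: str) -> None:
        # Enable exactly one route; everything else becomes disabled.
        for path, info in self.routes.items():
            info.enabled = path == url_path

    def dispatch(self, path: str, payload: str) -> str:
        info = self.routes.get(path)
        if info is None or not info.enabled:
            return "400: unregistered or disabled webhook"
        return info.handler(payload)


routes = MiniRoutes()
routes.add_route("/wu", lambda q: f"WU handled {q}", enabled=True)
routes.add_route("/wslink", lambda q: f"WSLink handled {q}")
print(routes.dispatch("/wslink", "t1tem=21"))  # fallback: route registered but disabled
routes.switch_route("/wslink")                 # options toggle, no router changes needed
print(routes.dispatch("/wslink", "t1tem=21"))  # now handled by the WSLink handler
```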
@ -61,7 +92,11 @@ class Routes:
|
|||
|
||||
|
||||
async def unregistred(request: Request) -> Response:
|
||||
"""Return unregistred error."""
|
||||
"""Fallback response for unknown/disabled routes.
|
||||
|
||||
This should normally never happen for correctly configured stations, but it provides
|
||||
a clear error message when the station pushes to the wrong endpoint.
|
||||
"""
|
||||
_ = request
|
||||
_LOGGER.debug("Received data to unregistred or disabled webhook.")
|
||||
return Response(text="Unregistred webhook. Check your settings.", status=400)
|
||||
|
|
|
|||
|
|
@ -1,18 +1,36 @@
|
|||
"""Sensors definition for SWS12500."""
|
||||
"""Sensor platform for SWS12500.
|
||||
|
||||
This module creates sensor entities based on the config entry options.
|
||||
|
||||
The integration is push-based (webhook), so we avoid reloading the entry for
|
||||
auto-discovered sensors. Instead, we dynamically add new entities at runtime
|
||||
using the `async_add_entities` callback stored in `hass.data`.
|
||||
|
||||
Why not reload on auto-discovery?
|
||||
Reloading a config entry unloads platforms temporarily, which removes coordinator
|
||||
listeners. With frequent webhook pushes, this can create a window where nothing is
|
||||
subscribed and the frontend appears "frozen" until another full reload/restart.
|
||||
|
||||
Runtime state is stored under:
|
||||
hass.data[DOMAIN][entry_id] -> dict with known keys (see `data.py`)
|
||||
"""
|
||||
|
||||
from collections.abc import Callable
|
||||
from functools import cached_property
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from py_typecheck import checked_or
|
||||
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType
|
||||
from homeassistant.helpers.entity import DeviceInfo, generate_entity_id
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import WeatherDataUpdateCoordinator
|
||||
from .const import (
|
||||
BATTERY_LIST,
|
||||
CHILL_INDEX,
|
||||
DOMAIN,
|
||||
HEAT_INDEX,
|
||||
|
|
@ -23,133 +41,202 @@ from .const import (
|
|||
WIND_DIR,
|
||||
WIND_SPEED,
|
||||
WSLINK,
|
||||
UnitOfBat,
|
||||
)
|
||||
from .data import ENTRY_ADD_ENTITIES, ENTRY_COORDINATOR, ENTRY_DESCRIPTIONS
|
||||
from .sensors_common import WeatherSensorEntityDescription
|
||||
from .sensors_weather import SENSOR_TYPES_WEATHER_API
|
||||
from .sensors_wslink import SENSOR_TYPES_WSLINK
|
||||
from .utils import battery_level_to_icon, battery_level_to_text, chill_index, heat_index
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# The `async_add_entities` callback accepts a list of Entity-like objects.
|
||||
# We keep the type loose here to avoid propagating HA generics (`DataUpdateCoordinator[T]`)
|
||||
# that often end up as "partially unknown" under type-checkers.
|
||||
_AddEntitiesFn = Callable[[list[SensorEntity]], None]
|
||||
|
||||
|
||||
def _auto_enable_derived_sensors(requested: set[str]) -> set[str]:
|
||||
"""Auto-enable derived sensors when their source fields are present.
|
||||
|
||||
This does NOT model strict dependencies ("if you want X, we force-add inputs").
|
||||
Instead, it opportunistically enables derived outputs when the station already
|
||||
provides the raw fields needed to compute them.
|
||||
"""
|
||||
|
||||
expanded = set(requested)
|
||||
|
||||
# Wind azimut depends on wind dir
|
||||
if WIND_DIR in expanded:
|
||||
expanded.add(WIND_AZIMUT)
|
||||
|
||||
# Heat index depends on temp + humidity
|
||||
if OUTSIDE_TEMP in expanded and OUTSIDE_HUMIDITY in expanded:
|
||||
expanded.add(HEAT_INDEX)
|
||||
|
||||
# Chill index depends on temp + wind speed
|
||||
if OUTSIDE_TEMP in expanded and WIND_SPEED in expanded:
|
||||
expanded.add(CHILL_INDEX)
|
||||
|
||||
return expanded
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Weather Station sensors."""
|
||||
"""Set up Weather Station sensors.
|
||||
|
||||
coordinator: WeatherDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id]
|
||||
We also store `async_add_entities` and a map of sensor descriptions in `hass.data`
|
||||
so the webhook handler can add newly discovered entities dynamically without
|
||||
reloading the config entry.
|
||||
"""
|
||||
hass_data_any = hass.data.setdefault(DOMAIN, {})
|
||||
hass_data = cast("dict[str, Any]", hass_data_any)
|
||||
|
||||
sensors_to_load: list = []
|
||||
sensors: list = []
|
||||
_wslink = config_entry.options.get(WSLINK)
|
||||
entry_data_any = hass_data.get(config_entry.entry_id)
|
||||
if not isinstance(entry_data_any, dict):
|
||||
# Created by the integration setup, but keep this defensive for safety.
|
||||
entry_data_any = {}
|
||||
hass_data[config_entry.entry_id] = entry_data_any
|
||||
entry_data = cast("dict[str, Any]", entry_data_any)
|
||||
|
||||
SENSOR_TYPES = SENSOR_TYPES_WSLINK if _wslink else SENSOR_TYPES_WEATHER_API
|
||||
coordinator = entry_data.get(ENTRY_COORDINATOR)
|
||||
if coordinator is None:
|
||||
# Coordinator is created by the integration (`__init__.py`). Without it, we cannot set up entities.
|
||||
# This should not happen in normal operation; treat it as a no-op setup.
|
||||
return
|
||||
|
||||
# Check if we have some sensors to load.
|
||||
if sensors_to_load := config_entry.options.get(SENSORS_TO_LOAD, []):
|
||||
if WIND_DIR in sensors_to_load:
|
||||
sensors_to_load.append(WIND_AZIMUT)
|
||||
if (OUTSIDE_HUMIDITY in sensors_to_load) and (OUTSIDE_TEMP in sensors_to_load):
|
||||
sensors_to_load.append(HEAT_INDEX)
|
||||
# Store the platform callback so we can add entities later (auto-discovery) without reload.
|
||||
entry_data[ENTRY_ADD_ENTITIES] = async_add_entities
|
||||
|
||||
if (WIND_SPEED in sensors_to_load) and (OUTSIDE_TEMP in sensors_to_load):
|
||||
sensors_to_load.append(CHILL_INDEX)
|
||||
sensors = [
|
||||
WeatherSensor(hass, description, coordinator)
|
||||
for description in SENSOR_TYPES
|
||||
if description.key in sensors_to_load
|
||||
]
|
||||
async_add_entities(sensors)
|
||||
wslink_enabled = checked_or(config_entry.options.get(WSLINK), bool, False)
|
||||
sensor_types = SENSOR_TYPES_WSLINK if wslink_enabled else SENSOR_TYPES_WEATHER_API
|
||||
|
||||
# Keep a descriptions map for dynamic entity creation by key.
|
||||
# When the station starts sending a new payload field, the webhook handler can
|
||||
# look up its description here and instantiate the matching entity.
|
||||
entry_data[ENTRY_DESCRIPTIONS] = {desc.key: desc for desc in sensor_types}
|
||||
|
||||
sensors_to_load = checked_or(
|
||||
config_entry.options.get(SENSORS_TO_LOAD), list[str], []
|
||||
)
|
||||
if not sensors_to_load:
|
||||
return
|
||||
|
||||
requested = _auto_enable_derived_sensors(set(sensors_to_load))
|
||||
|
||||
entities: list[WeatherSensor] = [
|
||||
WeatherSensor(description, coordinator)
|
||||
for description in sensor_types
|
||||
if description.key in requested
|
||||
]
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
def add_new_sensors(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry, keys: list[str]
|
||||
) -> None:
|
||||
"""Dynamically add newly discovered sensors without reloading the entry.
|
||||
|
||||
Called by the webhook handler when the station starts sending new fields.
|
||||
|
||||
Design notes:
|
||||
- This function is intentionally a safe no-op if the sensor platform hasn't
|
||||
finished setting up yet (e.g. callback/description map missing).
|
||||
- Unknown payload keys are ignored (only keys with an entity description are added).
|
||||
"""
|
||||
hass_data_any = hass.data.get(DOMAIN)
|
||||
if not isinstance(hass_data_any, dict):
|
||||
return
|
||||
hass_data = cast("dict[str, Any]", hass_data_any)
|
||||
|
||||
entry_data_any = hass_data.get(config_entry.entry_id)
|
||||
if not isinstance(entry_data_any, dict):
|
||||
return
|
||||
entry_data = cast("dict[str, Any]", entry_data_any)
|
||||
|
||||
add_entities_any = entry_data.get(ENTRY_ADD_ENTITIES)
|
||||
descriptions_any = entry_data.get(ENTRY_DESCRIPTIONS)
|
||||
coordinator_any = entry_data.get(ENTRY_COORDINATOR)
|
||||
|
||||
if add_entities_any is None or descriptions_any is None or coordinator_any is None:
|
||||
return
|
||||
|
||||
add_entities_fn = cast("_AddEntitiesFn", add_entities_any)
|
||||
descriptions_map = cast(
|
||||
"dict[str, WeatherSensorEntityDescription]", descriptions_any
|
||||
)
|
||||
|
||||
new_entities: list[SensorEntity] = []
|
||||
for key in keys:
|
||||
desc = descriptions_map.get(key)
|
||||
if desc is None:
|
||||
continue
|
||||
new_entities.append(WeatherSensor(desc, coordinator_any))
|
||||
|
||||
if new_entities:
|
||||
add_entities_fn(new_entities)
|
||||
|
||||
|
||||
class WeatherSensor( # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
CoordinatorEntity[WeatherDataUpdateCoordinator], SensorEntity
|
||||
CoordinatorEntity, SensorEntity
|
||||
): # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
"""Implementation of Weather Sensor entity."""
|
||||
"""Implementation of Weather Sensor entity.
|
||||
|
||||
We intentionally keep the coordinator type unparameterized here to avoid
|
||||
propagating HA's generic `DataUpdateCoordinator[T]` typing into this module.
|
||||
"""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
description: WeatherSensorEntityDescription,
|
||||
coordinator: WeatherDataUpdateCoordinator,
|
||||
coordinator: Any,
|
||||
) -> None:
|
||||
"""Initialize sensor."""
|
||||
super().__init__(coordinator)
|
||||
self.hass = hass
|
||||
self.coordinator = coordinator
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = description.key
|
||||
self._data = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle listeners to reloaded sensors."""
|
||||
|
||||
await super().async_added_to_hass()
|
||||
|
||||
self.coordinator.async_add_listener(self._handle_coordinator_update)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._data = self.coordinator.data.get(self.entity_description.key)
|
||||
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def native_value(self): # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
"""Return value of entity."""
|
||||
"""Return the current sensor state.
|
||||
|
||||
_wslink = self.coordinator.config.options.get(WSLINK)
|
||||
Resolution order:
|
||||
1) If `value_from_data_fn` is provided, it receives the full payload dict and can compute
|
||||
derived values (e.g. battery enum mapping, azimut text, heat/chill indices).
|
||||
2) Otherwise we read the raw value for this key from the payload and pass it through `value_fn`.
|
||||
|
||||
if self.coordinator.data and (WIND_AZIMUT in self.entity_description.key):
|
||||
return self.entity_description.value_fn(self.coordinator.data.get(WIND_DIR)) # pyright: ignore[ reportAttributeAccessIssue]
|
||||
Payload normalization:
|
||||
- The station sometimes sends empty strings for missing fields; we treat "" as no value (None).
|
||||
"""
|
||||
data: dict[str, Any] = checked_or(self.coordinator.data, dict[str, Any], {})
|
||||
key = self.entity_description.key
|
||||
|
||||
if (
|
||||
self.coordinator.data
|
||||
and (HEAT_INDEX in self.entity_description.key)
|
||||
and not _wslink
|
||||
):
|
||||
return self.entity_description.value_fn(heat_index(self.coordinator.data)) # pyright: ignore[ reportAttributeAccessIssue]
|
||||
description = cast("WeatherSensorEntityDescription", self.entity_description)
|
||||
if description.value_from_data_fn is not None:
|
||||
return description.value_from_data_fn(data)
|
||||
|
||||
if (
|
||||
self.coordinator.data
|
||||
and (CHILL_INDEX in self.entity_description.key)
|
||||
and not _wslink
|
||||
):
|
||||
return self.entity_description.value_fn(chill_index(self.coordinator.data)) # pyright: ignore[ reportAttributeAccessIssue]
|
||||
raw = data.get(key)
|
||||
if raw is None or raw == "":
|
||||
return None
|
||||
|
||||
return (
|
||||
None if self._data == "" else self.entity_description.value_fn(self._data) # pyright: ignore[ reportAttributeAccessIssue]
|
||||
)
|
||||
if description.value_fn is None:
|
||||
return None
|
||||
|
||||
return description.value_fn(raw)
|
||||
|
||||
@property
|
||||
def suggested_entity_id(self) -> str:
|
||||
"""Return name."""
|
||||
return generate_entity_id("sensor.{}", self.entity_description.key)
|
||||
|
||||
@property
|
||||
def icon(self) -> str | None: # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
"""Return the dynamic icon for battery representation."""
|
||||
|
||||
if self.entity_description.key in BATTERY_LIST:
|
||||
if self.native_value:
|
||||
battery_level = battery_level_to_text(self.native_value)
|
||||
return battery_level_to_icon(battery_level)
|
||||
|
||||
return battery_level_to_icon(UnitOfBat.UNKNOWN)
|
||||
|
||||
return self.entity_description.icon
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo: # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
@cached_property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Device info."""
|
||||
return DeviceInfo(
|
||||
connections=set(),
|
||||
|
|
|
|||
|
|
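The `native_value` docstring above defines a resolution order: a whole-payload `value_from_data_fn` wins, otherwise the raw per-key value goes through `value_fn`, with `""` treated as missing. A plain-Python sketch of that order, using a simplified description object (field names mirror `sensors_common.py`, but this is not the PR's exact code):

```python
from dataclasses import dataclass
from typing import Any, Callable, Optional


@dataclass
class MiniDescription:
    key: str
    value_fn: Optional[Callable[[Any], Any]] = None
    value_from_data_fn: Optional[Callable[[dict[str, Any]], Any]] = None


def resolve_value(description: MiniDescription, payload: dict[str, Any]) -> Any:
    # 1) Derived sensors compute from the whole payload.
    if description.value_from_data_fn is not None:
        return description.value_from_data_fn(payload)

    # 2) Plain sensors read their own key; empty strings count as "no value".
    raw = payload.get(description.key)
    if raw is None or raw == "":
        return None
    return description.value_fn(raw) if description.value_fn else None


payload = {"outside_temp": "21.5", "wind_dir": "270", "outside_humidity": ""}
temp = MiniDescription("outside_temp", value_fn=float)
azimut = MiniDescription(
    "wind_azimut",
    value_from_data_fn=lambda d: "W" if d.get("wind_dir") == "270" else "?",
)
humidity = MiniDescription("outside_humidity", value_fn=int)

assert resolve_value(temp, payload) == 21.5
assert resolve_value(azimut, payload) == "W"
assert resolve_value(humidity, payload) is None  # "" is treated as missing
```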
@ -11,4 +11,7 @@ from homeassistant.components.sensor import SensorEntityDescription
|
|||
class WeatherSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describe Weather Sensor entities."""
|
||||
|
||||
value_fn: Callable[[Any], int | float | str | None]
|
||||
value_fn: Callable[[Any], int | float | str | None] | None = None
|
||||
value_from_data_fn: Callable[[dict[str, Any]], int | float | str | None] | None = (
|
||||
None
|
||||
)
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ from .const import (
|
|||
UnitOfDir,
|
||||
)
|
||||
from .sensors_common import WeatherSensorEntityDescription
|
||||
from .utils import wind_dir_to_text
|
||||
from .utils import chill_index, heat_index, wind_dir_to_text
|
||||
|
||||
SENSOR_TYPES_WEATHER_API: tuple[WeatherSensorEntityDescription, ...] = (
|
||||
WeatherSensorEntityDescription(
|
||||
|
|
@ -133,8 +133,11 @@ SENSOR_TYPES_WEATHER_API: tuple[WeatherSensorEntityDescription, ...] = (
|
|||
key=WIND_AZIMUT,
|
||||
icon="mdi:sign-direction",
|
||||
value_fn=lambda data: cast("str", wind_dir_to_text(data)),
|
||||
value_from_data_fn=lambda data: cast(
|
||||
"str", wind_dir_to_text(cast("float", data.get(WIND_DIR) or 0.0))
|
||||
),
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=list(UnitOfDir),
|
||||
options=[e.value for e in UnitOfDir],
|
||||
translation_key=WIND_AZIMUT,
|
||||
),
|
||||
WeatherSensorEntityDescription(
|
||||
|
|
@ -244,6 +247,7 @@ SENSOR_TYPES_WEATHER_API: tuple[WeatherSensorEntityDescription, ...] = (
|
|||
icon="mdi:weather-sunny",
|
||||
translation_key=HEAT_INDEX,
|
||||
value_fn=lambda data: cast("int", data),
|
||||
value_from_data_fn=lambda data: heat_index(data),
|
||||
),
|
||||
WeatherSensorEntityDescription(
|
||||
key=CHILL_INDEX,
|
||||
|
|
@ -255,5 +259,6 @@ SENSOR_TYPES_WEATHER_API: tuple[WeatherSensorEntityDescription, ...] = (
|
|||
icon="mdi:weather-sunny",
|
||||
translation_key=CHILL_INDEX,
|
||||
value_fn=lambda data: cast("int", data),
|
||||
value_from_data_fn=lambda data: chill_index(data),
|
||||
),
|
||||
)
|
||||
|
|
|
|||
|
|
@ -44,10 +44,11 @@ from .const import (
|
|||
WIND_GUST,
|
||||
WIND_SPEED,
|
||||
YEARLY_RAIN,
|
||||
UnitOfBat,
|
||||
UnitOfDir,
|
||||
)
|
||||
from .sensors_common import WeatherSensorEntityDescription
|
||||
from .utils import wind_dir_to_text
|
||||
from .utils import battery_level, wind_dir_to_text
|
||||
|
||||
SENSOR_TYPES_WSLINK: tuple[WeatherSensorEntityDescription, ...] = (
|
||||
WeatherSensorEntityDescription(
|
||||
|
|
@ -139,8 +140,11 @@ SENSOR_TYPES_WSLINK: tuple[WeatherSensorEntityDescription, ...] = (
|
|||
key=WIND_AZIMUT,
|
||||
icon="mdi:sign-direction",
|
||||
value_fn=lambda data: cast("str", wind_dir_to_text(data)),
|
||||
value_from_data_fn=lambda data: cast(
|
||||
"str", wind_dir_to_text(cast("float", data.get(WIND_DIR) or 0.0))
|
||||
),
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=list(UnitOfDir),
|
||||
options=[e.value for e in UnitOfDir],
|
||||
translation_key=WIND_AZIMUT,
|
||||
),
|
||||
WeatherSensorEntityDescription(
|
||||
|
|
@ -265,25 +269,6 @@ SENSOR_TYPES_WSLINK: tuple[WeatherSensorEntityDescription, ...] = (
|
|||
translation_key=CH3_HUMIDITY,
|
||||
value_fn=lambda data: cast("int", data),
|
||||
),
|
||||
# WeatherSensorEntityDescription(
|
||||
# key=CH4_TEMP,
|
||||
# native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
|
||||
# state_class=SensorStateClass.MEASUREMENT,
|
||||
# device_class=SensorDeviceClass.TEMPERATURE,
|
||||
# suggested_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
# icon="mdi:weather-sunny",
|
||||
# translation_key=CH4_TEMP,
|
||||
# value_fn=lambda data: cast(float, data),
|
||||
# ),
|
||||
# WeatherSensorEntityDescription(
|
||||
# key=CH4_HUMIDITY,
|
||||
# native_unit_of_measurement=PERCENTAGE,
|
||||
# state_class=SensorStateClass.MEASUREMENT,
|
||||
# device_class=SensorDeviceClass.HUMIDITY,
|
||||
# icon="mdi:weather-sunny",
|
||||
# translation_key=CH4_HUMIDITY,
|
||||
# value_fn=lambda data: cast(int, data),
|
||||
# ),
|
||||
WeatherSensorEntityDescription(
|
||||
key=HEAT_INDEX,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
|
|
@ -309,23 +294,32 @@ SENSOR_TYPES_WSLINK: tuple[WeatherSensorEntityDescription, ...] = (
|
|||
WeatherSensorEntityDescription(
|
||||
key=OUTSIDE_BATTERY,
|
||||
translation_key=OUTSIDE_BATTERY,
|
||||
icon="mdi:battery-unknown",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
value_fn=lambda data: (data),
|
||||
options=[e.value for e in UnitOfBat],
|
||||
value_fn=None,
|
||||
value_from_data_fn=lambda data: battery_level(
|
||||
data.get(OUTSIDE_BATTERY, None)
|
||||
).value,
|
||||
),
|
||||
WeatherSensorEntityDescription(
|
||||
key=CH2_BATTERY,
|
||||
translation_key=CH2_BATTERY,
|
||||
icon="mdi:battery-unknown",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
value_fn=lambda data: (data),
|
||||
options=[e.value for e in UnitOfBat],
|
||||
value_fn=None,
|
||||
value_from_data_fn=lambda data: battery_level(
|
||||
data.get(CH2_BATTERY, None)
|
||||
).value,
|
||||
),
|
||||
WeatherSensorEntityDescription(
|
||||
key=INDOOR_BATTERY,
|
||||
translation_key=INDOOR_BATTERY,
|
||||
icon="mdi:battery-unknown",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
value_fn=lambda data: (data),
|
||||
options=[e.value for e in UnitOfBat],
|
||||
value_fn=None,
|
||||
value_from_data_fn=lambda data: battery_level(
|
||||
data.get(INDOOR_BATTERY, None)
|
||||
).value,
|
||||
),
|
||||
WeatherSensorEntityDescription(
|
||||
key=WBGT_TEMP,
|
||||
|
|
|
|||
|
|
@ -87,6 +87,18 @@
|
|||
"pocasi_logger_checkbox": "Enable only if you want to send debbug data to the developer"
|
||||
}
|
||||
},
|
||||
"ecowitt": {
|
||||
"description": "Nastavení pro Ecowitt",
|
||||
"title": "Konfigurace pro stanice Ecowitt",
|
||||
"data": {
|
||||
"ecowitt_webhook_id": "Unikátní webhook ID",
|
||||
"ecowitt_enabled": "Povolit data ze stanice Ecowitt"
|
||||
},
|
||||
"data_description": {
|
||||
"ecowitt_webhook_id": "Nastavení pro stanici: {url}:{port}/weatherhub/{webhook_id}",
|
||||
"ecowitt_enabled": "Povolit přijímání dat ze stanic Ecowitt"
|
||||
}
|
||||
},
|
||||
"migration": {
|
||||
"title": "Statistic migration.",
|
||||
"description": "For the correct functioning of long-term statistics, it is necessary to migrate the sensor unit in the long-term statistics. The original unit of long-term statistics for daily precipitation was in mm/d, however, the station only sends data in mm without time differentiation.\n\n The sensor to be migrated is for daily precipitation. If the correct value is already in the list for the daily precipitation sensor (mm), then the migration is already complete.\n\n Migration result for the sensor: {migration_status}, a total of {migration_count} rows converted.",
|
||||
|
|
@ -166,6 +178,21 @@
|
|||
"chill_index": {
|
||||
"name": "Wind chill"
|
||||
},
|
||||
"hourly_rain": {
|
||||
"name": "Hourly precipitation"
|
||||
},
|
||||
"weekly_rain": {
|
||||
"name": "Weekly precipitation"
|
||||
},
|
||||
"monthly_rain": {
|
||||
"name": "Monthly precipitation"
|
||||
},
|
||||
"yearly_rain": {
|
||||
"name": "Yearly precipitation"
|
||||
},
|
||||
"wbgt_index": {
|
||||
"name": "WBGT index"
|
||||
},
|
||||
"wind_azimut": {
|
||||
"name": "Bearing",
|
||||
"state": {
|
||||
|
|
@ -185,14 +212,30 @@
|
|||
"wnw": "WNW",
|
||||
"nw": "NW",
|
||||
"nnw": "NNW"
|
||||
},
|
||||
"outside_battery": {
|
||||
"name": "Outside battery level",
|
||||
"state": {
|
||||
"normal": "OK",
|
||||
"low": "Low",
|
||||
"unknown": "Unknown / drained out"
|
||||
}
|
||||
}
|
||||
},
|
||||
"outside_battery": {
|
||||
"name": "Outside battery level",
|
||||
"state": {
|
||||
"normal": "OK",
|
||||
"low": "Low",
|
||||
"unknown": "Unknown / drained out"
|
||||
}
|
||||
},
|
||||
"ch2_battery": {
|
||||
"name": "Channel 2 battery level",
|
||||
"state": {
|
||||
"normal": "OK",
|
||||
"low": "Low",
|
||||
"unknown": "Unknown / drained out"
|
||||
}
|
||||
},
|
||||
"indoor_battery": {
|
||||
"name": "Console battery level",
|
||||
"state": {
|
||||
"normal": "OK",
|
||||
"low": "Low",
|
||||
"unknown": "Unknown / drained out"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -233,7 +233,7 @@
|
|||
"state": {
|
||||
"low": "Nízká",
|
||||
"normal": "Normální",
|
||||
"unknown": "Neznámá / zcela vybitá"
|
||||
"drained": "Neznámá / zcela vybitá"
|
||||
}
|
||||
},
|
||||
"ch2_battery": {
|
||||
|
|
|
|||
|
|
@ -1,12 +1,21 @@
|
|||
"""Utils for SWS12500."""
|
||||
"""Utils for SWS12500.
|
||||
|
||||
This module contains small helpers used across the integration.
|
||||
|
||||
Notable responsibilities:
|
||||
- Payload remapping: convert raw station/webhook field names into stable internal keys.
|
||||
- Auto-discovery helpers: detect new payload fields that are not enabled yet and persist them
|
||||
to config entry options so sensors can be created dynamically.
|
||||
- Formatting/conversion helpers (wind direction text, battery mapping, temperature conversions).
|
||||
|
||||
Keeping these concerns in one place avoids duplicating logic in the webhook handler and entity code.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import math
|
||||
from multiprocessing import Value
|
||||
from typing import Any, cast
|
||||
|
||||
import numpy as np
|
||||
from py_typecheck import checked
|
||||
from py_typecheck.core import checked_or
|
||||
|
||||
from homeassistant.components import persistent_notification
|
||||
|
|
@ -106,24 +115,35 @@ async def update_options(
|
|||
def anonymize(
|
||||
data: dict[str, str | int | float | bool],
|
||||
) -> dict[str, str | int | float | bool]:
|
||||
"""Anoynimize recieved data."""
|
||||
anonym: dict[str, str] = {}
|
||||
return {
|
||||
anonym[key]: value
|
||||
for key, value in data.items()
|
||||
if key not in {"ID", "PASSWORD", "wsid", "wspw"}
|
||||
}
|
||||
"""Anonymize received data for safe logging.
|
||||
|
||||
- Keep all keys, but mask sensitive values.
|
||||
- Do not raise on unexpected/missing keys.
|
||||
"""
|
||||
secrets = {"ID", "PASSWORD", "wsid", "wspw"}
|
||||
|
||||
return {k: ("***" if k in secrets else v) for k, v in data.items()}
|
||||
|
||||
|
||||
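A quick illustration of the masking behaviour described in the new `anonymize` docstring: all keys are kept, only the credential values are replaced. The sample payload is made up.

```python
sample = {"ID": "station-id", "PASSWORD": "secret", "tempf": "71.6", "humidity": "40"}
secrets = {"ID", "PASSWORD", "wsid", "wspw"}

masked = {k: ("***" if k in secrets else v) for k, v in sample.items()}
print(masked)  # {'ID': '***', 'PASSWORD': '***', 'tempf': '71.6', 'humidity': '40'}
```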
def remap_items(entities: dict[str, str]) -> dict[str, str]:
|
||||
"""Remap items in query."""
|
||||
"""Remap legacy (WU-style) payload field names into internal sensor keys.
|
||||
|
||||
The station sends short/legacy field names (e.g. "tempf", "humidity"). Internally we use
|
||||
stable keys from `const.py` (e.g. "outside_temp", "outside_humidity"). This function produces
|
||||
a normalized dict that the rest of the integration can work with.
|
||||
"""
|
||||
return {
|
||||
REMAP_ITEMS[key]: value for key, value in entities.items() if key in REMAP_ITEMS
|
||||
}
|
||||
|
||||
|
||||
def remap_wslink_items(entities: dict[str, str]) -> dict[str, str]:
|
||||
"""Remap items in query for WSLink API."""
|
||||
"""Remap WSLink payload field names into internal sensor keys.
|
||||
|
||||
WSLink uses a different naming scheme than the legacy endpoint (e.g. "t1tem", "t1ws").
|
||||
Just like `remap_items`, this function normalizes the payload to the integration's stable
|
||||
internal keys.
|
||||
"""
|
||||
return {
|
||||
REMAP_WSLINK_ITEMS[key]: value
|
||||
for key, value in entities.items()
|
||||
|
|
@ -132,19 +152,32 @@ def remap_wslink_items(entities: dict[str, str]) -> dict[str, str]:
|
|||
|
||||
|
||||
def loaded_sensors(config_entry: ConfigEntry) -> list[str]:
|
||||
"""Get loaded sensors."""
|
||||
"""Return sensor keys currently enabled for this config entry.
|
||||
|
||||
Auto-discovery persists new keys into `config_entry.options[SENSORS_TO_LOAD]`. The sensor
|
||||
platform uses this list to decide which entities to create.
|
||||
"""
|
||||
return config_entry.options.get(SENSORS_TO_LOAD) or []
|
||||
|
||||
|
||||
def check_disabled(
|
||||
items: dict[str, str], config_entry: ConfigEntry
|
||||
) -> list[str] | None:
|
||||
"""Check if we have data for unloaded sensors.
|
||||
"""Detect payload fields that are not enabled yet (auto-discovery).
|
||||
|
||||
If so, then add sensor to load queue.
|
||||
The integration supports "auto-discovery" of sensors: when the station starts sending a new
|
||||
field, we can automatically enable and create the corresponding entity.
|
||||
|
||||
This helper compares the normalized payload keys (`items`) with the currently enabled sensor
|
||||
keys stored in options (`SENSORS_TO_LOAD`) and returns the missing keys.
|
||||
|
||||
Returns:
|
||||
- list[str] of newly discovered sensor keys (to be added/enabled), or
|
||||
- None if no new keys were found.
|
||||
|
||||
Notes:
|
||||
- Logging is controlled via `DEV_DBG` because payloads can arrive frequently.
|
||||
|
||||
Returns list of found sensors or None
|
||||
"""
|
||||
|
||||
log = checked_or(config_entry.options.get(DEV_DBG), bool, False)
|
||||
|
|
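The docstring above describes `check_disabled` as a comparison between the normalized payload keys and the sensors already enabled in options. A standalone sketch of that comparison; the function and variable names are illustrative, since the hunk truncates the actual implementation.

```python
def discover_new_keys(payload: dict[str, str], loaded: list[str]) -> list[str] | None:
    """Return payload keys that are not enabled yet, or None if nothing is new."""
    new_keys = sorted(set(payload) - set(loaded))
    return new_keys or None


payload = {"outside_temp": "21.5", "outside_humidity": "40", "uv": "3"}
assert discover_new_keys(payload, ["outside_temp", "outside_humidity"]) == ["uv"]
assert discover_new_keys(payload, list(payload)) is None
```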
@ -178,9 +211,12 @@ def wind_dir_to_text(deg: float) -> UnitOfDir | None:
|
|||
return None
|
||||
|
||||
|
||||
def battery_level(battery: int) -> UnitOfBat:
|
||||
def battery_level(battery: int | str | None) -> UnitOfBat:
|
||||
"""Return battery level.
|
||||
|
||||
WSLink payload values often arrive as strings (e.g. "0"/"1"), so we accept
|
||||
both ints and strings and coerce to int before mapping.
|
||||
|
||||
Returns UnitOfBat
|
||||
"""
|
||||
|
||||
|
|
@ -189,10 +225,19 @@ def battery_level(battery: int) -> UnitOfBat:
|
|||
1: UnitOfBat.NORMAL,
|
||||
}
|
||||
|
||||
if (v := checked(battery, int)) is None:
|
||||
if (battery is None) or (battery == ""):
|
||||
return UnitOfBat.UNKNOWN
|
||||
|
||||
return level_map.get(v, UnitOfBat.UNKNOWN)
|
||||
vi: int
|
||||
if isinstance(battery, int):
|
||||
vi = battery
|
||||
else:
|
||||
try:
|
||||
vi = int(battery)
|
||||
except ValueError:
|
||||
return UnitOfBat.UNKNOWN
|
||||
|
||||
return level_map.get(vi, UnitOfBat.UNKNOWN)
|
||||
|
||||
|
||||
def battery_level_to_icon(battery: UnitOfBat) -> str:
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@
|
|||
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
from aiohttp.client_exceptions import ClientError
|
||||
from py_typecheck.core import checked
|
||||
|
|
@ -26,8 +25,6 @@ from .utils import update_options
|
|||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
RESPONSE_FOR_TEST = False
|
||||
|
||||
|
||||
class WindyNotInserted(Exception):
|
||||
"""NotInserted state."""
|
||||
|
|
@ -54,8 +51,8 @@ class WindyPush:
|
|||
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigEntry) -> None:
|
||||
"""Init."""
|
||||
self.hass: Final = hass
|
||||
self.config: Final = config
|
||||
self.hass = hass
|
||||
self.config = config
|
||||
|
||||
""" lets wait for 1 minute to get initial data from station
|
||||
and then try to push first data to Windy
|
||||
|
|
@ -66,7 +63,7 @@ class WindyPush:
|
|||
self.log: bool = self.config.options.get(WINDY_LOGGER_ENABLED, False)
|
||||
self.invalid_response_count: int = 0
|
||||
|
||||
def verify_windy_response( # pylint: disable=useless-return
|
||||
def verify_windy_response(
|
||||
self,
|
||||
response: str,
|
||||
):
|
||||
|
|
@ -87,7 +84,7 @@ class WindyPush:
|
|||
if "Unauthorized" in response:
|
||||
raise WindyApiKeyError
|
||||
|
||||
async def push_data_to_windy(self, data: dict[str, str]) -> bool:
|
||||
async def push_data_to_windy(self, data: dict[str, str]) -> bool:
|
||||
"""Pushes weather data do Windy stations.
|
||||
|
||||
Interval is 5 minutes, otherwise Windy would not accept data.
|
||||
|
|