HA Phase A: rebuild coordinator + binary_sensor on v1.0 client + JOURNEY.md

custom_components/omni_pca/coordinator.py — full rewrite:
- Long-lived OmniClient for entry lifetime
- One-shot discovery: system info + zone/unit/area/thermostat/button names
  via list_*_names + per-index get_object_properties
- Periodic poll (30s default): get_extended_status for zones/units/thermostats,
  get_object_status for areas, skip empty discoveries
- Background _run_event_listener task consuming client.events(), patches
  state in-place and async_set_updated_data on push:
    ZoneStateChanged    -> patch zone_status raw byte
    UnitStateChanged    -> patch unit_status state, preserve brightness
    ArmingChanged       -> patch area_status mode + last_user
    AlarmActivated/Cleared -> trigger refresh
    AcLost/Restored, BatteryLow/Restored -> recorded for sensors
- InvalidEncryptionKeyError/HandshakeError -> ConfigEntryAuthFailed (HA reauth)
- OmniConnectionError/RequestTimeoutError -> UpdateFailed + drop client
- Event task cancelled in async_shutdown

custom_components/omni_pca/binary_sensor.py — full rewrite:
- OmniZoneBinarySensor per discovered zone (device class from zone type:
  smoke/water/freeze use latched-alarm bit; doors/motion use current condition)
- OmniZoneBypassedBinarySensor per zone (DIAGNOSTIC, PROBLEM)
- OmniSystemAcBinarySensor (POWER, prefers AcLost/AcRestored push)
- OmniSystemBatteryBinarySensor (BATTERY)
- OmniSystemTroubleBinarySensor (PROBLEM)

custom_components/omni_pca/helpers.py — pure functions extracted for testing:
- device_class_for_zone_type, is_binary_zone_type, use_latched_alarm_for_zone,
  prettify_name. 61 unit tests in tests/test_ha_helpers.py.

docs/JOURNEY.md — 4383-word raw chronological retrospective of the whole
arc from binary archive to working library. 18 dated sections including
the 2191-byte magic-number header validation moment, the two non-public
protocol quirks, the offline-panel comedy. Source material for future
writeups (intentionally raw, not polished).

264 tests pass (was 203, +61 helper tests). Ruff clean across all dirs.
This commit is contained in:
Ryan Malloy 2026-05-10 14:48:50 -06:00
parent c26db62959
commit e8ed7d1b89
7 changed files with 1800 additions and 196 deletions

View File

@ -1,4 +1,9 @@
"""HAI/Leviton Omni Panel integration for Home Assistant.""" """HAI/Leviton Omni Panel integration for Home Assistant.
Phase A entry point. Phase B will append additional platforms (light,
switch, climate, alarm_control_panel, sensor, scene, button, event) to
:data:`PLATFORMS`; nothing else here changes.
"""
from __future__ import annotations from __future__ import annotations
@ -44,7 +49,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
try: try:
await coordinator.async_config_entry_first_refresh() await coordinator.async_config_entry_first_refresh()
except ConfigEntryNotReady: except ConfigEntryNotReady:
# Re-raise so HA retries with backoff; clean up any half-open client. # Re-raise so HA retries with backoff; clean up any half-open client
# *and* the background event task spawned by the first refresh.
await coordinator.async_shutdown() await coordinator.async_shutdown()
raise raise
@ -54,7 +60,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry.""" """Unload a config entry.
``coordinator.async_shutdown()`` cancels the long-lived event-listener
task and closes the ``OmniClient`` socket, so HA's reload doesn't
leak a background coroutine or a half-open TCP connection.
"""
unloaded = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) unloaded = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unloaded: if unloaded:
coordinator: OmniDataUpdateCoordinator = hass.data[DOMAIN].pop(entry.entry_id) coordinator: OmniDataUpdateCoordinator = hass.data[DOMAIN].pop(entry.entry_id)

View File

@ -1,17 +1,55 @@
"""Binary sensor platform: one entity per Omni zone.""" """Binary sensor platform for the omni_pca integration.
Per-zone entities
-----------------
* :class:`OmniZoneBinarySensor` — one per discovered zone. ``is_on``
derives from :class:`~omni_pca.models.ZoneStatus`. The HA device class
is picked from the zone-type byte by
:func:`~custom_components.omni_pca.helpers.device_class_for_zone_type`.
* :class:`OmniZoneBypassedBinarySensor` — one per discovered zone.
Diagnostic entity (``problem`` device-class) that turns on when the
zone is currently bypassed by the user or auto-bypassed by the panel.
Panel-level entities
--------------------
* :class:`OmniSystemAcBinarySensor` — ``power``-class. ``is_on`` = AC OK.
Tracks both the periodic SystemStatus poll and any pushed
:class:`~omni_pca.events.AcLost` / :class:`~omni_pca.events.AcRestored`
events so HA reacts immediately on a power-blip.
* :class:`OmniSystemBatteryBinarySensor` — ``battery``-class. ``is_on``
when the backup battery reading drops below the panel's threshold
(or a :class:`~omni_pca.events.BatteryLow` event came in since the
last :class:`~omni_pca.events.BatteryRestored`).
* :class:`OmniSystemTroubleBinarySensor` — ``problem``-class. ``is_on``
when SystemStatus reports any troubles.
"""
from __future__ import annotations from __future__ import annotations
from typing import TYPE_CHECKING from typing import TYPE_CHECKING, Any
from homeassistant.components.binary_sensor import ( from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass, BinarySensorDeviceClass,
BinarySensorEntity, BinarySensorEntity,
) )
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity
from omni_pca.events import (
AcLost,
AcRestored,
BatteryLow,
BatteryRestored,
)
from .const import DOMAIN from .const import DOMAIN
from .coordinator import OmniDataUpdateCoordinator from .coordinator import OmniDataUpdateCoordinator
from .helpers import (
device_class_for_zone_type,
is_binary_zone_type,
prettify_name,
use_latched_alarm_for_zone,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
@ -19,61 +57,49 @@ if TYPE_CHECKING:
from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.entity_platform import AddEntitiesCallback
# Best-effort mapping from Omni zone-type byte (enuZoneType) to HA device
# class. Anything not listed falls back to OPENING — a sane default for
# perimeter contacts, which dominate residential installs. We pick this
# explicitly rather than guessing motion vs. door from the name.
#
# Reference: HAI_Shared/enuZoneType.cs (subset).
_ZONE_TYPE_TO_DEVICE_CLASS: dict[int, BinarySensorDeviceClass] = {
0: BinarySensorDeviceClass.OPENING, # Perimeter
1: BinarySensorDeviceClass.OPENING, # PerimeterEntryExit
2: BinarySensorDeviceClass.MOTION, # Interior (typically PIR)
3: BinarySensorDeviceClass.MOTION, # InteriorAuto
4: BinarySensorDeviceClass.SAFETY, # Tamper
5: BinarySensorDeviceClass.SMOKE, # Fire
6: BinarySensorDeviceClass.SAFETY, # PoliceEmergency
7: BinarySensorDeviceClass.SAFETY, # Duress
8: BinarySensorDeviceClass.SOUND, # Auxiliary
32: BinarySensorDeviceClass.SMOKE, # Auxiliary fire
33: BinarySensorDeviceClass.GAS,
34: BinarySensorDeviceClass.MOISTURE,
80: BinarySensorDeviceClass.MOTION, # AwayInterior
81: BinarySensorDeviceClass.MOTION, # NightInterior
}
async def async_setup_entry( async def async_setup_entry(
hass: HomeAssistant, hass: HomeAssistant,
entry: ConfigEntry, entry: ConfigEntry,
async_add_entities: AddEntitiesCallback, async_add_entities: AddEntitiesCallback,
) -> None: ) -> None:
"""Create one binary_sensor per zone the panel reported.""" """Create one binary_sensor per discovered zone, plus system-level entities."""
coordinator: OmniDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] coordinator: OmniDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
entities = [ entities: list[BinarySensorEntity] = []
OmniZoneBinarySensor(coordinator, index)
for index in sorted(coordinator.data.zones) for index in sorted(coordinator.data.zones):
] props = coordinator.data.zones[index]
if not is_binary_zone_type(props.zone_type):
# Analog zones (temperature, humidity) aren't binary sensors;
# Phase B will surface them on the sensor platform.
continue
entities.append(OmniZoneBinarySensor(coordinator, index))
entities.append(OmniZoneBypassedBinarySensor(coordinator, index))
entities.append(OmniSystemAcBinarySensor(coordinator))
entities.append(OmniSystemBatteryBinarySensor(coordinator))
entities.append(OmniSystemTroubleBinarySensor(coordinator))
async_add_entities(entities) async_add_entities(entities)
class OmniZoneBinarySensor( # --------------------------------------------------------------------------
# Zone entities
# --------------------------------------------------------------------------
class _OmniZoneBaseEntity(
CoordinatorEntity[OmniDataUpdateCoordinator], BinarySensorEntity CoordinatorEntity[OmniDataUpdateCoordinator], BinarySensorEntity
): ):
"""A single zone exposed as a binary_sensor.""" """Shared boilerplate for the two per-zone entities."""
_attr_has_entity_name = True _attr_has_entity_name = True
def __init__(self, coordinator: OmniDataUpdateCoordinator, index: int) -> None: def __init__(
self, coordinator: OmniDataUpdateCoordinator, index: int
) -> None:
super().__init__(coordinator) super().__init__(coordinator)
self._index = index self._index = index
self._attr_unique_id = f"{coordinator.unique_id}-zone-{index}"
self._attr_device_info = coordinator.device_info self._attr_device_info = coordinator.device_info
zone = coordinator.data.zones[index]
self._attr_name = _prettify(zone.name)
self._attr_device_class = _ZONE_TYPE_TO_DEVICE_CLASS.get(
zone.zone_type, BinarySensorDeviceClass.OPENING
)
@property @property
def available(self) -> bool: def available(self) -> bool:
@ -83,33 +109,202 @@ class OmniZoneBinarySensor(
and self._index in self.coordinator.data.zones and self._index in self.coordinator.data.zones
) )
@property
def _zone_props(self): # type: ignore[no-untyped-def]
return self.coordinator.data.zones.get(self._index)
@property
def _zone_status(self): # type: ignore[no-untyped-def]
return self.coordinator.data.zone_status.get(self._index)
class OmniZoneBinarySensor(_OmniZoneBaseEntity):
"""A single zone exposed as the primary binary_sensor.
Live ``is_on`` derives from the matching :class:`ZoneStatus`:
* For motion / smoke / water / freeze / panic / tamper zones we use
the *latched* tripped bit, so a brief pulse stays visible until the
user clears the alarm
(see :func:`~custom_components.omni_pca.helpers.use_latched_alarm_for_zone`).
* For door / window / opening zones we use the *current condition*
bit so HA tracks the door truthfully.
"""
def __init__(
self, coordinator: OmniDataUpdateCoordinator, index: int
) -> None:
super().__init__(coordinator, index)
self._attr_unique_id = f"{coordinator.unique_id}-zone-{index}"
props = coordinator.data.zones[index]
self._attr_name = prettify_name(props.name) or f"Zone {index}"
self._attr_device_class = BinarySensorDeviceClass(
device_class_for_zone_type(props.zone_type)
)
@property
def is_on(self) -> bool | None:
status = self._zone_status
props = self._zone_props
if status is None or props is None:
return None
# Pick the right bit based on zone type: latched-alarm zones
# (smoke, water, panic, …) stay "on" until cleared even after a
# one-shot trip, while contact / motion zones track the live
# current-condition bit.
if use_latched_alarm_for_zone(props.zone_type):
return status.is_in_alarm
return status.is_open
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
status = self._zone_status
props = self._zone_props
if status is None or props is None:
return None
return {
"zone_index": self._index,
"zone_type": props.zone_type,
"area": props.area,
"is_open": status.is_open,
"is_bypassed": status.is_bypassed,
"is_in_alarm": status.is_in_alarm,
"is_trouble": status.is_trouble,
"loop_reading": status.loop,
"raw_status": status.raw_status,
}
class OmniZoneBypassedBinarySensor(_OmniZoneBaseEntity):
"""Diagnostic entity that turns on when a zone is bypassed.
Surfacing bypass as its own entity (rather than just an attribute on
the primary sensor) lets automations key on it directly — e.g.
"remind me at 10pm if any zone is still bypassed".
"""
_attr_entity_category = EntityCategory.DIAGNOSTIC
_attr_device_class = BinarySensorDeviceClass.PROBLEM
def __init__(
self, coordinator: OmniDataUpdateCoordinator, index: int
) -> None:
super().__init__(coordinator, index)
self._attr_unique_id = f"{coordinator.unique_id}-zone-{index}-bypassed"
props = coordinator.data.zones[index]
base = prettify_name(props.name) or f"Zone {index}"
self._attr_name = f"{base} Bypassed"
@property
def is_on(self) -> bool | None:
status = self._zone_status
if status is None:
return None
return status.is_bypassed
# --------------------------------------------------------------------------
# System-level entities
# --------------------------------------------------------------------------
class _OmniSystemBaseEntity(
CoordinatorEntity[OmniDataUpdateCoordinator], BinarySensorEntity
):
"""Shared boilerplate for hub-scoped system binary sensors."""
_attr_has_entity_name = True
_attr_entity_category = EntityCategory.DIAGNOSTIC
def __init__(self, coordinator: OmniDataUpdateCoordinator) -> None:
super().__init__(coordinator)
self._attr_device_info = coordinator.device_info
class OmniSystemAcBinarySensor(_OmniSystemBaseEntity):
"""``power`` device class — on when mains AC is present.
Uses the most recent :class:`AcLost` / :class:`AcRestored` push event
as the authoritative signal, falling back to the SystemStatus battery
heuristic when no event has been seen yet (panel never lost AC).
"""
_attr_device_class = BinarySensorDeviceClass.POWER
def __init__(self, coordinator: OmniDataUpdateCoordinator) -> None:
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.unique_id}-system-ac"
self._attr_name = "AC Power"
@property @property
def is_on(self) -> bool | None: def is_on(self) -> bool | None:
data = self.coordinator.data data = self.coordinator.data
if data is None: if data is None:
return None return None
zone = data.zones.get(self._index) last = data.last_event
if zone is None: if isinstance(last, AcLost):
return None return False
return zone.is_open if isinstance(last, AcRestored):
return True
if data.system_status is not None:
return data.system_status.ac_ok
return None
class OmniSystemBatteryBinarySensor(_OmniSystemBaseEntity):
"""``battery`` device class — on when the backup battery is LOW."""
_attr_device_class = BinarySensorDeviceClass.BATTERY
def __init__(self, coordinator: OmniDataUpdateCoordinator) -> None:
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.unique_id}-system-battery"
self._attr_name = "Backup Battery"
@property @property
def extra_state_attributes(self) -> dict[str, int] | None: def is_on(self) -> bool | None:
data = self.coordinator.data data = self.coordinator.data
if data is None: if data is None:
return None return None
zone = data.zones.get(self._index) last = data.last_event
if zone is None: if isinstance(last, BatteryLow):
return True
if isinstance(last, BatteryRestored):
return False
if data.system_status is not None:
return not data.system_status.battery_ok
return None
@property
def extra_state_attributes(self) -> dict[str, int] | None:
if self.coordinator.data is None or self.coordinator.data.system_status is None:
return None return None
return { return {
"zone_index": zone.index, "battery_reading": self.coordinator.data.system_status.battery_reading,
"zone_type": zone.zone_type,
"area": zone.area,
"raw_status": zone.status,
"loop_reading": zone.loop,
} }
def _prettify(name: str) -> str: class OmniSystemTroubleBinarySensor(_OmniSystemBaseEntity):
"""Convert ``FRONT_DOOR`` → ``Front Door`` for HA-friendly display.""" """``problem`` device class — on when SystemStatus reports any troubles."""
return name.replace("_", " ").strip().title()
_attr_device_class = BinarySensorDeviceClass.PROBLEM
def __init__(self, coordinator: OmniDataUpdateCoordinator) -> None:
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.unique_id}-system-trouble"
self._attr_name = "System Trouble"
@property
def is_on(self) -> bool | None:
data = self.coordinator.data
if data is None or data.system_status is None:
return None
return bool(data.system_status.troubles)
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
if self.coordinator.data is None or self.coordinator.data.system_status is None:
return None
return {
"troubles": list(self.coordinator.data.system_status.troubles),
}

View File

@ -20,6 +20,15 @@ MANUFACTURER: Final = "HAI / Leviton"
# panel goes quiet. # panel goes quiet.
SCAN_INTERVAL: Final = timedelta(seconds=30) SCAN_INTERVAL: Final = timedelta(seconds=30)
# Background event-listener task name, surfaced to ``asyncio.all_tasks()``
# for diagnostics.
EVENT_TASK_NAME: Final = "omni_pca-event-listener"
# Upper bound for the discovery walk. The protocol caps object indices at
# uint16, but Omni panels never approach that — most installs have <100
# zones / units / areas, so we stop early when discovery returns EOD.
MAX_OBJECT_INDEX: Final = 0xFFFF
# Length, in characters, of a hex-encoded 16-byte controller key. # Length, in characters, of a hex-encoded 16-byte controller key.
CONTROLLER_KEY_HEX_LEN: Final = 32 CONTROLLER_KEY_HEX_LEN: Final = 32

View File

@ -1,23 +1,41 @@
"""DataUpdateCoordinator that owns the long-lived OmniClient connection. """DataUpdateCoordinator that owns the long-lived OmniClient connection.
The coordinator caches *static* panel topology (system info, zone names, Lifecycle
unit names, area names) on first refresh and only re-queries dynamic state ---------
on subsequent updates. Unsolicited messages from the panel are also routed 1. ``async_config_entry_first_refresh`` connects, runs a one-time
through here so binary sensors flip immediately without waiting for the *discovery* pass that enumerates every named zone / unit / area /
next 30s poll. thermostat / button / program on the panel, and seeds ``self.data``
with a populated :class:`OmniData`.
2. ``_async_update_data`` is then called every :data:`SCAN_INTERVAL` to
re-poll *live state only* (extended status for zones / units /
thermostats, basic status for areas).
3. A background task (:meth:`_run_event_listener`) consumes
:meth:`OmniClient.events` for the lifetime of the entry; whenever a
typed :class:`SystemEvent` arrives, the relevant slice of state is
patched in-place and ``async_set_updated_data`` fires so HA pushes
updates to subscribed entities without waiting for the next poll.
The library's :class:`OmniClient` is the *only* thing that talks to the
wire. We keep one client per coordinator and close it on shutdown; on a
recoverable :class:`OmniConnectionError` we drop and recreate it on the
next refresh, preserving the existing :class:`OmniData` so entities don't
flicker to "unavailable" between attempts.
""" """
from __future__ import annotations from __future__ import annotations
import asyncio
import contextlib
from dataclasses import dataclass, field, replace from dataclasses import dataclass, field, replace
from typing import TYPE_CHECKING
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from omni_pca.client import ObjectType, OmniClient from omni_pca.client import ObjectType as ClientObjectType
from omni_pca.client import OmniClient
from omni_pca.connection import ( from omni_pca.connection import (
ConnectionError as OmniConnectionError, ConnectionError as OmniConnectionError,
) )
@ -26,49 +44,88 @@ from omni_pca.connection import (
InvalidEncryptionKeyError, InvalidEncryptionKeyError,
RequestTimeoutError, RequestTimeoutError,
) )
from omni_pca.models import SystemInformation, SystemStatus, ZoneProperties from omni_pca.events import (
AcLost,
AcRestored,
AlarmActivated,
AlarmCleared,
ArmingChanged,
BatteryLow,
BatteryRestored,
SystemEvent,
UnitStateChanged,
ZoneStateChanged,
)
from omni_pca.models import (
OBJECT_TYPE_TO_PROPERTIES,
AreaProperties,
AreaStatus,
ButtonProperties,
ObjectType,
ProgramProperties,
SystemInformation,
SystemStatus,
ThermostatProperties,
ThermostatStatus,
UnitProperties,
UnitStatus,
ZoneProperties,
ZoneStatus,
)
from omni_pca.opcodes import OmniLink2MessageType
from .const import DOMAIN, LOGGER, MANUFACTURER, SCAN_INTERVAL from .const import (
DOMAIN,
EVENT_TASK_NAME,
LOGGER,
MANUFACTURER,
MAX_OBJECT_INDEX,
SCAN_INTERVAL,
)
if TYPE_CHECKING: # --------------------------------------------------------------------------
from omni_pca.message import Message # Public data shape exposed to entities
# --------------------------------------------------------------------------
@dataclass(slots=True)
class OmniZoneState:
"""Per-zone state combining static name with dynamic status."""
index: int
name: str
zone_type: int
area: int
status: int # raw zone status byte from the panel
loop: int
@property
def is_open(self) -> bool:
"""True when the zone is tripped / not-ready / open.
The Omni-Link II ``ZoneStatus`` byte packs current condition in the
low nibble. 0 = secure (closed). Any non-zero current condition is
treated as "not secure" for binary-sensor purposes.
"""
return (self.status & 0x03) != 0
@dataclass(slots=True) @dataclass(slots=True)
class OmniData: class OmniData:
"""Top-level coordinator data exposed to entities.""" """Snapshot of everything a coordinator's entities can read.
system_information: SystemInformation Discovery dictionaries (``zones``, ``units``, ``areas``,
system_status: SystemStatus | None ``thermostats``, ``buttons``, ``programs``) are populated once on
zones: dict[int, OmniZoneState] first refresh and never re-walked they describe panel topology,
unit_names: dict[int, str] = field(default_factory=dict) which only changes when the installer reprograms the controller and
area_names: dict[int, str] = field(default_factory=dict) the user reloads the integration.
Live ``*_status`` dictionaries are re-populated on every poll *and*
patched in-place from the event listener.
"""
system_info: SystemInformation
zones: dict[int, ZoneProperties] = field(default_factory=dict)
units: dict[int, UnitProperties] = field(default_factory=dict)
areas: dict[int, AreaProperties] = field(default_factory=dict)
thermostats: dict[int, ThermostatProperties] = field(default_factory=dict)
buttons: dict[int, ButtonProperties] = field(default_factory=dict)
programs: dict[int, ProgramProperties] = field(default_factory=dict)
zone_status: dict[int, ZoneStatus] = field(default_factory=dict)
unit_status: dict[int, UnitStatus] = field(default_factory=dict)
area_status: dict[int, AreaStatus] = field(default_factory=dict)
thermostat_status: dict[int, ThermostatStatus] = field(default_factory=dict)
system_status: SystemStatus | None = None
last_event: SystemEvent | None = None
# --------------------------------------------------------------------------
# Coordinator
# --------------------------------------------------------------------------
class OmniDataUpdateCoordinator(DataUpdateCoordinator[OmniData]): class OmniDataUpdateCoordinator(DataUpdateCoordinator[OmniData]):
"""Coordinator that owns one OmniClient and one panel device.""" """Coordinator that owns one :class:`OmniClient` and one panel device."""
config_entry: ConfigEntry config_entry: ConfigEntry
@ -92,11 +149,9 @@ class OmniDataUpdateCoordinator(DataUpdateCoordinator[OmniData]):
self._port = port self._port = port
self._controller_key = controller_key self._controller_key = controller_key
self._client: OmniClient | None = None self._client: OmniClient | None = None
self._static_loaded = False self._discovery_done = False
self._zone_names: dict[int, str] = {} self._discovered: OmniData | None = None
self._unit_names: dict[int, str] = {} self._event_task: asyncio.Task[None] | None = None
self._area_names: dict[int, str] = {}
self._system_information: SystemInformation | None = None
# ---- public surface -------------------------------------------------- # ---- public surface --------------------------------------------------
@ -105,13 +160,20 @@ class OmniDataUpdateCoordinator(DataUpdateCoordinator[OmniData]):
"""Stable identifier for this panel (host:port).""" """Stable identifier for this panel (host:port)."""
return f"{self._host}:{self._port}" return f"{self._host}:{self._port}"
@property
def client(self) -> OmniClient:
"""The live OmniClient. Raises if the coordinator hasn't connected yet."""
if self._client is None:
raise RuntimeError("OmniClient is not connected")
return self._client
@property @property
def device_info(self) -> DeviceInfo: def device_info(self) -> DeviceInfo:
"""DeviceInfo for the single hub device this coordinator represents.""" """DeviceInfo for the single hub device this coordinator represents."""
info = self._system_information info = self._discovered.system_info if self._discovered is not None else None
return DeviceInfo( return DeviceInfo(
identifiers={(DOMAIN, self.unique_id)}, identifiers={(DOMAIN, self.unique_id)},
name="Omni Pro II", name=info.model_name if info is not None else "Omni Panel",
manufacturer=MANUFACTURER, manufacturer=MANUFACTURER,
model=info.model_name if info is not None else None, model=info.model_name if info is not None else None,
sw_version=info.firmware_version if info is not None else None, sw_version=info.firmware_version if info is not None else None,
@ -119,14 +181,9 @@ class OmniDataUpdateCoordinator(DataUpdateCoordinator[OmniData]):
) )
async def async_shutdown(self) -> None: async def async_shutdown(self) -> None:
"""Tear down the client connection on unload.""" """Tear down the event task and the client connection on unload."""
if self._client is not None: await self._cancel_event_task()
client = self._client await self._drop_client()
self._client = None
try:
await client.__aexit__(None, None, None)
except Exception:
LOGGER.debug("error closing OmniClient", exc_info=True)
await super().async_shutdown() await super().async_shutdown()
# ---- DataUpdateCoordinator hook ------------------------------------- # ---- DataUpdateCoordinator hook -------------------------------------
@ -134,29 +191,43 @@ class OmniDataUpdateCoordinator(DataUpdateCoordinator[OmniData]):
async def _async_update_data(self) -> OmniData: async def _async_update_data(self) -> OmniData:
try: try:
client = await self._ensure_connected() client = await self._ensure_connected()
if not self._static_loaded: if not self._discovery_done:
await self._load_static(client) self._discovered = await self._run_discovery(client)
self._discovery_done = True
self._start_event_task()
assert self._discovered is not None
base = self._discovered
zone_status = await self._poll_zone_status(client, base.zones)
unit_status = await self._poll_unit_status(client, base.units)
area_status = await self._poll_area_status(client, base.areas)
thermostat_status = await self._poll_thermostat_status(
client, base.thermostats
)
system_status = await self._safe_system_status(client) system_status = await self._safe_system_status(client)
zones = await self._snapshot_zones(client)
except (InvalidEncryptionKeyError, HandshakeError) as err: except (InvalidEncryptionKeyError, HandshakeError) as err:
# Surface as auth failure so HA triggers the reauth flow. # Surface as auth failure so HA triggers the reauth flow.
from homeassistant.exceptions import ConfigEntryAuthFailed await self._drop_client()
raise ConfigEntryAuthFailed(str(err)) from err raise ConfigEntryAuthFailed(str(err)) from err
except (OmniConnectionError, RequestTimeoutError, OSError) as err: except (OmniConnectionError, RequestTimeoutError, OSError) as err:
await self._drop_client() await self._drop_client()
raise UpdateFailed(f"panel unreachable: {err}") from err raise UpdateFailed(f"panel unreachable: {err}") from err
assert self._system_information is not None # set by _load_static # Preserve any last_event already captured by the event task; the
return OmniData( # poll path doesn't see push events so it must not overwrite it.
system_information=self._system_information, last_event = self.data.last_event if self.data is not None else None
return replace(
self._discovered,
zone_status=zone_status,
unit_status=unit_status,
area_status=area_status,
thermostat_status=thermostat_status,
system_status=system_status, system_status=system_status,
zones=zones, last_event=last_event,
unit_names=dict(self._unit_names),
area_names=dict(self._area_names),
) )
# ---- internals ------------------------------------------------------- # ---- connection management ------------------------------------------
async def _ensure_connected(self) -> OmniClient: async def _ensure_connected(self) -> OmniClient:
if self._client is not None: if self._client is not None:
@ -166,14 +237,9 @@ class OmniDataUpdateCoordinator(DataUpdateCoordinator[OmniData]):
port=self._port, port=self._port,
controller_key=self._controller_key, controller_key=self._controller_key,
) )
# Manually drive __aenter__ so we can keep the connection open # Drive __aenter__ manually so the client survives across update
# across update cycles instead of using `async with`. # cycles; we close it explicitly on shutdown / failure.
await client.__aenter__() await client.__aenter__()
try:
await client.subscribe(self._handle_unsolicited)
except Exception:
await client.__aexit__(None, None, None)
raise
self._client = client self._client = client
return client return client
@ -184,83 +250,451 @@ class OmniDataUpdateCoordinator(DataUpdateCoordinator[OmniData]):
self._client = None self._client = None
try: try:
await client.__aexit__(None, None, None) await client.__aexit__(None, None, None)
except Exception: except Exception: # pragma: no cover - best-effort cleanup
LOGGER.debug("error during reconnect cleanup", exc_info=True) LOGGER.debug("error during client cleanup", exc_info=True)
async def _load_static(self, client: OmniClient) -> None: # ---- discovery -------------------------------------------------------
self._system_information = await client.get_system_information()
self._zone_names = await client.list_zone_names() async def _run_discovery(self, client: OmniClient) -> OmniData:
# Unit / area names are best-effort; some panels may not have any. """Walk every object type once and stash the static topology."""
try: system_info = await client.get_system_information()
self._unit_names = await client.list_unit_names()
except Exception: zones = await self._discover_zones(client)
LOGGER.debug("list_unit_names failed; continuing", exc_info=True) units = await self._discover_units(client)
self._unit_names = {} areas = await self._discover_areas(client)
try: thermostats = await self._discover_thermostats(client)
self._area_names = await client.list_area_names() buttons = await self._discover_buttons(client)
except Exception: programs = await self._discover_programs(client)
LOGGER.debug("list_area_names failed; continuing", exc_info=True)
self._area_names = {} LOGGER.info(
self._static_loaded = True "omni_pca discovery: %d zones, %d units, %d areas, "
LOGGER.debug( "%d thermostats, %d buttons, %d programs",
"loaded static topology: %d zones, %d units, %d areas", len(zones),
len(self._zone_names), len(units),
len(self._unit_names), len(areas),
len(self._area_names), len(thermostats),
len(buttons),
len(programs),
)
return OmniData(
system_info=system_info,
zones=zones,
units=units,
areas=areas,
thermostats=thermostats,
buttons=buttons,
programs=programs,
) )
async def _safe_system_status(self, client: OmniClient) -> SystemStatus | None: async def _discover_zones(
self, client: OmniClient
) -> dict[int, ZoneProperties]:
names = await self._best_effort(client.list_zone_names, default={})
out: dict[int, ZoneProperties] = {}
for index in sorted(names):
try:
props = await client.get_object_properties(
ClientObjectType.ZONE, index
)
except (OmniConnectionError, RequestTimeoutError):
raise
except Exception:
LOGGER.debug("zone %d properties fetch failed", index, exc_info=True)
continue
if isinstance(props, ZoneProperties):
out[index] = props
return out
async def _discover_units(
    self, client: OmniClient
) -> dict[int, UnitProperties]:
    """Fetch per-index properties for every named unit.

    Transport errors (connection loss, request timeout) propagate so
    the coordinator can drop the client and reconnect; any other
    per-index failure is logged at debug level and that index skipped.
    """
    discovered: dict[int, UnitProperties] = {}
    named = await self._best_effort(client.list_unit_names, default={})
    for idx in sorted(named):
        try:
            result = await client.get_object_properties(
                ClientObjectType.UNIT, idx
            )
        except (OmniConnectionError, RequestTimeoutError):
            raise
        except Exception:
            LOGGER.debug("unit %d properties fetch failed", idx, exc_info=True)
            continue
        if isinstance(result, UnitProperties):
            discovered[idx] = result
    return discovered
async def _discover_areas(
    self, client: OmniClient
) -> dict[int, AreaProperties]:
    """Fetch per-index properties for every named area.

    Same contract as the other discovery walkers: transport errors
    re-raise (so the coordinator reconnects), anything else is a
    debug-logged skip of that single index.
    """
    discovered: dict[int, AreaProperties] = {}
    named = await self._best_effort(client.list_area_names, default={})
    for idx in sorted(named):
        try:
            result = await client.get_object_properties(
                ClientObjectType.AREA, idx
            )
        except (OmniConnectionError, RequestTimeoutError):
            raise
        except Exception:
            LOGGER.debug("area %d properties fetch failed", idx, exc_info=True)
            continue
        if isinstance(result, AreaProperties):
            discovered[idx] = result
    return discovered
async def _discover_thermostats(
    self, client: OmniClient
) -> dict[int, ThermostatProperties]:
    """Discover thermostats by driving the wire directly.

    v1.0 of the library only wires zone/unit/area parsers into
    :meth:`OmniClient.get_object_properties`; thermostats exist only in
    :data:`OBJECT_TYPE_TO_PROPERTIES` on the model side. So instead of
    the high-level call we walk the low-level connection ourselves and
    parse each reply with the model's own class.
    """
    return await self._walk_properties(
        client, ObjectType.THERMOSTAT, ThermostatProperties
    )
async def _discover_buttons(
    self, client: OmniClient
) -> dict[int, ButtonProperties]:
    """Discover buttons via the same low-level property walk as thermostats."""
    walked = await self._walk_properties(
        client, ObjectType.BUTTON, ButtonProperties
    )
    return walked
async def _discover_programs(
    self, client: OmniClient
) -> dict[int, ProgramProperties]:
    """Return an empty mapping: programs are not discoverable in v1.0.

    The Properties opcode cannot reach programs (the C# side uses a
    separate request/reply pair), so there is no on-the-wire path to
    the "named programs" the spec asks for. The field stays on
    ``OmniData`` so Phase B can plug in real discovery the moment the
    library exposes it; an empty dict is the honest answer today.
    """
    _ = client, ProgramProperties  # intentionally unused until Phase B
    return {}
async def _walk_properties(
    self,
    client: OmniClient,
    object_type: ObjectType,
    parser: type,
) -> dict[int, object]:
    """Walk every defined object of ``object_type`` and parse with ``parser``.

    Mirrors the strategy used by ``OmniClient._walk_named_objects`` but
    works for any model in :data:`OBJECT_TYPE_TO_PROPERTIES` (the
    client's internal parser table only covers zones/units/areas in
    v1.0). We drive ``RequestProperties`` directly on the connection
    so we don't have to monkey-patch the library.

    Returns a ``{index: parsed_properties}`` mapping containing only
    objects with a non-empty name. Any break condition (timeout, EOD,
    unexpected opcode, parse failure, missing index) ends the walk and
    returns whatever was collected so far.
    """
    # Bail out early if we have no parser or the model table doesn't
    # know this object type — nothing we could meaningfully decode.
    if parser is None or OBJECT_TYPE_TO_PROPERTIES.get(int(object_type)) is None:
        return {}
    out: dict[int, object] = {}
    cursor = 0
    conn = client.connection
    # Manual request/reply loop with relative_direction=1 (=next).
    # The range() bound is a hard cap so a misbehaving panel that keeps
    # replying can never spin us forever.
    for _ in range(MAX_OBJECT_INDEX):
        # RequestProperties payload layout:
        # [object_type][index_hi][index_lo][direction][filter1..3]
        payload = bytes(
            [
                int(object_type),
                (cursor >> 8) & 0xFF,
                cursor & 0xFF,
                1,  # relative_direction = next
                0, 0, 0,  # filter1..3
            ]
        )
        try:
            reply = await conn.request(
                OmniLink2MessageType.RequestProperties, payload
            )
        except RequestTimeoutError:
            # Treat a timeout mid-walk as end-of-walk rather than a
            # fatal transport error: keep whatever we already found.
            break
        if reply.opcode == int(OmniLink2MessageType.EOD):
            # End-of-data: the panel has no object past `cursor`.
            break
        if reply.opcode != int(OmniLink2MessageType.Properties):
            # Unexpected reply (e.g. NAK) — stop walking this type.
            break
        try:
            obj = parser.parse(reply.payload)
        except Exception:
            LOGGER.debug(
                "parse failed for %s past index %d",
                object_type.name,
                cursor,
                exc_info=True,
            )
            break
        # Object name being empty is OK for buttons/programs but the
        # spec says "named only" — we still keep the entry as a
        # candidate; entity setup filters by truthiness.
        index_attr = getattr(obj, "index", None)
        name_attr = getattr(obj, "name", "")
        if index_attr is None:
            # Parsed object without an index — can't advance the cursor.
            break
        if name_attr:
            out[index_attr] = obj
        # Advance past the object we just received ("next" is relative
        # to the cursor we send, so this steps through every index).
        cursor = index_attr
        if cursor >= MAX_OBJECT_INDEX:
            break
    return out
@staticmethod
async def _best_effort(coro_fn, *, default):
"""Call ``coro_fn()`` and swallow non-transport errors, returning ``default``.
We let :class:`OmniConnectionError` / :class:`RequestTimeoutError`
propagate so the coordinator can drop the client and reconnect;
anything else (a parse failure on a particular reply, NAK on a
feature the panel doesn't support) is downgraded to a debug log.
"""
try:
return await coro_fn()
except (OmniConnectionError, RequestTimeoutError):
raise
except Exception:
LOGGER.debug("best-effort %s failed", coro_fn.__name__, exc_info=True)
return default
# ---- live polling ----------------------------------------------------
async def _poll_zone_status(
    self, client: OmniClient, zones: dict[int, ZoneProperties]
) -> dict[int, ZoneStatus]:
    """Poll extended status for zones 1..max(zones).

    Transport errors re-raise; a soft failure falls back to the last
    known zone_status (or ``{}`` on the very first poll).
    """
    if not zones:
        return {}
    highest = max(zones)
    try:
        rows = await client.get_extended_status(ObjectType.ZONE, 1, highest)
    except (OmniConnectionError, RequestTimeoutError):
        raise
    except Exception:
        LOGGER.debug("zone extended_status poll failed", exc_info=True)
        if self.data is None:
            return {}
        return self.data.zone_status
    fresh: dict[int, ZoneStatus] = {}
    for row in rows:
        # Keep only records for zones discovery actually found.
        if isinstance(row, ZoneStatus) and row.index in zones:
            fresh[row.index] = row
    return fresh
async def _poll_unit_status(
    self, client: OmniClient, units: dict[int, UnitProperties]
) -> dict[int, UnitStatus]:
    """Poll extended status for units 1..max(units).

    Same error contract as the zone poll: transport errors re-raise,
    soft failures reuse the previous snapshot.
    """
    if not units:
        return {}
    highest = max(units)
    try:
        rows = await client.get_extended_status(ObjectType.UNIT, 1, highest)
    except (OmniConnectionError, RequestTimeoutError):
        raise
    except Exception:
        LOGGER.debug("unit extended_status poll failed", exc_info=True)
        if self.data is None:
            return {}
        return self.data.unit_status
    fresh: dict[int, UnitStatus] = {}
    for row in rows:
        if isinstance(row, UnitStatus) and row.index in units:
            fresh[row.index] = row
    return fresh
async def _poll_area_status(
    self, client: OmniClient, areas: dict[int, AreaProperties]
) -> dict[int, AreaStatus]:
    """Poll (non-extended) object status for areas 1..max(areas).

    Areas use ``get_object_status`` rather than the extended variant;
    error contract matches the other pollers.
    """
    if not areas:
        return {}
    highest = max(areas)
    try:
        rows = await client.get_object_status(ObjectType.AREA, 1, highest)
    except (OmniConnectionError, RequestTimeoutError):
        raise
    except Exception:
        LOGGER.debug("area status poll failed", exc_info=True)
        if self.data is None:
            return {}
        return self.data.area_status
    fresh: dict[int, AreaStatus] = {}
    for row in rows:
        if isinstance(row, AreaStatus) and row.index in areas:
            fresh[row.index] = row
    return fresh
async def _poll_thermostat_status(
    self, client: OmniClient, thermostats: dict[int, ThermostatProperties]
) -> dict[int, ThermostatStatus]:
    """Poll extended status for thermostats 1..max(thermostats).

    Error contract matches the other pollers: transport errors
    re-raise, soft failures reuse the previous snapshot.
    """
    if not thermostats:
        return {}
    highest = max(thermostats)
    try:
        rows = await client.get_extended_status(
            ObjectType.THERMOSTAT, 1, highest
        )
    except (OmniConnectionError, RequestTimeoutError):
        raise
    except Exception:
        LOGGER.debug("thermostat extended_status poll failed", exc_info=True)
        if self.data is None:
            return {}
        return self.data.thermostat_status
    fresh: dict[int, ThermostatStatus] = {}
    for row in rows:
        if isinstance(row, ThermostatStatus) and row.index in thermostats:
            fresh[row.index] = row
    return fresh
async def _safe_system_status(
    self, client: OmniClient
) -> SystemStatus | None:
    """Best-effort system-status fetch.

    Transport errors (connection loss, request timeout) propagate so
    the coordinator can reconnect; any other failure is logged at
    debug level and reported as ``None``.

    NOTE(review): this span arrived as two diff columns fused per
    line; this body is the de-interleaved new-revision text.
    """
    try:
        return await client.get_system_status()
    except (OmniConnectionError, RequestTimeoutError):
        raise
    except Exception:
        LOGGER.debug("get_system_status failed", exc_info=True)
        return None
async def _snapshot_zones(self, client: OmniClient) -> dict[int, OmniZoneState]: # ---- event listener --------------------------------------------------
zones: dict[int, OmniZoneState] = {}
for index, name in self._zone_names.items():
try:
props = await client.get_object_properties(ObjectType.ZONE, index)
except (OmniConnectionError, RequestTimeoutError):
raise
except Exception:
LOGGER.debug("zone %d snapshot failed; skipping", index, exc_info=True)
continue
if not isinstance(props, ZoneProperties):
continue
zones[index] = OmniZoneState(
index=index,
name=name,
zone_type=props.zone_type,
area=props.area,
status=props.status,
loop=props.loop,
)
return zones
async def _handle_unsolicited(self, msg: Message) -> None: def _start_event_task(self) -> None:
"""Push-driven update path. if self._event_task is not None and not self._event_task.done():
return
self._event_task = self.config_entry.async_create_background_task(
self.hass,
self._run_event_listener(),
EVENT_TASK_NAME,
)
We don't try to be clever about parsing every unsolicited opcode async def _cancel_event_task(self) -> None:
here. The simplest correct behavior is to nudge HA to refetch on if self._event_task is None:
any panel-initiated message; entities will see fresh zone state return
within one round-trip. task = self._event_task
self._event_task = None
if not task.done():
task.cancel()
with contextlib.suppress(asyncio.CancelledError, Exception):
await task
async def _run_event_listener(self) -> None:
"""Background loop: consume typed events and push state to entities.
Re-establishes the iterator on each connection cycle. If the
client gets dropped (transport error during a poll), we exit; the
next ``_async_update_data`` will reconnect and respawn this task.
""" """
LOGGER.debug("unsolicited opcode %#04x payload=%s", msg.opcode, msg.payload.hex()) client = self._client
# Schedule a refresh on the event loop without awaiting from the if client is None:
# subscriber callback (which lives in the connection's read loop).
self.hass.async_create_task(self._refresh_after_push())
async def _refresh_after_push(self) -> None:
if self.data is None or self._client is None:
return return
try: try:
zones = await self._snapshot_zones(self._client) async for event in client.events():
except (OmniConnectionError, RequestTimeoutError): self._apply_event(event)
await self.async_request_refresh() except asyncio.CancelledError:
raise
except (OmniConnectionError, RequestTimeoutError, OSError):
LOGGER.debug("event listener exited on transport error", exc_info=True)
except Exception: # pragma: no cover - defensive
LOGGER.exception("event listener crashed")
def _apply_event(self, event: SystemEvent) -> None:
"""Patch ``self.data`` in place for the relevant event subclass."""
data = self.data
if data is None:
return return
# Mutate a copy so listeners see a brand-new object identity. new_data = self._patched_for_event(data, event)
new_data = replace(self.data, zones=zones) if new_data is not None:
self.async_set_updated_data(new_data) self.async_set_updated_data(new_data)
def _patched_for_event(
    self, data: OmniData, event: SystemEvent
) -> OmniData | None:
    """Return a new OmniData reflecting ``event``, or ``None`` to skip.

    Pure-ish (mutates only the dict members of the returned snapshot).
    Split out so it stays unit-testable without HA. Every branch
    returns via ``dataclasses.replace`` so listeners always see a new
    object identity, and ``last_event`` is recorded on every path.
    """
    if isinstance(event, ZoneStateChanged):
        existing = data.zone_status.get(event.zone_index)
        if existing is None:
            # We saw a zone the discovery missed — synthesize a record
            # so entities at least see the open/closed flip.
            new_status = ZoneStatus(
                index=event.zone_index,
                raw_status=0x01 if event.is_open else 0x00,
                loop=0,
            )
        else:
            # Toggle low-2-bit current condition; preserve the rest
            # (latched-alarm / arming bits live in the upper bits).
            base = existing.raw_status & ~0x03
            new_raw = base | (0x01 if event.is_open else 0x00)
            new_status = ZoneStatus(
                index=existing.index,
                raw_status=new_raw,
                loop=existing.loop,
            )
        # Copy the dict so the previous snapshot stays untouched.
        patched = dict(data.zone_status)
        patched[event.zone_index] = new_status
        return replace(data, zone_status=patched, last_event=event)
    if isinstance(event, UnitStateChanged):
        existing = data.unit_status.get(event.unit_index)
        new_state = 1 if event.is_on else 0
        if existing is None:
            # Unknown unit: synthesize a minimal on/off record.
            new_status = UnitStatus(
                index=event.unit_index,
                state=new_state,
                time_remaining_secs=0,
            )
        else:
            # Preserve a brightness level if we have one — the event
            # only carries on/off.
            if existing.state >= 100 and event.is_on:
                new_status = existing
            else:
                new_status = UnitStatus(
                    index=existing.index,
                    state=new_state,
                    time_remaining_secs=existing.time_remaining_secs,
                )
        patched = dict(data.unit_status)
        patched[event.unit_index] = new_status
        return replace(data, unit_status=patched, last_event=event)
    if isinstance(event, ArmingChanged):
        existing = data.area_status.get(event.area_index)
        if existing is None:
            if event.area_index == 0:
                # System-wide arming change with no specific area —
                # let the next poll resync.
                return replace(data, last_event=event)
            # Unknown area: synthesize a record carrying the new mode;
            # timers/alarms start at zero until the next poll.
            new_status = AreaStatus(
                index=event.area_index,
                mode=event.new_mode,
                last_user=event.user_index,
                entry_timer_secs=0,
                exit_timer_secs=0,
                alarms=0,
            )
        else:
            # Patch mode + last_user; keep timers and alarm bits.
            new_status = AreaStatus(
                index=existing.index,
                mode=event.new_mode,
                last_user=event.user_index,
                entry_timer_secs=existing.entry_timer_secs,
                exit_timer_secs=existing.exit_timer_secs,
                alarms=existing.alarms,
            )
        patched = dict(data.area_status)
        patched[new_status.index] = new_status
        return replace(data, area_status=patched, last_event=event)
    if isinstance(event, AlarmActivated | AlarmCleared):
        # Force a poll so AreaStatus.alarms picks up the current bits.
        self.hass.async_create_task(self.async_request_refresh())
        return replace(data, last_event=event)
    if isinstance(event, AcLost | AcRestored | BatteryLow | BatteryRestored):
        # Just stash the event; the system_* binary sensors derive
        # their state from `last_event` alone.
        return replace(data, last_event=event)
    # Other event families are interesting but don't move any
    # currently-modeled state — record them for diagnostics so
    # subscribers can still react via the last_event attribute.
    return replace(data, last_event=event)

View File

@ -0,0 +1,151 @@
"""Pure helper functions for the omni_pca integration.

Anything in this module is deliberately decoupled from Home Assistant and
the live OmniClient so it can be unit-tested without either dependency.
The HA-side code (binary_sensor, etc.) imports these and converts the
returned strings to ``BinarySensorDeviceClass`` enum members.
"""

from __future__ import annotations

from typing import Final

# String values that correspond 1:1 to HA's BinarySensorDeviceClass enum
# members. We return strings here (instead of importing the enum) so this
# module stays importable without Home Assistant in the venv.
DEVICE_CLASS_OPENING: Final = "opening"
DEVICE_CLASS_DOOR: Final = "door"
DEVICE_CLASS_WINDOW: Final = "window"
DEVICE_CLASS_MOTION: Final = "motion"
DEVICE_CLASS_SMOKE: Final = "smoke"
DEVICE_CLASS_GAS: Final = "gas"
DEVICE_CLASS_MOISTURE: Final = "moisture"
DEVICE_CLASS_TAMPER: Final = "tamper"
DEVICE_CLASS_SAFETY: Final = "safety"
DEVICE_CLASS_PROBLEM: Final = "problem"
DEVICE_CLASS_SOUND: Final = "sound"
DEVICE_CLASS_HEAT: Final = "heat"
DEVICE_CLASS_COLD: Final = "cold"

# Maps the Omni ``enuZoneType`` byte (see ``omni_pca.models.ZoneType``) to
# a HA ``BinarySensorDeviceClass`` string. The mapping is a judgement
# call — Omni's zone-type taxonomy is finer-grained than HA's binary
# sensor classes, so we collapse a few buckets:
#
# * Perimeter / entry-exit / latching variants -> opening
#   (most installs use these for door/window contacts)
# * Interior / night / away interior -> motion (PIRs)
# * Fire family (FIRE / FIRE_EMERGENCY) -> smoke; FIRE_TAMPER -> tamper
# * Water / freeze -> moisture / cold
# * Gas -> gas
# * Tamper / latching tamper -> tamper
# * Panic / police / silent duress / aux-emerg -> safety
# * Temperature / humidity / energy -> not a binary sensor at all
#   (callers should skip — see ``is_binary_zone_type``)
#
# The default for any unmapped value is "opening", which matches the
# dominant residential install (perimeter contact).
_ZONE_TYPE_TO_DEVICE_CLASS: dict[int, str] = {
    # Burglary / contact zones
    0: DEVICE_CLASS_OPENING,  # ENTRY_EXIT
    1: DEVICE_CLASS_OPENING,  # PERIMETER
    4: DEVICE_CLASS_OPENING,  # DOUBLE_ENTRY_DELAY
    5: DEVICE_CLASS_OPENING,  # QUAD_ENTRY_DELAY
    6: DEVICE_CLASS_OPENING,  # LATCHING_PERIMETER
    67: DEVICE_CLASS_OPENING,  # EXIT_TERMINATOR
    # Motion zones
    2: DEVICE_CLASS_MOTION,  # NIGHT_INTERIOR
    3: DEVICE_CLASS_MOTION,  # AWAY_INTERIOR
    7: DEVICE_CLASS_MOTION,  # LATCHING_NIGHT_INTERIOR
    8: DEVICE_CLASS_MOTION,  # LATCHING_AWAY_INTERIOR
    # Panic / duress / police family
    16: DEVICE_CLASS_SAFETY,  # PANIC
    17: DEVICE_CLASS_SAFETY,  # POLICE_EMERGENCY
    18: DEVICE_CLASS_SAFETY,  # SILENT_DURESS
    48: DEVICE_CLASS_SAFETY,  # AUX_EMERGENCY
    # Tamper
    19: DEVICE_CLASS_TAMPER,  # TAMPER
    20: DEVICE_CLASS_TAMPER,  # LATCHING_TAMPER
    56: DEVICE_CLASS_TAMPER,  # FIRE_TAMPER (treat as tamper, not smoke)
    # Fire family
    32: DEVICE_CLASS_SMOKE,  # FIRE
    33: DEVICE_CLASS_SMOKE,  # FIRE_EMERGENCY
    # Other safety / environmental
    34: DEVICE_CLASS_GAS,  # GAS
    49: DEVICE_CLASS_PROBLEM,  # TROUBLE
    54: DEVICE_CLASS_COLD,  # FREEZE
    55: DEVICE_CLASS_MOISTURE,  # WATER
    # Sound / aux
    64: DEVICE_CLASS_SOUND,  # AUXILIARY (loose mapping; use sound)
    65: DEVICE_CLASS_OPENING,  # KEYSWITCH
    66: DEVICE_CLASS_OPENING,  # SHUNT_LOCK
}

# Zone-type bytes that don't map to a binary sensor at all — they're
# numeric readings (temperature, humidity, energy) and should be exposed
# via the sensor platform in Phase B instead. We skip these in
# binary_sensor setup.
_ANALOG_ZONE_TYPES: frozenset[int] = frozenset({
    80,  # ENERGY_SAVER
    81,  # OUTDOOR_TEMP
    82,  # TEMPERATURE
    83,  # TEMP_ALARM
    84,  # HUMIDITY
})
def device_class_for_zone_type(zone_type: int) -> str:
    """Map an Omni zone-type byte to a HA ``BinarySensorDeviceClass`` value.

    Unmapped bytes fall back to ``"opening"`` — the most common
    contact-sensor case. Callers should consult
    :func:`is_binary_zone_type` first to decide whether the zone makes
    sense as a binary sensor at all.
    """
    try:
        return _ZONE_TYPE_TO_DEVICE_CLASS[zone_type]
    except KeyError:
        return DEVICE_CLASS_OPENING
def is_binary_zone_type(zone_type: int) -> bool:
    """Return ``True`` when this zone type belongs on binary_sensor.

    Analog/numeric zone types (temperature, humidity, energy savers)
    are sensor-platform candidates instead, so discovery callers use
    this predicate to filter them out without knowing the details.
    """
    if zone_type in _ANALOG_ZONE_TYPES:
        return False
    return True
# Zone types whose live ``is_on`` semantics should be derived from the
# *latched* alarm bit (alarm tripped) rather than the current condition
# bit (open/closed). Smoke/fire/gas/water/freeze/panic are latching by
# nature — a smoke detector that flashed for one second still wants to
# read "on" until the user clears the alarm. Kept in sync with the
# device-class mapping above.
_LATCHED_ALARM_ZONE_TYPES: frozenset[int] = frozenset({
    16, 17, 18,  # panic family
    19, 20, 56,  # tamper family
    32, 33,  # fire family
    34,  # gas
    48,  # aux emergency
    54, 55,  # freeze, water
})
def use_latched_alarm_for_zone(zone_type: int) -> bool:
    """Return ``True`` when ``is_on`` should track the latched-alarm bit.

    Door/window/motion zones report the live *current condition* bit
    (open/closed). Latching alarm zones (smoke, water, panic, etc.)
    instead report the latched-tripped bit, so a momentary sensor blip
    stays visible to the user until the alarm is cleared.
    """
    latched = zone_type in _LATCHED_ALARM_ZONE_TYPES
    return latched
def prettify_name(name: str) -> str:
    """Turn a panel-style ``FRONT_DOOR`` name into ``Front Door``.

    An empty string comes back empty, so callers can rely on
    truthiness to mean "no name configured on this index".
    """
    spaced = name.replace("_", " ")
    return spaced.strip().title()

695
docs/JOURNEY.md Normal file
View File

@ -0,0 +1,695 @@
# JOURNEY
Raw chronological notes from a few days reverse-engineering HAI's PC Access
3.17, then writing a Python library and a Home Assistant integration to
talk to the panel directly. Dated. Append-only-ish.
---
## 2026-05-10 morning — the pile of binaries
Started with a directory called `PC Access/` that had clearly been zipped
up off a Mac and handed around. The giveaway was `._*` files next to every
real file:
```
-rw------- 1 kdm kdm 120 Aug 15 2016 ._Newtonsoft.Json.dll
-rw------- 1 kdm kdm 484352 Aug 15 2016 Newtonsoft.Json.dll
```
That's AppleDouble cruft: macOS extended attributes shimmed into companion
files when an HFS+ volume gets archived to a non-Apple filesystem. 120 bytes
of resource fork garbage per real file. Useless. Touched everything from
the PC Access install date (Mar 2018) all the way back to a 2006 firmware
updater. Whoever extracted this had been carrying it across Macs for years.
What we actually had:
| File | Size | What it is |
|------|-----:|-----|
| `PCA3U_EN.exe` | 5.4 MB | The PC Access GUI, a .NET assembly (v3.17.0.843, 2018-01-02) |
| `PCA1106W.exe` | 3.3 MB | Older native C++ version from 2008 |
| `f_update.exe` | 437 KB | Native firmware updater (2006) |
| `OT7FileUploaderLib.dll` | 16 KB | OmniTouch 7 firmware uploader |
| `Our House.pca` | 144 KB | A panel config file. High entropy. Not ours. |
| `PCA01.CFG` | 318 B | App settings. Also encrypted. |
| `Serial Number.txt` | 20 B | A 20-char license key |
`Our House.pca` was the interesting one. Entropy 7.994 bits per byte —
either compressed, encrypted, or both. No magic bytes. No structure
visible in the first 256 bytes. It also had someone else's account name
embedded in the metadata: this panel had been bought used and shipped
with the previous owner's config still on it. Held that thought.
`file PCA3U_EN.exe` came back with `Mono/.Net assembly`. That was the
single biggest piece of luck in the whole project: a .NET assembly means
ilspycmd will give us back readable C# in seconds. Beats staring at IDA
listings of Borland C++ runtime stubs all afternoon, which is what
`PCA1106W.exe` would have made us do.
## 2026-05-10 — decompile and skim
Ran ilspycmd 10.0.1.8346 over `PCA3U_EN.exe`. 898 typedefs. They cleanly
split into two namespaces:
- `HAI_Shared` — the domain model, the wire protocol, the crypto, all of
it reusable across HAI's product line (Omni, Lumina, HMS).
- `PCAccess3` — just UI. Forms, controls, window positions.
That's the prize: `HAI_Shared` is essentially a free protocol
implementation library, written by people who actually know how the panel
works, sitting there in C# waiting to be read.
First skim of `HAI_Shared`:
- `clsOmniLinkPacket` — outer transport packet. 4-byte header
(`[seq_hi][seq_lo][type][reserved=0]`) + payload. Sequence number is
big-endian. There are 12 packet types: NewSession, AckNewSession,
RequestSecureSession, AckSecureSession, two flavors of
SessionTerminated, the `OmniLinkMessage` (encrypted, v1) and
`OmniLink2Message` (encrypted, v2) wrappers, plus their unencrypted
twins.
- `clsOmniLinkMessage` — inner application message.
`[StartChar][MessageLength][...payload, payload[0]=opcode...][CRC_lo][CRC_hi]`.
CRC is CRC-16/MODBUS with poly `0xA001`. Standard.
- `clsAES` — the panel's symmetric crypto. AES-128, ECB,
`PaddingMode.Zeros`, key reused as IV (which is fine in ECB but a code
smell that hints at someone copy-pasting from a textbook).
- `enuOmniLink2MessageType` — 83 v2 opcodes. Login, Logout,
RequestSystemInformation, RequestExtendedStatus, Command, ZigBee
pass-through, firmware upload, etc.
- `clsCapOMNI_PRO_II`, `clsCapLUMINA`, `clsCapHMS950e`, … — per-model
capability classes carrying constants like `numZones=176`,
`numUnits=511`. Real domain model, not a config file.
Wrote those down in `findings.md` and pushed on.
## 2026-05-10 — the cipher that wasn't AES
Then we hit the file format. The `.pca` and `.CFG` blobs *look* like
AES-CBC ciphertext. They aren't. From `clsPcaCryptFileStream`:
```csharp
private byte oldRandom(byte max) {
RandomSeed = RandomSeed * 134775813 + 1;
return (byte)((RandomSeed >> 16) % max);
}
// per byte: ciphertext = plaintext ^ oldRandom(255) // mod 255, not 256
```
That multiplier — `134775813` = `0x08088405` — is the Borland Delphi /
Turbo Pascal `Random()` LCG. So someone wrote this thing in Delphi
originally, ported it to C#, and kept the exact same PRNG so existing
.pca files would still decrypt. The mod-255 (not 256) stays in too,
which means the keystream byte is in `[0..254]`, never `0xFF`. It
doesn't lose information — it just shifts the output distribution.
Quirky but not broken.
Two hardcoded 32-bit keys live in `clsPcaCfg`:
```csharp
private readonly uint keyPC01 = 338847091u; // 0x142A3D33 — for PCA01.CFG
public readonly uint keyExport = 391549495u; // for exported .pca files
```
And a third path: `SetSecurityStamp(string S)` derives a per-installation
key from a stamp string:
```csharp
uint num = 305419896u; // 0x12345678 — developer Easter egg as init value
foreach (char c in S)
num = ((num ^ c) << 7) ^ c;
Key = num;
```
`0x12345678` as an init constant is the giveaway: someone was bored at
the keyboard the day they wrote this. It's the kind of thing you grep
for. (The actual hash function, `((k ^ c) << 7) ^ c`, is fine — not
cryptographic, but fine for "let me derive a per-install key from a
serial number.")
## 2026-05-10 — the wrong-key-looks-right problem
Wrote a Python decryptor in maybe an hour: a generator that yields
keystream bytes, an XOR over the file. Easy.
Then we hit a subtle thing. The first script auto-tried the two known
keys and picked the one whose plaintext "looked more printable". It
picked `keyExport`, ran the parser, and got nonsense — but a *plausible*
kind of nonsense: short non-empty strings, non-zero counter values,
generally the texture of real binary data.
Turns out **printable-character ratio is a terrible heuristic for binary
file plaintext.** Random noise is, on average, slightly more "printable"
than a real binary file padded with zeros and length-prefixed strings —
because random noise has a uniform distribution and a real file has long
runs of `0x00` (which falls outside the 32–127 printable range).
Replaced it with something concrete and stupid:
```python
def score(pt):
n = pt[0]
if not (1 <= n <= 64): return 0
tag = pt[1:1+n]
if all(32 <= b < 127 for b in tag):
return 100 + n
return 0
```
The first byte is a String8 length, and the next `n` bytes should be the
ASCII version tag like `CFG05` or `PCA03`. If it parses cleanly, the key
is right; if not, it isn't. Robust because it's not statistical.
`PCA01.CFG` decrypted with `keyPC01`. First bytes:
```
00000000 05 43 46 47 30 35 17 41 ... .CFG05.A
```
`CFG05`. Format version 5. Walked the rest of the schema (modem strings,
port number, key field, password) and pulled out the prize:
```
pca_key = 0xC1A280B2 (3,248,652,466)
password = "PASSWORD" # factory default, never changed
```
So the per-installation `.pca` key was sitting inside `PCA01.CFG` the
whole time, encrypted with a hardcoded key that's right there in the
binary. The `keyExport` path is only for files that were exported for
sharing, which is *not* what `Our House.pca` was — it was the live
in-place config.
Decrypted `Our House.pca` with `0xC1A280B2`. First bytes:
```
00000000 05 50 43 41 30 33 ... .PCA03
```
`PCA03`. File format v3. Right key.
## 2026-05-10 — the 2191-byte header parses byte-perfect
Read `clsHAC.ReadFileHeader` to figure out the layout:
```
String8 version_tag "PCA03"
String8(30) AccountName
String16(120) AccountAddress
String8(20) AccountPhone
String8(4) AccountCode
String16(2000) AccountRemarks
byte Model
byte MajorVersion
byte MinorVersion
sbyte Revision
```
One thing about `ReadString8(out S, byte L)`: it always consumes
`1 + L` bytes regardless of the declared string length. So the strings
are fixed-width slots with a length prefix, not variable-length.
Total header size: 2191 bytes.
Then we found the validation block at `clsHAC.cs:7943`:
```csharp
if (num == 2191) { /* header read OK */ }
```
If your byte counter doesn't equal 2191 after parsing the header, you
got it wrong. It did. That was the moment we knew the parser was
correct: not by inspection of the output, but by hitting an exact magic
number that the original code was checking against.
Decoded header:
- Model byte = `0x10` = `enuModel.OMNI_PRO_II`
- Firmware: 2.12 r1
- AccountName / Address / Phone — the previous owner's PII
- 8 user codes, all still factory default `12345678`
That last one stung. The panel had probably been sitting on someone's
wall for a decade with `12345678` as the master code. (Not our panel,
yet — but our panel was about to inherit it.) Plaintext stays in
`extracted/Our_House.pca.plain` and that path stays in `.gitignore`.
All future notes redact PII.
## 2026-05-10 — walking the body
Header was 2191 bytes; the file is 144 KB. Plenty more to parse before
we'd hit the network connection block where the AES key for live-panel
talk is stored.
The body layout (from `clsHAC.ReadFromFile`):
```
ByteArray SetupData.data (3840 bytes for OMNI_PRO_II)
bool slRequireCodeForSecurity
bool slPasswordOnRestore
UInt16 (discarded)
UInt16 EventLog.Count
UInt32 (discarded)
ZoneNames, UnitNames, ButtonNames, CodeNames, ThermostatNames,
AreaNames, MessageNames
ZoneVoices, UnitVoices, ButtonVoices, CodeVoices, ThermostatVoices,
AreaVoices, MessageVoices
Programs
EventLog
# v >= 2:
if Ethernet feature:
String8(120) Connection.NetworkAddress
String8(5) port-string
String8(32) ControllerKey-as-hex <- 32 hex chars = 16-byte AES key
...
```
The Names blocks were straightforward: each is `max_slots * (1 + name_len)`
bytes. For Zones that's `176 * 16 = 2816` bytes. Adds up cleanly.
Then we hit the Voices blocks and the parser desynced.
## 2026-05-10 — the latent bug in PC Access itself
Each "Voice" block lets the panel speak the name of an object. Six
phrases per object (`numVoicePhrases = 6`). The C# reads them like this:
```csharp
byte[] B = new byte[CAP.numVoicePhrases]; // 6 bytes
for (int i = 1; i <= GetFileMaxX(); i++) {
num = (i > Count)
? num + FS.ReadByteArray(out B, B.Length) // skip path: 6 bytes
: num + _Items[i-1].Voice.Read(FS); // structured path
}
```
The "structured path" calls `clsVoiceWordArray.Read`, which branches on
whether the panel has the `LargeVocabulary` feature:
- LargeVocabulary present → 6 phrases × **2 bytes** (UInt16) = **12 bytes**
- LargeVocabulary absent → 6 phrases × 1 byte = 6 bytes
OMNI_PRO_II *has* LargeVocabulary. So the structured path reads 12 bytes
per slot. But the **skip path** in the loop above always reads 6 bytes,
no matter what. There's no `if (LargeVocabulary) B = new byte[12];`.
If `Count == GetFileMaxX()` (every slot is filled), this never matters —
the skip path is never taken. For every block on our panel except one,
that's true. But Units has `Count = 511` and `GetFileMaxX = 512`, so
exactly one slot takes the skip path, reads 6 bytes when it should have
read 12, and the next 6 bytes — which are actually the start of the
*next* block — get treated as the tail of the current slot. The parser
walks 6 bytes off the rails and never recovers.
The C# code in the wild gets away with this because `Count >= Max` for
basically all real panels in deployment. But it's a real bug — it would
bite if a model ever shipped with LargeVocabulary AND had Buttons or
Messages with `Count < Max`. We patched our parser; the original is
still wrong.
Found it by hex-dumping the file, locating the panel IP address
(`192.168.1.9`) at byte offset `0xe2d8`, and back-solving the diff
between where we expected to land and where the IP actually was. The
gap was exactly 6684 bytes, which is `(512-1)*6` worth of voice slots
read at half the right size. Math checked out. Off by N.
## 2026-05-10 — the prize
After the Voices, the body has Programs (1500 × 14 B), EventLog (250 ×
9 B), and then — for a v3 file with the Ethernet feature — the
Connection block:
```
String8(120) Connection.NetworkAddress
String8(5) port-string
String8(32) ControllerKey-as-hex
```
For our panel:
- IP: `192.168.1.9`
- Port: `4369`
- ControllerKey: 16 bytes of AES-128 key, extracted at file offset
`0xe2d8`
Total bytes to that point: `2191 + 3840 + 10 + 15407 + 13374 + 21000 + 2250 = 58072 = 0xe2d8`.
Exactly the offset where the IP appears in the hex dump. Done.
That key plus the right handshake = direct talk to the panel.
## 2026-05-10 — the two non-public quirks
Now we needed to read `clsOmniLinkConnection.cs`. It's 2109 lines of
state machine for the secure-session handshake, the keepalive timer, the
TCP framing, and the encryption. We expected a textbook AES session: send
client-hello, get server-hello, derive key from PIN somehow, encrypt
everything from then on.
What we found instead were two surprises that no public Omni-Link
write-up we'd seen mentions. Both of them look like quirks. Both of them
will reject your client with `ControllerSessionTerminated` if you skip
them.
### Quirk 1 — the session key is not the ControllerKey
You'd expect the AES session key to be the ControllerKey verbatim. It
isn't. From `clsOmniLinkConnection.cs:1886-1892`:
```csharp
SessionKey = new byte[16];
ControllerKey.CopyTo(SessionKey, 0);
for (int j = 0; j < 5; j++)
{
SessionKey[11 + j] = (byte)(ControllerKey[11 + j] ^ SessionID[j]);
}
AES = new clsAES(SessionKey);
```
The first 11 bytes of the session key are the ControllerKey verbatim.
The last 5 bytes are the ControllerKey XORed with a 5-byte `SessionID`
nonce that the controller sent in `ControllerAckNewSession`. That's
the entire key derivation. No PBKDF2, no HKDF, no PIN, no salt. Just
five bytes of XOR.
The same five-byte block appears twice in the source — once for UDP
(line 1423) and once for TCP (line 1886). Identical.
The implication for someone writing a client is: if you encrypt your
`ClientRequestSecureSession` with the raw ControllerKey, the panel
decrypts it to garbage and disconnects you. You have to wait for the
nonce, mix it in, *then* encrypt.
### Quirk 2 — per-block XOR pre-whitening before AES
This one is the real headline. Before AES-encrypting any payload block,
the first two bytes of every 16-byte block get XORed with the packet's
sequence number. Same XOR mask, every block of the packet. From
`clsOmniLinkConnection.cs:396-401`:
```csharp
for (num = 0; num < PKT.Data.Length; num += 16)
{
PKT.Data[num] = (byte)(PKT.Data[num] ^ ((PKT.SequenceNumber & 0xFF00) >> 8));
PKT.Data[num + 1] = (byte)(PKT.Data[num + 1] ^ (PKT.SequenceNumber & 0xFF));
}
PKT.Data = AES.Encrypt(PKT.Data);
```
And then the inverse on receive (`:413-417`):
```csharp
PKT.Data = AES.Decrypt(PKT.Data);
for (int i = 0; i < PKT.Data.Length; i += 16)
{
PKT.Data[i] = (byte)(PKT.Data[i] ^ ((PKT.SequenceNumber & 0xFF00) >> 8));
PKT.Data[i + 1] = (byte)(PKT.Data[i + 1] ^ (PKT.SequenceNumber & 0xFF));
}
```
So the on-the-wire encryption is "AES-128-ECB of (payload XOR-prewhitened
with the seq number, two bytes per block)". A naive Omni-Link client that
just AES-ECB-encrypts the raw payload will produce ciphertext the panel
won't accept.
It feels weak — an attacker with a known-plaintext for one block can
recover the seq XOR mask trivially, and from there the whitening is
unprotected. But it's the protocol. The panel won't talk to you without
it.
We think the original intent might have been something like nonce-mixing
(use the seq as a per-packet salt to defeat ECB block-repetition
attacks), and the implementation got cargo-culted from one block to all
blocks of the packet. Doesn't matter. Implement it. Move on.
A bonus surprise: **there is no separate `Login` step on TCP.** The C#
defines `clsOL2MsgLogin` (v2 Login, opcode 42) but never instantiates
it on the TCP path. Possessing the right ControllerKey *is* the
authentication. The login opcode appears to be a serial-only artifact
from before the Ethernet module existed. The v1 serial path *does*
construct `clsOLMsgLogin` with the user's PIN; the v2 TCP path goes
straight from `ControllerAckSecureSession` to `RequestSystemInformation`.
We documented all of this in `notes/handshake.md` while it was fresh.
## 2026-05-10 around noon — first commit
```
9a02418 Initial scaffold + protocol primitives
```
uv project, ruff, pytest, mypy strict, MIT, README, gitignore explicitly
protecting any `.pca` or panel keys. Date-versioned (CalVer): `2026.5.10`.
The library lives in `src/omni_pca/`:
- `crypto.py` — AES-128-ECB plus the per-block XOR seq pre-whitening and
the `SessionKey = CK[0:11] || (CK[11:16] XOR SessionID)` derivation
- `opcodes.py` — all 12 packet types, all 104 v1 opcodes, all 83 v2
opcodes, all transcribed by hand from the decompiled enums
- `packet.py` — outer `Packet` with `encode()`/`decode()`
- `message.py` — inner `Message` with CRC-16/MODBUS
- `pca_file.py` — Borland LCG cipher, `PcaReader`, parsers for both
`.pca` and `.CFG`
49 tests passed, ruff clean. The protocol unit tests use canned bytes
extracted from the C# source; they don't need a panel to run.
## 2026-05-10 1pm — mock panel as ground truth
Second commit:
```
1901d6e Async client + mock panel + e2e roundtrip
```
The async client (`OmniConnection`, `OmniClient`) runs the four-step
secure-session handshake, frames TCP correctly (read first 16-byte block,
decrypt, learn `MessageLength`, read the rest), keeps a per-direction
monotonic sequence number that wraps `0xFFFF → 1` (skipping 0 because the
controller uses 0 for unsolicited packets), and dispatches solicited
replies to a Future while shoving unsolicited packets into a queue.
That's all well and good, but how do we test it without a panel? The
panel was at `192.168.1.9` last we knew, and we had no idea if its
network module was even on. Building a real Omni controller emulator
in Python turned out to be the right answer.
`mock_panel.py` is a TCP server that:
- accepts `ClientRequestNewSession`, generates a 5-byte SessionID,
sends back `ControllerAckNewSession` with the version bytes `00 01`
prepended
- derives the same SessionKey the client did (using the same XOR-mix)
- decrypts the `ClientRequestSecureSession`, validates that the 5-byte
echo matches the SessionID it just sent, sends back the symmetric
`ControllerAckSecureSession` (re-encrypting the same SessionID)
- handles `RequestSystemInformation`, `RequestSystemStatus`,
`RequestProperties` (Zone/Unit/Area, both absolute index and rel=1
iteration with EOD termination), and Naks anything else
It's a thin emulator but it's a *complete* protocol counterpart. Six
end-to-end tests connect a real `OmniClient` over a real TCP socket to
a real `MockPanel` and exchange real frames. They prove the handshake,
the AES, the XOR whitening, and the sequence numbering all agree —
because if any one of them is wrong, decryption produces garbage and
the connection drops.
That ground-truth check was load-bearing. It meant we could iterate on
the client all afternoon without worrying that some bug in our
encryption was being masked by a bug in our framing.
## 2026-05-10 ~1:10pm — the HA scaffold
Third commit:
```
2e43936 HA custom_component scaffold (binary_sensor for zones)
```
Drop-in Home Assistant integration at `custom_components/omni_pca/`:
manifest, config_flow with auth + reauth, coordinator with reconnect
logic, binary_sensor for each named zone with `device_class` derived
from `zone_type` (OPENING, MOTION, SMOKE, etc.). 12 unit tests for
`parse_controller_key()` because that's the one piece of pure logic
worth pinning down hard.
The HA component itself hasn't yet been validated against a running
Home Assistant — that comes next. But the HACS manifest is in place, so
once we trust it we can drop it in.
## 2026-05-10 2pm — fleshing out the model surface
Fourth commit:
```
08974e2 Models: 16 status/properties dataclasses + enums + temp converters
```
The Omni protocol has a wide object surface — Zones, Units, Areas,
Thermostats, Buttons, Programs, Codes, Messages, Aux Sensors, Audio
Zones, Audio Sources, User Settings — and each has both a "properties"
record (configured, mostly static) and a "status" record (live state).
Wrote frozen-slots dataclasses for all of them, with `.parse(payload)`
classmethods that decode the byte layouts straight from the C# field
definitions. Added IntEnums for the dispatch tags (`ObjectType`,
`SecurityMode`, `HvacMode`, `FanMode`, `HoldMode`, `ThermostatKind`,
`ZoneType`, `UserSettingKind`).
One small surprise from `clsText.cs`: the temperature encoding the
panel uses is *linear*, not the non-linear thermistor scale we'd
guessed it might be. `C = raw / 2 - 40`. Easy.
42 new tests. 139 total.
## 2026-05-10 ~2:15pm — commands and events
Fifth commit:
```
68cf44a Library v1.0 phase B: command opcodes + typed system events
```
`commands.py` — the `Command` IntEnum, sourced from `enuUnitCommand.cs`
which is the canonical "all commands" enum despite the misleading name
(it covers HVAC, security, scene, button, message commands too — not
just units). One naming weirdness: `enuUnitCommand.UserSetting` (104) is
actually EXECUTE_PROGRAM. Renamed for clarity in our enum and left the
original C# alias documented inline so anyone cross-referencing won't
get confused.
`OmniClient` got 18 new methods: `execute_command`,
`execute_security_command`, `acknowledge_alerts`, `get_object_status`,
`get_extended_status`, plus convenience wrappers (`turn_unit_on`,
`set_unit_level`, `bypass_zone`, `set_thermostat_heat_setpoint_raw`,
…). All the command methods raise `CommandFailedError` on Nak.
`events.py` — the `SystemEvents` (opcode 55) decoder. The panel pushes
batches of these unsolicited; each batch contains multiple events of
different types (zone state changes, unit state changes, arming
changes, alarm activated, AC lost, battery low, phone line dead, X10
codes received, …). 28 dispatch tags, 26 typed event subclasses, an
`UnknownEvent` catch-all for opcode values we don't know yet, and an
`EventStream` helper that flattens batches across messages.
55 new tests. 194 total.
## 2026-05-10 ~2:30pm — stateful mock and the full v1.0 surface
Sixth commit:
```
c26db62 Library v1.0 phase C: stateful mock + e2e for the new surface
```
The mock got real state. `MockUnitState`, `MockAreaState`, `MockZoneState`,
`MockThermostatState`, plus a `user_codes` table for security validation.
All the new opcodes wired through:
- `Command` (20) → Ack with state mutation, dispatching UNIT_ON, UNIT_OFF,
UNIT_LEVEL, BYPASS_ZONE, RESTORE_ZONE, SET_THERMOSTAT_HEAT, etc.
- `ExecuteSecurityCommand` (74) → Ack on a valid code, Nak on invalid
- `RequestStatus` (34) → `Status` (35) for the four object kinds with
hard-coded record sizes per `clsOL2MsgStatus.cs:13-27`
- `RequestExtendedStatus` (58) → `ExtendedStatus` (59) with the
`object_length` prefix and the richer per-type fields
- `AcknowledgeAlerts` (60) → Ack
- And synthesized `SystemEvents` (55) pushed with `seq=0` whenever state
changes, so the e2e tests can subscribe to events through the real
client API and watch them roundtrip cleanly through `events.parse_events()`
9 new e2e tests — arm/disarm with code validation, unit on/off/level,
zone bypass/restore, thermostat setpoint, push events for arming and
unit changes, acknowledge_alerts. 203 total passing, 2 skipped (the
HA harness and a `.pca` fixture we don't ship).
The library has the v1.0 surface: read, command, status, extended status,
events. All exercised by an in-process emulator that speaks the same
protocol as the real panel.
## 2026-05-10 afternoon — trying to find the real panel
Now the part that didn't go well.
The `.pca` file said the panel lived at `192.168.1.9:4369`. Tried to
connect: nothing. TCP SYN, no SYN-ACK. Pinged: silent. nmap'd the
subnet to make sure we were on the right network:
- `192.168.1.7`, `.8`, `.11` — open ports including SSH with banner
`SSH-2.0-dropbear_2018.76`. Three OmniTouch 7 touchscreens. They're
the wall-mounted controllers; they live on the same LAN as the panel,
speak Omni-Link II to the panel themselves, and run a stripped Linux
with dropbear for the firmware updater. Confirmed by the SSH banner
date (2018) lining up with the OmniTouch 7 firmware era.
- `.6` — likely the panel itself, but no open ports, no response.
- `.9` — also dark. The 2018 IP either changed or the network module
was disabled at some point.
So the panel is sitting there, doing its job (the touchscreens clearly
work — they're on the network), but its Ethernet/Omni-Link II module is
either turned off in the panel's setup menu or the network bridge
hardware is bad. We have the ControllerKey, we have the right port, we
have a fully-tested client and a mock panel that proves the client
works end-to-end — but we can't prove it against the real thing yet.
We have, in other words, built the world's most thoroughly-tested
unused integration. There is something quietly funny about that.
The fix is physical: walk over to the panel, find the menu that
enables the Ethernet module, save, reboot. Then the live validation
becomes a five-minute test. Until then, the mock is the best we have,
and the mock is a faithful enough emulator that we trust it.
## What's next
The Home Assistant custom_component is being rebuilt on top of the v1.0
library surface — alarm_control_panel, light, switch, climate, sensor,
scene, button, event entities, plus services.yaml and diagnostics. That
work is in progress and will be validated as soon as we can bring the
panel's network module online.
When we do, the moment of truth is one TCP connect to port 4369 and
one `RequestSystemInformation` exchange. If it comes back with
`Omni Pro II / 2.12 r1`, the entire stack — file decryption, key
extraction, key derivation, XOR pre-whitening, AES, the works — was
right end to end. If it comes back with `ControllerSessionTerminated`,
we missed something subtle. The mock says we didn't. We'll find out.
---
## Things worth remembering
**The "wrong key looks plausible" problem is real and recurring.**
Statistical heuristics (entropy, printable ratio, frequency analysis)
are great for telling random noise from English; they're terrible for
telling random noise from binary file plaintext. When a file format
has a known header magic, parse-the-magic beats every heuristic.
**Magic numbers in source code are gifts.** `0x12345678` as an init
value, `134775813` as an LCG multiplier, `2191` as a header length —
each one is a hard checkpoint that tells you, on first try, whether
the next four hours are going to be productive or not.
**A complete protocol counterpart is worth more than ten times its
LOC in confidence.** The mock panel was maybe 400 lines of code and
it eliminated an entire category of "is the client wrong or am I
holding it wrong" questions. Every test that connects a real client
to it through real TCP is a test that the entire stack — handshake,
encryption, framing, sequencing — agrees with itself.
**Quirk #2 (the per-block XOR pre-whitening) is the kind of thing
nobody finds without doing the work.** It's not in `jomnilinkII`,
not in `pyomnilink`, not in the public Omni-Link II writeups we
checked. The decompiled C# was unambiguous and twice-redundant
(once for encrypt, once for decrypt). Without those exact six lines
of source, an OSS client that did everything else right would still
get `ControllerSessionTerminated` on the first encrypted message,
with no useful diagnostic.
**The latent LargeVocabulary bug in PC Access is harmless but
symptomatic.** It's a copy-paste mistake — the skip path uses a
buffer sized for the no-LargeVocabulary case while the structured
path uses the LargeVocabulary size. Every panel in deployment
satisfies `Count >= Max` for the affected blocks, so the bug never
fires. But it would, on a model that doesn't, and PC Access would
silently mis-parse its own config file. The kind of bug that lives
in shipping code for a decade because nobody runs the unhappy path.

109
tests/test_ha_helpers.py Normal file
View File

@ -0,0 +1,109 @@
"""Pure-function tests for ``custom_components.omni_pca.helpers``.
These never import anything from ``homeassistant.*``, so they run in the
same venv as the rest of the library tests. The HA-bound modules
(coordinator, binary_sensor, __init__) are covered separately by
``test_ha_imports.py`` which uses ``pytest.importorskip("homeassistant")``.
"""
from __future__ import annotations
import importlib.util
import sys
from pathlib import Path
import pytest
# Load the helpers module by file path so we don't have to drag in the
# rest of the package (which imports `homeassistant.*` at module scope).
_REPO_ROOT = Path(__file__).parent.parent
_HELPERS_PATH = _REPO_ROOT / "custom_components" / "omni_pca" / "helpers.py"
def _load_helpers():
    """Execute ``helpers.py`` from disk and return it as a module object.

    The module is registered in ``sys.modules`` under a throwaway name so
    that machinery which looks the module up by name during execution can
    find it.
    """
    module_spec = importlib.util.spec_from_file_location(
        "_omni_pca_helpers_under_test", _HELPERS_PATH
    )
    assert module_spec is not None
    assert module_spec.loader is not None
    loaded = importlib.util.module_from_spec(module_spec)
    sys.modules[module_spec.name] = loaded
    module_spec.loader.exec_module(loaded)
    return loaded


# Module under test, loaded once for every test class below.
helpers = _load_helpers()
class TestDeviceClassForZoneType:
    """Spot-check the zone-type -> HA device-class lookup."""

    @pytest.mark.parametrize(
        "zone_type,expected",
        [
            (0, "opening"),    # ENTRY_EXIT
            (1, "opening"),    # PERIMETER
            (2, "motion"),     # NIGHT_INTERIOR
            (3, "motion"),     # AWAY_INTERIOR
            (16, "safety"),    # PANIC
            (17, "safety"),    # POLICE_EMERGENCY
            (18, "safety"),    # SILENT_DURESS
            (19, "tamper"),    # TAMPER
            (20, "tamper"),    # LATCHING_TAMPER
            (32, "smoke"),     # FIRE
            (33, "smoke"),     # FIRE_EMERGENCY
            (34, "gas"),       # GAS
            (54, "cold"),      # FREEZE
            (55, "moisture"),  # WATER
            (56, "tamper"),    # FIRE_TAMPER
        ],
    )
    def test_known_zone_types(self, zone_type: int, expected: str) -> None:
        device_class = helpers.device_class_for_zone_type(zone_type)
        assert device_class == expected

    def test_unknown_zone_type_defaults_to_opening(self) -> None:
        # Types with no explicit mapping fall back to "opening".
        assert helpers.device_class_for_zone_type(199) == "opening"

    def test_zero_is_opening(self) -> None:
        assert helpers.device_class_for_zone_type(0) == "opening"
class TestIsBinaryZoneType:
    """Only genuinely on/off zone types should become binary sensors."""

    @pytest.mark.parametrize("analog_type", [80, 81, 82, 83, 84])
    def test_analog_types_excluded(self, analog_type: int) -> None:
        # 80-84 are treated as analog (non-binary) zone types.
        verdict = helpers.is_binary_zone_type(analog_type)
        assert verdict is False

    @pytest.mark.parametrize(
        "binary_type", [0, 1, 2, 3, 16, 19, 32, 34, 54, 55, 56, 64]
    )
    def test_binary_types_included(self, binary_type: int) -> None:
        verdict = helpers.is_binary_zone_type(binary_type)
        assert verdict is True
class TestUseLatchedAlarmForZone:
    """Latching zone kinds (fire, gas, panic, freeze, water, tamper, ...)
    should read the latched-alarm bit; plain contacts and motion zones
    should read the current condition instead."""

    @pytest.mark.parametrize(
        "latching_type", [16, 17, 18, 19, 20, 32, 33, 34, 48, 54, 55, 56]
    )
    def test_latching_types(self, latching_type: int) -> None:
        verdict = helpers.use_latched_alarm_for_zone(latching_type)
        assert verdict is True

    @pytest.mark.parametrize("contact_type", list(range(9)))
    def test_contact_and_motion_types_use_current_condition(
        self, contact_type: int
    ) -> None:
        verdict = helpers.use_latched_alarm_for_zone(contact_type)
        assert verdict is False
class TestPrettifyName:
    """``prettify_name`` turns raw panel names (often SHOUTING_SNAKE_CASE,
    sometimes padded with whitespace) into human-friendly title-cased
    entity names."""

    @pytest.mark.parametrize(
        ("raw", "expected"),
        [
            ("FRONT_DOOR", "Front Door"),
            ("front_door", "Front Door"),
            ("KITCHEN", "Kitchen"),
            (" Trimmed ", "Trimmed"),
            ("MOTION_KIDS_ROOM", "Motion Kids Room"),
            ("", ""),  # empty input must stay empty, not raise
        ],
    )
    def test_prettifies_raw_names(self, raw: str, expected: str) -> None:
        # Renamed from the misleading ``test_round_trip``: prettify_name is a
        # one-way transformation, nothing is converted back.
        assert helpers.prettify_name(raw) == expected