Fix megakid#35 megakid#36: Planned dispatch source is None
pdcastro committed Mar 6, 2024
1 parent 68ba185 commit 56f0ab3
Showing 3 changed files with 207 additions and 3 deletions.
10 changes: 10 additions & 0 deletions custom_components/octopus_intelligent/__init__.py
@@ -67,3 +67,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
_LOGGER.debug("Octopus Intelligent System component setup finished")
return True


async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Called when the config entry is removed (the integration is deleted)."""
octopus_system: OctopusIntelligentSystem = (
hass.data[DOMAIN][entry.entry_id][OCTOPUS_SYSTEM]
)
try:
await octopus_system.async_remove_entry()
except Exception as ex: # pylint: disable=broad-exception-caught
_LOGGER.error(ex)
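
For context (not part of this commit), async_remove_entry relies on async_setup_entry having registered the coordinator under hass.data[DOMAIN][entry.entry_id][OCTOPUS_SYSTEM]. A minimal sketch of that registration step, assuming hypothetical entry.data keys:

# Sketch only — not part of this commit. Shows the hass.data layout that
# async_remove_entry above relies on; the entry.data keys are hypothetical.
async def _register_octopus_system(hass: HomeAssistant, entry: ConfigEntry) -> None:
    octopus_system = OctopusIntelligentSystem(
        hass,
        api_key=entry.data["api_key"],
        account_id=entry.data["account_id"],
        off_peak_start=entry.data["off_peak_start"],
        off_peak_end=entry.data["off_peak_end"],
    )
    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {OCTOPUS_SYSTEM: octopus_system}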
@@ -1,5 +1,6 @@
"""Support for Octopus Intelligent Tariff in the UK."""
from datetime import timedelta, datetime, timezone
from typing import Any, override
import logging
import async_timeout

@@ -11,6 +12,7 @@
)

from .graphql_client import OctopusEnergyGraphQLClient
from .persistent_data import PersistentData
from .util import *

_LOGGER = logging.getLogger(__name__)
@@ -33,30 +35,112 @@ def __init__(self, hass, *, api_key, account_id, off_peak_start, off_peak_end):
self._off_peak_end = off_peak_end

self.client = OctopusEnergyGraphQLClient(self._api_key)

self._persistent_data = PersistentData(hass, account_id)

@property
def account_id(self):
return self._account_id

async def _async_update_data(self):
@override
async def _async_update_data(self) -> dict[str, Any]:
"""Fetch data from API endpoint.
This is the place to pre-process the data into lookup tables
so entities can quickly look up their data.
Returns:
dict: The data received from the Octopus API, for example:
{
'completedDispatches': [{
'chargeKwh': '-0.58',
'startDtUtc': '2024-02-25 02:00:00+00:00',
'endDtUtc': '2024-02-25 02:30:00+00:00',
'meta': {'location': 'AT_HOME', 'source': None},
}, {
'chargeKwh': '-0.58',
'startDtUtc': '2024-02-25 03:30:00+00:00',
'endDtUtc': '2024-02-25 04:00:00+00:00',
'meta': {'location': 'AT_HOME', 'source': None},
}],
'plannedDispatches': [{
'chargeKwh': '-0.67',
'startDtUtc': '2024-02-25 23:30:00+00:00',
'endDtUtc': '2024-02-26 00:00:00+00:00',
'meta': {'location': None, 'source': 'smart-charge'},
}, {
'chargeKwh': '-1.12',
'startDtUtc': '2024-02-26 03:00:00+00:00',
'endDtUtc': '2024-02-26 04:00:00+00:00',
'meta': {'location': None, 'source': 'smart-charge'},
}],
'registeredKrakenflexDevice': {
'chargePointMake': 'No charger (3-pin plug)',
'chargePointModel': '3-pin plug',
'chargePointPowerInKw': '2.400',
'createdAt': '2024-02-08T10:43:34.061834+00:00',
'hasToken': True,
'krakenflexDeviceId': '...',
'provider': 'ENODE',
'status': 'Live',
'suspended': False,
'vehicleBatterySizeInKwh': '58.00',
'vehicleMake': 'Volkswagen',
'vehicleModel': 'ID.3 Pro',
},
'vehicleChargingPreferences': {
'weekdayTargetSoc': 80,
'weekdayTargetTime': '08:00',
'weekendTargetSoc': 80,
'weekendTargetTime': '08:00',
},
}
"""
try:
# Note: asyncio.TimeoutError and aiohttp.ClientError are already
# handled by the data update coordinator.
async with async_timeout.timeout(90):
return await self.client.async_get_combined_state(self._account_id)
data = await self.client.async_get_combined_state(self._account_id)
self._update_planned_dispatch_sources(data)
return data
# except ApiAuthError as err:
# # Raising ConfigEntryAuthFailed will cancel future updates
# # and start a config flow with SOURCE_REAUTH (async_step_reauth)
# raise ConfigEntryAuthFailed from err
except Exception as err:
raise UpdateFailed(f"Error communicating with Octopus GraphQL API: {err}")

def _update_planned_dispatch_sources(self, data):
"""Workaround for issue #35: missing dispatch sources in Octopus API response."""
dispatches = (data or {}).get("plannedDispatches", [])
all_sources = [disp.get("meta", {}).get("source", "") for disp in dispatches]
good_sources: set[str] = {src for src in all_sources if src}
if good_sources:
if len(good_sources) > 1:
_LOGGER.warning(
"Unexpected mix of planned dispatch sources: %s", good_sources
)
# We don't expect to see a mix of non-None sources like 'bump-charge'
# and 'smart-charge' in the same planned dispatch list, but if that
# happens, play safe and avoid assuming the wrong source.
self._persistent_data.last_seen_planned_dispatch_source = ""
else:
self._persistent_data.last_seen_planned_dispatch_source = next(
iter(good_sources)
)

# Fill in any missing (None) source attribute in the planned dispatch list.
if any(not src for src in all_sources):
source = self._persistent_data.last_seen_planned_dispatch_source
_LOGGER.debug(
"Missing planned dispatch source in Octopus API response%s",
f", assuming '{source}'" if source else "",
)
if source:
for dispatch in dispatches:
meta = dispatch.get("meta", {})
if meta:
meta["source"] = meta.get("source") or source

def is_smart_charging_enabled(self):
return not self.data.get('registeredKrakenflexDevice', {}).get('suspended', False)
async def async_suspend_smart_charging(self):
@@ -171,6 +255,10 @@ async def async_start_boost_charge(self):
async def async_cancel_boost_charge(self):
await self.client.async_cancel_boost_charge(self._account_id)

async def async_remove_entry(self):
"""Called when the integration (config entry) is removed from Home Assistant."""
await self._persistent_data.remove()

async def start(self):
_LOGGER.debug("Starting OctopusIntelligentSystem")
try:
@@ -181,5 +269,7 @@ async def start(self):
except Exception as ex:
_LOGGER.error(f"Authentication failed : {ex.message}. You may need to check your token or create a new app in the gardena api and use the new token.")

await self._persistent_data.load()

async def stop(self):
_LOGGER.debug("Stopping OctopusIntelligentSystem")
104 changes: 104 additions & 0 deletions custom_components/octopus_intelligent/persistent_data.py
@@ -0,0 +1,104 @@
"""Persistent data storage for the integration, based on the HASS helpers.storage.Store class."""
import logging
from dataclasses import asdict, dataclass, InitVar
from typing import Any

from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant
from homeassistant.exceptions import IntegrationError
from homeassistant.helpers.storage import Store

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)


@dataclass
class PersistentData:
"""JSON-serialisable data persistence backed by the HASS helpers.storage.Store class.
Frequently persisting data to "disk" can have undesired side effects when HASS is
running on some edge devices like the Raspberry Pi, whose storage consists of an
SD card that wears when data is written, eventually leading to hardware failure.
For this reason, by default, the data is only saved when the HASS STOP event is
fired, indicating that Home Assistant is quitting / restarting. This includes the
frontend web UI 'Restart' command, the "docker container stop" command, CTRL-C on
the command line, and generally when the HASS process receives the SIGTERM signal.
"""
# Note: InitVar fields are not persisted. They become arguments to __post_init__().
hass: InitVar[HomeAssistant]
account_id: InitVar[str]

# ---------------------------------------------
# Start of JSON-serialisable persistent fields.
#
last_seen_planned_dispatch_source: str = "smart-charge"
#
# End of JSON-serialisable persistent fields.
# ---------------------------------------------

def __post_init__(self, hass: HomeAssistant, account_id: str):
self._hass = hass
self._store = Store[dict[str, Any]](
hass=hass,
key=f"{DOMAIN}.{account_id}",
version=1,
minor_version=1,
)
self._stop_event_listener: CALLBACK_TYPE | None = None
self.auto_save = True

@property
def auto_save(self) -> bool:
"""Return whether auto saving is enabled."""
return bool(self._stop_event_listener)

@auto_save.setter
def auto_save(self, enable: bool):
"""Enable/disable automatically calling self.save() on the HASS STOP event."""

async def _on_hass_stop(_: Event):
await self.save(raise_on_error=False)

if enable:
self._stop_event_listener = self._hass.bus.async_listen(
EVENT_HOMEASSISTANT_STOP, _on_hass_stop
)
elif self._stop_event_listener:
self._stop_event_listener()
self._stop_event_listener = None

async def load(self):
"""Load the data from persistent storage."""
try:
data: dict[str, Any] = await self._store.async_load() or {}
except Exception as ex: # pylint: disable=broad-exception-caught
data = {}
_LOGGER.error(
"Using default values for persistent data because of an error: %s", ex
)
# Explicitly set each field separately instead of using some '**data'
# unpacking syntax in order to be future-proof against schema changes
# that may add, remove or rename data fields.
self.last_seen_planned_dispatch_source = data.get(
"last_seen_planned_dispatch_source", self.last_seen_planned_dispatch_source
)

async def save(self, raise_on_error=False):
"""Save the data to persistent storage."""
try:
await self._store.async_save(asdict(self))
except Exception as ex: # pylint: disable=broad-exception-caught
msg = f"Error saving persistent data: {ex}"
if raise_on_error:
raise IntegrationError(msg) from ex
_LOGGER.error(msg)

async def remove(self, disable_auto_save=True):
"""Remove the data from persistent storage (delete the JSON file on disk)."""
if disable_auto_save:
self.auto_save = False
try:
await self._store.async_remove()
except Exception as ex: # pylint: disable=broad-exception-caught
_LOGGER.error("Error removing persistent data: %s", ex)
