Added some checks before restoring coordinator.data from stored state, to prevent using stale data.

Added debug logging
Roeland authored and Roeland committed Aug 30, 2024
1 parent 98e27bf commit 6a4a15b
Showing 2 changed files with 20 additions and 16 deletions.
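
The heart of the change is a freshness check: before the stored sensor state is allowed to repopulate coordinator.data, the newest timestamp in the stored "prices" attribute is compared against the current time, so a state saved long ago is not blindly reused. A minimal standalone sketch of that kind of guard, assuming a prices list of {"time": ..., "price": ...} dicts ordered oldest-to-newest (the helper name and the exact comparison are illustrative, not copied from the integration):

import pandas as pd

def stored_prices_are_fresh(stored_prices: list[dict]) -> bool:
    """Return True when a previously stored price list is recent enough to reuse."""
    if not stored_prices:
        return False
    newest = pd.Timestamp(stored_prices[-1]["time"])
    now = pd.Timestamp.now(tz=newest.tzinfo)
    # Reuse the stored state only while its newest entry is not yet in the
    # past; once every stored hour has passed, the data is considered stale.
    return newest >= now
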
19 changes: 8 additions & 11 deletions custom_components/entsoe/coordinator.py
@@ -1,19 +1,14 @@
from __future__ import annotations

import asyncio
from datetime import timedelta
from multiprocessing import AuthenticationError
from aiohttp import ClientError
import pandas as pd
import tzdata # for timezone conversions in panda
from entsoe import EntsoePandasClient
from entsoe.exceptions import NoMatchingDataError
from requests.exceptions import HTTPError
from datetime import datetime

import tzdata # for timezone conversions in panda
import logging

from datetime import datetime

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt
@@ -91,19 +86,22 @@ async def _async_update_data(self) -> dict:
self.logger.debug("Fetching ENTSO-e data")
self.logger.debug(self.area)

time_zone = dt.now().tzinfo
# We request data for yesterday up until tomorrow.
yesterday = pd.Timestamp.now(tz=self.__TIMEZONE).replace(hour=0, minute=0, second=0) - pd.Timedelta(days = 1)
tomorrow = yesterday + pd.Timedelta(hours = 71)

self.logger.debug(f"fetching prices for start date: {yesterday} to end date: {tomorrow}")
data = await self.fetch_prices(yesterday, tomorrow)
self.logger.debug(f"received data = {data}")
if data is not None:
parsed_data = self.parse_hourprices(data)
data_all = parsed_data[-48:].to_dict()
if parsed_data.size > 48:
self.logger.debug(f"received data for yesterday, today and tomorrow")
data_today = parsed_data[-48:-24].to_dict()
data_tomorrow = parsed_data[-24:].to_dict()
else:
self.logger.debug(f"received data for yesterday and today")
data_today = parsed_data[-24:].to_dict()
data_tomorrow = {}
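
For reference on the slicing above: yesterday is midnight at the start of the previous day and tomorrow is 71 hours later (23:00 on the following day), so a complete response holds 72 hourly prices. The last 48 entries then cover today plus tomorrow, [-48:-24] is today and [-24:] is tomorrow; when tomorrow's prices are not yet published only 48 values arrive and [-24:] is today. A small sketch of that arithmetic on a synthetic series (placeholder values and an example timezone standing in for self.__TIMEZONE, not real ENTSO-e data):

import pandas as pd

# Synthetic hourly series spanning the same 72-hour window the coordinator
# requests: yesterday 00:00 up to and including tomorrow 23:00.
yesterday = pd.Timestamp.now(tz="Europe/Amsterdam").replace(
    hour=0, minute=0, second=0, microsecond=0
) - pd.Timedelta(days=1)
prices = pd.Series(range(72), index=pd.date_range(yesterday, periods=72, freq="h"))

data_all = prices[-48:].to_dict()        # today + tomorrow (48 hours)
data_today = prices[-48:-24].to_dict()   # today's 24 hours
data_tomorrow = prices[-24:].to_dict()   # tomorrow's 24 hours

# Before tomorrow's prices are published only 48 values come back,
# and the last 24 of those are today's.
partial = prices[:48]
assert partial[-24:].index[0] == yesterday + pd.Timedelta(hours=24)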

@@ -113,8 +111,10 @@
"dataTomorrow": data_tomorrow,
}
elif self.data is not None:
self.logger.debug(f"received no data so fallback on existing data.")
newest_timestamp_today = pd.Timestamp(list(self.data["dataToday"])[-1])
if any(self.data["dataTomorrow"]) and newest_timestamp_today < pd.Timestamp.now(newest_timestamp_today.tzinfo):
self.logger.debug(f"detected midnight switch values dataTomorrow to dataToday")
self.data["dataToday"] = self.data["dataTomorrow"]
self.data["dataTomorrow"] = {}
data_list = list(self.data["data"])
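
The fallback branch above covers the case where the API returns nothing but cached data exists: if dataTomorrow is filled and the newest timestamp in dataToday already lies in the past, the coordinator treats it as a midnight switch and promotes the cached tomorrow prices to today. A dict-based sketch of that rollover (the roll_over_midnight helper is illustrative, not part of the integration):

import pandas as pd

def roll_over_midnight(data: dict) -> dict:
    """Promote cached dataTomorrow to dataToday once the cached day has passed.

    `data` mirrors the coordinator cache: "dataToday" and "dataTomorrow" are
    dicts keyed by timestamps, ordered oldest-to-newest.
    """
    if not data.get("dataTomorrow") or not data.get("dataToday"):
        return data  # nothing to promote
    newest_today = pd.Timestamp(list(data["dataToday"])[-1])
    if newest_today < pd.Timestamp.now(tz=newest_today.tzinfo):
        # The last cached "today" timestamp is already in the past, so the
        # cached "tomorrow" prices are treated as today's from here on.
        data["dataToday"] = data["dataTomorrow"]
        data["dataTomorrow"] = {}
    return data
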
@@ -153,15 +153,12 @@ async def fetch_prices(self, start_date, end_date):
else:
self.logger.warning(f"Warning the integration doesn't have any up to date local data this means that entities won't get updated but access remains to restorable entities: {exc}.")



def api_update(self, start_date, end_date, api_key):
client = EntsoePandasClient(api_key=api_key)
return client.query_day_ahead_prices(
country_code=self.area, start=start_date, end=end_date
)


def processed_data(self):
filtered_hourprices = self._filter_calculated_hourprices(self.data)
return {
17 changes: 12 additions & 5 deletions custom_components/entsoe/sensor.py
@@ -187,9 +187,11 @@ async def async_added_to_hass(self):

async def async_update(self) -> None:
"""Get the latest data and updates the states."""
_LOGGER.debug(f"update function for '{self.entity_id} called.'")
value: Any = None
if self.coordinator.data is not None:
try:
_LOGGER.debug(f"current coordinator.data value: {self.coordinator.data}")
value = self.entity_description.value_fn(self.coordinator.processed_data())
# Check if value is a pandas Timestamp and, if so, convert it to an HA-compatible format
if isinstance(value, pd._libs.tslibs.timestamps.Timestamp):
@@ -198,7 +200,7 @@ async def async_update(self) -> None:
self._attr_native_value = value
except Exception as exc:
# No data available
_LOGGER.warning(f"Unable to update entity due to data processing error: {value} and error: {exc}")
_LOGGER.warning(f"Unable to update entity '{self.entity_id}' due to data processing error: {value} and error: {exc} , data: {self.coordinator.data}")

# These return pd.timestamp objects and are therefore not able to get into attributes
invalid_keys = {"time_min", "time_max"}
@@ -220,20 +222,25 @@ async def async_update(self) -> None:
utcnow().replace(minute=0, second=0) + timedelta(hours=1),
)


@property
def extra_restore_state_data(self) -> EntsoeSensorExtraStoredData:
"""Return sensor specific state data to be restored."""
return EntsoeSensorExtraStoredData(self._attr_native_value, None, self._attr_extra_state_attributes if hasattr(self, "_attr_extra_state_attributes") else None)


async def async_get_last_sensor_data(self):
"""Restore Entsoe-e Sensor Extra Stored Data."""
_LOGGER.debug("restoring sensor data")
if (restored_last_extra_data := await self.async_get_last_extra_data()) is None:
return None

if self.description.key == "avg_price":
self.coordinator.data = self.parse_attribute_data_to_coordinator_data(restored_last_extra_data.as_dict()["_attr_extra_state_attributes"])
if self.description.key == "avg_price" and self.coordinator.data is None:
_LOGGER.debug("fallback on stored state to fill coordinator.data object")
newest_stored_timestamp = pd.Timestamp(restored_last_extra_data.as_dict()["_attr_extra_state_attributes"].get("prices")[-1]["time"])
current_timestamp = pd.Timestamp.now(newest_stored_timestamp.tzinfo)
if newest_stored_timestamp < current_timestamp:
self.coordinator.data = self.parse_attribute_data_to_coordinator_data(restored_last_extra_data.as_dict()["_attr_extra_state_attributes"])
else:
_LOGGER.debug("Stored state dit not contain data of today. Skipped restoring coordinator data.")

return EntsoeSensorExtraStoredData.from_dict(
restored_last_extra_data.as_dict()
