Skip to content

Commit

Permalink
Revert some changes for this branch
Browse files Browse the repository at this point in the history
  • Loading branch information
jmason committed Dec 16, 2022
1 parent 4507762 commit 4074303
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 26 deletions.
4 changes: 2 additions & 2 deletions custom_components/solis/sensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"""
from __future__ import annotations

from datetime import datetime, timedelta
from datetime import datetime
import logging
from typing import Any
import voluptuous as vol
Expand Down Expand Up @@ -148,7 +148,7 @@ def on_discovered(capabilities, cookie):
hass_sensors = create_sensors(discovered_sensors, cookie['service'], cookie['name'])
cookie['async_add_entities'](hass_sensors)
# schedule the first update in 1 minute from now:
cookie['service'].schedule_update(timedelta(minutes=1))
cookie['service'].schedule_update(1)

class SolisSensor(ServiceSubscriber, SensorEntity):
""" Representation of a Solis sensor. """
Expand Down
37 changes: 13 additions & 24 deletions custom_components/solis/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
from __future__ import annotations

import logging
import time

from abc import ABC, abstractmethod
from datetime import datetime, timedelta
Expand All @@ -22,14 +21,11 @@
from .ginlong_const import (
INVERTER_ENERGY_TODAY,
INVERTER_SERIAL,
INVERTER_STATE,
INVERTER_TIMESTAMP_UPDATE
INVERTER_STATE
)

# REFRESH CONSTANTS
# Match up with the default SolisCloud API resolution of 5 minutes
SCHEDULE_OK = 5
# Attempt retries every 1 minute if we fail to talk to the API, though
SCHEDULE_OK = 2
SCHEDULE_NOK = 1

_LOGGER = logging.getLogger(__name__)
Expand Down Expand Up @@ -179,32 +175,23 @@ async def update_devices(self, data: GinlongData) -> None:
continue
(self._subscriptions[serial][attribute]).data_updated(value, self.last_updated)

async def async_update(self, *_) -> None:
async def async_update(self, *_) -> int:
"""Update the data from Ginlong portal."""
update = timedelta(minutes=SCHEDULE_NOK)
update = SCHEDULE_NOK
# Login using username and password, but only every HRS_BETWEEN_LOGIN hours
if await self._login():
inverters = self._api.inverters
if inverters is None:
return
return update
for inverter_serial in inverters:
data = await self._api.fetch_inverter_data(inverter_serial)
if data is not None:
# And finally get the inverter details
# default to updating after SCHEDULE_OK minutes;
update = timedelta(minutes=SCHEDULE_OK)
# ...but try to figure out a better next-update time based on when the API last received its data
try:
ts = getattr(data, INVERTER_TIMESTAMP_UPDATE)
nxt = dt_util.utc_from_timestamp(ts) + update + timedelta(seconds=1)
if nxt > dt_util.utcnow():
update = nxt - dt_util.utcnow()
except AttributeError:
pass # no last_update found, so keep just using SCHEDULE_OK as a safe default
update = SCHEDULE_OK
self._last_updated = datetime.now()
await self.update_devices(data)
else:
update = timedelta(minutes=SCHEDULE_NOK)
update = SCHEDULE_NOK
# Reset session and try to login again next time
await self._logout()

Expand All @@ -215,10 +202,12 @@ async def async_update(self, *_) -> None:
# Time to login again
await self._logout()

def schedule_update(self, td: timedelta) -> None:
""" Schedule an update after td time. """
nxt = dt_util.utcnow() + td
_LOGGER.debug("Scheduling next update in %s, at %s", str(td), nxt)
return update

def schedule_update(self, minute: int = 1) -> None:
    """ Schedule a one-shot call to async_update, `minute` minutes from now.

    Registers `self.async_update` with Home Assistant's
    `async_track_point_in_utc_time` helper, firing once at
    utcnow() + minute minutes.

    :param minute: delay before the update fires, in whole minutes
        (defaults to 1).
    """
    _LOGGER.debug("Scheduling next update in %s minutes.", minute)
    # Absolute UTC trigger time for the one-shot timer.
    nxt = dt_util.utcnow() + timedelta(minutes=minute)
    # NOTE(review): this schedules exactly one invocation; async_update
    # appears to return the next interval (see `return update` in the
    # diff above), so presumably something re-schedules from that value —
    # confirm against the full service.py.
    async_track_point_in_utc_time(self._hass, self.async_update, nxt)

def schedule_discovery(self, callback, cookie: dict[str, Any], seconds: int = 1):
Expand Down

0 comments on commit 4074303

Please sign in to comment.