Commit

V2024.09.01
Correct options/data issue in version upgrade
petergridge authored Sep 18, 2024
1 parent 6abf82f commit d715938

Showing 4 changed files with 65 additions and 74 deletions.
10 changes: 3 additions & 7 deletions custom_components/openweathermaphistory/__init__.py
@@ -28,12 +28,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
PLATFORMS: list[str] = ["sensor"]
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

# hass.async_create_task(
# hass.config_entries.async_forward_entry_setup(
# entry, Platform.SENSOR
# )
# )

entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True

@@ -68,7 +62,9 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry):

if config_entry.version == 1:
new = {**config_entry.data}
name = config_entry.options.get(CONF_NAME)
dname = config_entry.data.get(CONF_NAME,'unknown')
# name = config_entry.options.get(CONF_NAME)
name = config_entry.options.get(CONF_NAME,dname)
try:
file = os.path.join(hass.config.path(), cv.slugify(name) + '.pickle')
if exists(file):
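The hunk above is the heart of the fix: during the version 1 migration the name was previously read only from config_entry.options, which can be empty, so building the pickle-file path could fail. A minimal sketch of the new lookup order; CONF_NAME is assumed to be Home Assistant's standard constant (its import is not shown in this hunk) and the helper name is illustrative.

from homeassistant.const import CONF_NAME  # assumed source of the constant

def resolve_entry_name(config_entry) -> str:
    # Prefer the value stored in options, fall back to data,
    # and finally to a placeholder so the slugified file name is never empty.
    dname = config_entry.data.get(CONF_NAME, 'unknown')
    return config_entry.options.get(CONF_NAME, dname)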
1 change: 1 addition & 0 deletions custom_components/openweathermaphistory/const.py
@@ -17,6 +17,7 @@
CONST_PROXIMITY = 1000
#max calls in a single refresh
CONST_CALLS = 24
CONST_INITIAL = 'initial'
#max calls in any 24 hour period
CONF_MAX_CALLS = "max_calls"

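CONST_INITIAL replaces the bare 'initial' string literals used in sensor.py and weatherhistory.py. The old comparison in get_data (see the weatherhistory.py hunk below) tested against the misspelled literal 'intial', so the one-hour startup branch could never match; sharing one constant removes that class of bug. A hedged sketch of the pattern, where only the two constants come from the diff and the function itself is illustrative:

from .const import CONST_CALLS, CONST_INITIAL

def calls_for_cycle(processing_type: str) -> int:
    # One API call on the first pass after startup, up to CONST_CALLS afterwards.
    return 1 if processing_type == CONST_INITIAL else CONST_CALLS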
10 changes: 3 additions & 7 deletions custom_components/openweathermaphistory/sensor.py
@@ -31,11 +31,12 @@
CONF_SENSORCLASS,
CONF_STATECLASS,
CONF_UID,
CONST_INITIAL,
DOMAIN,
)
from .weatherhistory import Weather

SCAN_INTERVAL = timedelta(minutes=30)
SCAN_INTERVAL = timedelta(minutes=5)

_LOGGER = logging.getLogger(__name__)

@@ -68,7 +69,7 @@ async def async_setup_entry(
config = config_entry.data
weather = Weather(hass,config)
#initialise the weather data
weather.set_processing_type ('initial')
weather.set_processing_type (CONST_INITIAL)
await weather.async_update()
async_add_entities(await _async_create_entities(hass, config, weather))

@@ -114,11 +115,7 @@ def __init__(self, hass: HomeAssistant, weather) -> None:
async def _async_update_data(self):
"""Fetch data from API endpoint."""
#process n records every cycle

await self._weather.async_update()
if self._weather.remaining_backlog() > 0 and self._weather.get_processing_type() != 'initial':
self._weather.set_processing_type ('backload')
await self._weather.async_update()

class WeatherHistory(CoordinatorEntity,SensorEntity):
'''Rain factor class defn.'''
@@ -154,7 +151,6 @@ def __init__( # noqa: D107
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._weather.set_processing_type ('general')
self.determine_state()
self.async_write_ha_state()
_LOGGER.debug('handle coordinator')
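Taken together, the sensor.py changes shorten the poll interval to five minutes and strip the coordinator's update hook down to a single call, leaving the backlog bookkeeping to the Weather class (see weatherhistory.py below). A minimal sketch of the resulting coordinator, assuming it wraps Home Assistant's DataUpdateCoordinator as is usual for this pattern; the class name and logger setup here are illustrative, not taken from the repository.

import logging
from datetime import timedelta

from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

class WeatherHistoryCoordinator(DataUpdateCoordinator):
    """Poll the Weather helper on a fixed five-minute cycle."""

    def __init__(self, hass, weather) -> None:
        super().__init__(
            hass,
            _LOGGER,
            name="openweathermaphistory",
            update_interval=timedelta(minutes=5),
        )
        self._weather = weather

    async def _async_update_data(self):
        # One call per cycle; Weather.async_update now decides internally
        # whether to fetch the current hour or work through its backlog.
        await self._weather.async_update()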
118 changes: 58 additions & 60 deletions custom_components/openweathermaphistory/weatherhistory.py
@@ -24,6 +24,7 @@
CONST_API_CALL,
CONST_API_FORECAST,
CONST_CALLS,
CONST_INITIAL,
)
from .data import RestData

@@ -132,7 +133,7 @@ async def get_forecastdata(self):
days = data.get('daily',{})
current = data.get('current',{})
except TypeError:
return
return None

#current observations
currentdata = {"rain":current.get('rain',{}).get('1h',0)
@@ -282,30 +283,30 @@ async def async_update(self):

dailycalls = {'time': midnight,'count':self._daily_count}

match self._processing_type:
case 'initial':
#on start up just get the latest hour
lastdt = self.maxdict(historydata)
if lastdt is None:
lastdt = thishour - 3600
_LOGGER.debug(lastdt)
historydata = await self.async_backload(historydata)
case _:
lastdt = self.maxdict(historydata)
historydata = await self.async_backload(historydata)
if self._processing_type == CONST_INITIAL:
#on start up just get the latest hour
last_data_point = self.maxdict(historydata)
if last_data_point is None:
last_data_point = thishour - 3600
_LOGGER.debug('initial %s',last_data_point)
historydata = await self.async_backload(historydata)
else:
last_data_point = self.maxdict(historydata)
historydata = await self.async_backload(historydata)

#empty file
if lastdt is None:
lastdt = thishour - 3600

if last_data_point is None:
last_data_point = thishour - 3600
_LOGGER.debug('update %s',self._processing_type)
#get new data if required
if lastdt < thishour:
if last_data_point < thishour:
data = await self.get_forecastdata()
if data is None or data == {}:
#httpx request failed
return
currentdata = data[0]
dailydata = data[1]
_LOGGER.debug('get data %s',self._processing_type)
historydata = await self.get_data(historydata)

#recalculate the backlog
@@ -349,95 +350,93 @@ async def get_data(self,historydata):
thishour = int(datetime.timestamp(hour))
data = historydata
#on startup only get one hour of data to not impact HA start
if self._processing_type == 'intial':
if self._processing_type == CONST_INITIAL:
hours = 1
else:
hours = CONST_CALLS

lastdt = self.maxdict(data)
if lastdt is None:
last_data_point = self.maxdict(data)
if last_data_point is None:
#no data yet so just get this hour's dataset
lastdt = thishour - 3600
last_data_point = thishour - 3600
#iterate until caught up to current hour
#or exceeded the call limit
target = min(thishour,thishour+hours*3600)
target = min(thishour,last_data_point+hours*3600)

while lastdt < target:
while last_data_point < target:
#increment last date by an hour
lastdt += 3600
hourdata = await self.gethourdata(lastdt)

last_data_point += 3600
hourdata = await self.gethourdata(last_data_point)
if hourdata == {}:
break

self._cumulative_rain += hourdata.get("rain",0)
self._cumulative_snow += hourdata.get("snow",0)

data.update({lastdt : hourdata })
data.update({last_data_point : hourdata })
#end rest loop
return data

def mindict(self,data):
"""Find minimum dictionary key."""
if data == {}:
return
return None
mini = int(next(iter(data)))
for x in data:
if int(x) < mini:
mini = int(x)
mini = min(int(x), mini)
return mini

def maxdict(self,data):
"""Find minimum dictionary key."""
if data == {}:
return
return None
maxi = int(next(iter(data)))
for x in data:
if int(x) > maxi:
maxi = int(x)
maxi = max(int(x), maxi)
return maxi

async def async_backload(self,historydata):
"""Backload data from the oldest data backward."""
"""Backload data."""
#from the oldest received data backward
#until all the backlog is processed
data = historydata
hour = datetime(date.today().year, date.today().month, date.today().day,datetime.now().hour)
thishour = int(datetime.timestamp(hour))
if self._processing_type == 'initial':
#limit the number of API calls in a single execution
if self._processing_type == CONST_INITIAL:
hours = 1
else:
hours = CONST_CALLS

if data == {}:
if data == {}: #new location
#the oldest data collected so far
earliestdata = thishour
startdp = thishour - 3600
targetdp = thishour - (self._initdays*3600*hours)
else:
try:
earliestdata = self.mindict(data)
except ValueError:
earliestdata = thishour

self._backlog = max(0,((self._initdays*hours*3600) - (thishour - earliestdata))/3600)
if self._backlog < 1:
return data

#the most recent data avaialble less one hour
startdp = self.mindict(data) - 3600
#the time required to back load until
targetdp = self.maxdict(data) - ((self._initdays*3600*hours)+1)

#determine last time to get data for in this iteration
end = max(targetdp, startdp-(3600*hours))
_LOGGER.debug('start %s, end %s',startdp,end)
while startdp > end :
#decrement start data point time by an hour
self._backlog -= 1
startdp -= 3600
hourdata = await self.gethourdata(startdp)
expected_earliest_data = thishour - (self._initdays*24*3600)
backlog = earliestdata - expected_earliest_data - 3600
self._backlog = max(0,backlog/3600)
if self._backlog < 1:
return data

x = 1
while x <= hours:
#get the data for the hour
data_piont_time = earliestdata-(3600*x)
hourdata = await self.gethourdata(data_piont_time)
if hourdata == {}:
return
data.update({str(startdp) : hourdata })
_LOGGER.debug(data)
#no data found so abort the loop
break
#Add the data collected to the weather history
data.update({str(data_piont_time) : hourdata })
#decrement the backlog
self._backlog -= 1
if self._backlog < 1:
break
x+=1

return data

async def gethourdata(self,timestamp):
@@ -463,7 +462,6 @@ async def gethourdata(self,timestamp):
except TypeError:
_LOGGER.warning('OpenWeatherMap history call failed')
return {}

#build this hours data
precipval = {}
preciptypes = ['rain','snow']
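The reworked async_backload sizes the backlog directly from the configured number of initial days and works it off in slices of at most CONST_CALLS data points per cycle. A small worked example of that arithmetic, using illustrative values for the timestamp and the configured day count:

# Worked example of the backlog calculation in async_backload above
# (timestamp and initdays value are illustrative).
thishour = 1_726_660_800                      # top of the current hour, epoch seconds
initdays = 5                                  # days of history requested in the config
earliestdata = thishour                       # new location: nothing stored yet

expected_earliest_data = thishour - initdays * 24 * 3600
backlog_hours = max(0, (earliestdata - expected_earliest_data - 3600) / 3600)
assert backlog_hours == 119.0                 # five days of hourly data minus the current hour,
                                              # fetched at up to CONST_CALLS (24) points per cycle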
