From 9373df609385ce2bd3d3187c4f89edbc0275fac3 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 18 Jun 2024 19:37:35 +1000 Subject: [PATCH 01/11] Independent API use counter for every account --- custom_components/solcast_solar/solcastapi.py | 135 ++++++++++-------- 1 file changed, 76 insertions(+), 59 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 57eab01d..6eeca58c 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -76,8 +76,8 @@ def __init__( self.apiCacheEnabled = apiCacheEnabled self._sites = [] self._data = {'siteinfo': {}, 'last_updated': dt.fromtimestamp(0, timezone.utc).isoformat()} - self._api_used = 0 - self._api_limit = 10 + self._api_used = {} + self._api_limit = {} self._filename = options.file_path self._tz = options.tz self._dataenergy = {} @@ -197,46 +197,54 @@ async def sites_usage(self): try: sp = self.options.api_key.split(",") - params = {"api_key": sp[0]} - _LOGGER.debug(f"SOLCAST - getting API limit and usage from solcast") - async with async_timeout.timeout(60): - apiCacheFileName = "solcast-usage.json" - resp: ClientResponse = await self.aiohttp_session.get( - url=f"https://api.solcast.com.au/json/reply/GetUserUsageAllowance", params=params, ssl=False - ) - retries = 3 - retry = retries - success = False - while retry > 0: - resp_json = await resp.json(content_type=None) - status = resp.status - if status == 200: - _LOGGER.debug(f"SOLCAST - writing usage cache") - async with aiofiles.open(apiCacheFileName, 'w') as f: - await f.write(json.dumps(resp_json, ensure_ascii=False)) - retry = 0 - success = True + for spl in sp: + sitekey = spl.strip() + #params = {"format": "json", "api_key": self.options.api_key} + params = {"api_key": sitekey} + _LOGGER.debug(f"SOLCAST - getting API limit and usage from solcast for {sitekey}") + async with async_timeout.timeout(60): + if len(sp) == 1: + 
apiCacheFileName = "solcast-usage.json" else: - _LOGGER.debug(f"SOLCAST - will retry GET GetUserUsageAllowance, retry {(retries - retry) + 1}") - await asyncio.sleep(5) - retry -= 1 - if not success: - if statusTranslate.get(status): status = str(status) + statusTranslate[status] - _LOGGER.warning(f"SOLCAST - Timeout getting usage allowance, last call result: {status}, using cached data if it exists") - status = 404 - if file_exists(apiCacheFileName): - _LOGGER.debug(f"SOLCAST - loading cached usage") - async with aiofiles.open(apiCacheFileName) as f: - resp_json = json.loads(await f.read()) - status = 200 + apiCacheFileName = "solcast-usage-%s.json" % (spl,) + resp: ClientResponse = await self.aiohttp_session.get( + url=f"https://api.solcast.com.au/json/reply/GetUserUsageAllowance", params=params, ssl=False + ) + retries = 3 + retry = retries + success = False + while retry > 0: + resp_json = await resp.json(content_type=None) + status = resp.status + if status == 200: + _LOGGER.debug(f"SOLCAST - writing usage cache") + async with aiofiles.open(apiCacheFileName, 'w') as f: + await f.write(json.dumps(resp_json, ensure_ascii=False)) + retry = 0 + success = True + else: + _LOGGER.debug(f"SOLCAST - will retry GET GetUserUsageAllowance, retry {(retries - retry) + 1}") + await asyncio.sleep(5) + retry -= 1 + if not success: + if statusTranslate.get(status): status = str(status) + statusTranslate[status] + _LOGGER.warning(f"SOLCAST - Timeout getting usage allowance, last call result: {status}, using cached data if it exists") + status = 404 + if file_exists(apiCacheFileName): + _LOGGER.debug(f"SOLCAST - loading cached usage") + async with aiofiles.open(apiCacheFileName) as f: + resp_json = json.loads(await f.read()) + status = 200 - if status == 200: - d = cast(dict, resp_json) - self._api_limit = d.get("daily_limit", None) - self._api_used = d.get("daily_limit_consumed", None) - _LOGGER.debug(f"SOLCAST - API counter is {self._api_used}/{self._api_limit}") - else: - 
raise Exception(f"SOLCAST - sites_usage: gathering site data failed. Request returned Status code: {status} - Response: {resp_json}.") + if status == 200: + d = cast(dict, resp_json) + self._api_limit[sitekey] = d.get("daily_limit", None) + self._api_used[sitekey] = d.get("daily_limit_consumed", None) + _LOGGER.debug(f"SOLCAST - API counter for {sitekey} is {self._api_used[sitekey]}/{self._api_limit[sitekey]}") + else: + self._api_limit[sitekey] = 10 + self._api_used[sitekey] = 0 + raise Exception(f"SOLCAST - sites_usage: gathering site usage failed. Request returned Status code: {status} - Response: {resp_json}.") except json.decoder.JSONDecodeError: _LOGGER.error("SOLCAST - sites_usage JSONDecodeError.. The data returned from Solcast is unknown, Solcast site could be having problems") @@ -365,12 +373,16 @@ async def get_forecast_list(self, *args): def get_api_used_count(self): """Return API polling count for this UTC 24hr period""" - return self._api_used + used = 0 + for k, v in self._api_used.items(): used += v + return used def get_api_limit(self): """Return API polling limit for this account""" try: - return self._api_limit + limit = 0 + for k, v in self._api_limit.items(): limit += v + return limit except Exception: return None @@ -586,25 +598,31 @@ def get_energy_data(self) -> dict[str, Any]: async def http_data(self, dopast = False): """Request forecast data via the Solcast API.""" + if self.get_last_updated_datetime() + timedelta(minutes=15) < dt.now(timezone.utc): + _LOGGER.warning(f"SOLCAST - not requesting forecast because time is within fifteen minutes of last update ({self.get_last_updated_datetime().astimezone(self._tz)})") + return + lastday = dt.now(self._tz) + timedelta(days=7) lastday = lastday.replace(hour=23,minute=59).astimezone(timezone.utc) + failure = False for site in self._sites: _LOGGER.debug(f"SOLCAST - API polling for rooftop {site['resource_id']}") #site=site['resource_id'], apikey=site['apikey'], result = await 
self.http_data_call(site['resource_id'], site['apikey'], dopast) - if not result: return + if not result: + failure = True - self._data["last_updated"] = dt.now(timezone.utc).isoformat() - #await self.sites_usage() self._data["version"] = _JSON_VERSION - #self._data["weather"] = self._weather - self._loaded_data = True + if not failure: + self._data["last_updated"] = dt.now(timezone.utc).isoformat() + #await self.sites_usage() + #self._data["weather"] = self._weather + self._loaded_data = True await self.buildforcastdata() await self.serialize_data() - - async def http_data_call(self, r_id = None, api = None, dopast = False): + async def http_data_call(self, usageCacheFileName, r_id = None, api = None, dopast = False): """Request forecast data via the Solcast API.""" lastday = dt.now(self._tz) + timedelta(days=7) lastday = lastday.replace(hour=23,minute=59).astimezone(timezone.utc) @@ -618,7 +636,7 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): # This does use up an api call count too if dopast: ae = None - resp_dict = await self.fetch_data("estimated_actuals", 168, site=r_id, apikey=api, cachedname="actuals") + resp_dict = await self.fetch_data(usageCacheFileName, "estimated_actuals", 168, site=r_id, apikey=api, cachedname="actuals") if not isinstance(resp_dict, dict): _LOGGER.warning( f"SOLCAST - No data was returned for estimated_actuals so this WILL cause errors... 
" @@ -652,7 +670,7 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): } ) - resp_dict = await self.fetch_data("forecasts", 168, site=r_id, apikey=api, cachedname="forecasts") + resp_dict = await self.fetch_data(usageCacheFileName, "forecasts", 168, site=r_id, apikey=api, cachedname="forecasts") if resp_dict is None: return False @@ -718,7 +736,7 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): return True - async def fetch_data(self, path= "error", hours=168, site="", apikey="", cachedname="forcasts") -> dict[str, Any]: + async def fetch_data(self, usageCacheFileName, path= "error", hours=168, site="", apikey="", cachedname="forcasts") -> dict[str, Any]: """fetch data via the Solcast API.""" try: @@ -728,7 +746,6 @@ async def fetch_data(self, path= "error", hours=168, site="", apikey="", cachedn async with async_timeout.timeout(480): apiCacheFileName = cachedname + "_" + site + ".json" - usageCacheFileName = "solcast-usage.json" if self.apiCacheEnabled and file_exists(apiCacheFileName): _LOGGER.debug(f"SOLCAST - Getting cached testing data for site {site}") status = 404 @@ -737,7 +754,7 @@ async def fetch_data(self, path= "error", hours=168, site="", apikey="", cachedn status = 200 _LOGGER.debug(f"SOLCAST - Got cached file data for site {site}") else: - if self._api_used < self._api_limit: + if self._api_used[apikey] < self._api_limit[apikey]: tries = 5 counter = 1 backoff = 30 @@ -756,13 +773,13 @@ async def fetch_data(self, path= "error", hours=168, site="", apikey="", cachedn counter += 1 if status == 200: - _LOGGER.debug(f"SOLCAST - API returned data. API Counter incremented from {self._api_used} to {self._api_used + 1}") - self._api_used = self._api_used + 1 + _LOGGER.debug(f"SOLCAST - API returned data. 
API Counter incremented from {self._api_used[apikey]} to {self._api_used[apikey] + 1}") + self._api_used[apikey] = self._api_used[apikey] + 1 _LOGGER.debug(f"SOLCAST - writing usage cache") async with aiofiles.open(usageCacheFileName, 'w') as f: - await f.write(json.dumps({"daily_limit": self._api_limit, "daily_limit_consumed": self._api_used}, ensure_ascii=False)) + await f.write(json.dumps({"daily_limit": self._api_limit[apikey], "daily_limit_consumed": self._api_used[apikey]}, ensure_ascii=False)) else: - _LOGGER.warning(f"SOLCAST - API returned status {status}. API used {self._api_used} to {self._api_used + 1}") + _LOGGER.warning(f"SOLCAST - API returned status {status}. API used {self._api_used[apikey]} to {self._api_used[apikey] + 1}") _LOGGER.warning("This is an error with the data returned from Solcast, not the integration") resp_json = await resp.json(content_type=None) @@ -778,7 +795,7 @@ async def fetch_data(self, path= "error", hours=168, site="", apikey="", cachedn _LOGGER.debug(f"SOLCAST - fetch_data code http_session status is {status}") if status == 429: - _LOGGER.warning("SOLCAST - Exceeded Solcast API allowed polling limit, or Solcast is too busy - API used is {self._api_used}/{self._api_limit}") + _LOGGER.warning("SOLCAST - Exceeded Solcast API allowed polling limit, or Solcast is too busy - API used is {self._api_used[apikey]}/{self._api_limit[apikey]}") elif status == 400: _LOGGER.warning( "SOLCAST - The rooftop site missing capacity, please specify capacity or provide historic data for tuning." 
From a3dadc79bbc2fae9ba4450488d4b2ca633d3d001 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 18 Jun 2024 20:16:10 +1000 Subject: [PATCH 02/11] Independent API use counter for every account take 2 --- custom_components/solcast_solar/solcastapi.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 6eeca58c..169240ef 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -598,7 +598,7 @@ def get_energy_data(self) -> dict[str, Any]: async def http_data(self, dopast = False): """Request forecast data via the Solcast API.""" - if self.get_last_updated_datetime() + timedelta(minutes=15) < dt.now(timezone.utc): + if self.get_last_updated_datetime() + timedelta(minutes=15) > dt.now(timezone.utc): _LOGGER.warning(f"SOLCAST - not requesting forecast because time is within fifteen minutes of last update ({self.get_last_updated_datetime().astimezone(self._tz)})") return @@ -609,7 +609,11 @@ async def http_data(self, dopast = False): for site in self._sites: _LOGGER.debug(f"SOLCAST - API polling for rooftop {site['resource_id']}") #site=site['resource_id'], apikey=site['apikey'], - result = await self.http_data_call(site['resource_id'], site['apikey'], dopast) + if len(self._sites) == 1: + usageCacheFileName = "solcast-usage.json" + else: + usageCacheFileName = "solcast-usage-%s.json" % (site['apikey'],) + result = await self.http_data_call(usageCacheFileName, site['resource_id'], site['apikey'], dopast) if not result: failure = True @@ -622,6 +626,7 @@ async def http_data(self, dopast = False): await self.buildforcastdata() await self.serialize_data() + async def http_data_call(self, usageCacheFileName, r_id = None, api = None, dopast = False): """Request forecast data via the Solcast API.""" lastday = dt.now(self._tz) + timedelta(days=7) From 2e8d961ef16ca1f1e3a638ebf0e01bb06a0a2c39 Mon 
Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 18 Jun 2024 22:36:46 +1000 Subject: [PATCH 03/11] Force all cache files to /config #43 --- custom_components/solcast_solar/__init__.py | 2 +- custom_components/solcast_solar/solcastapi.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/custom_components/solcast_solar/__init__.py b/custom_components/solcast_solar/__init__.py index db7de8cd..9e68719c 100644 --- a/custom_components/solcast_solar/__init__.py +++ b/custom_components/solcast_solar/__init__.py @@ -101,7 +101,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: options = ConnectionOptions( entry.options[CONF_API_KEY], SOLCAST_URL, - hass.config.path('solcast.json'), + hass.config.path('/config/solcast.json'), tz, optdamp, entry.options[CUSTOM_HOUR_SENSOR], diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 169240ef..0057c7c1 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -113,9 +113,9 @@ async def sites_data(self): params = {"format": "json", "api_key": spl.strip()} async with async_timeout.timeout(60): if len(sp) == 1: - apiCacheFileName = "solcast-sites.json" + apiCacheFileName = "/config/solcast-sites.json" else: - apiCacheFileName = "solcast-sites-%s.json" % (spl,) + apiCacheFileName = "/config/solcast-sites-%s.json" % (spl,) _LOGGER.debug(f"SOLCAST apiCacheEnabled={str(self.apiCacheEnabled)}, {apiCacheFileName}={str(file_exists(apiCacheFileName))}") if self.apiCacheEnabled and file_exists(apiCacheFileName): _LOGGER.debug(f"SOLCAST - loading cached sites data") @@ -204,9 +204,9 @@ async def sites_usage(self): _LOGGER.debug(f"SOLCAST - getting API limit and usage from solcast for {sitekey}") async with async_timeout.timeout(60): if len(sp) == 1: - apiCacheFileName = "solcast-usage.json" + apiCacheFileName = "/config/solcast-usage.json" else: - apiCacheFileName = 
"solcast-usage-%s.json" % (spl,) + apiCacheFileName = "/config/solcast-usage-%s.json" % (spl,) resp: ClientResponse = await self.aiohttp_session.get( url=f"https://api.solcast.com.au/json/reply/GetUserUsageAllowance", params=params, ssl=False ) @@ -610,9 +610,9 @@ async def http_data(self, dopast = False): _LOGGER.debug(f"SOLCAST - API polling for rooftop {site['resource_id']}") #site=site['resource_id'], apikey=site['apikey'], if len(self._sites) == 1: - usageCacheFileName = "solcast-usage.json" + usageCacheFileName = "/config/solcast-usage.json" else: - usageCacheFileName = "solcast-usage-%s.json" % (site['apikey'],) + usageCacheFileName = "/config/solcast-usage-%s.json" % (site['apikey'],) result = await self.http_data_call(usageCacheFileName, site['resource_id'], site['apikey'], dopast) if not result: failure = True @@ -750,7 +750,7 @@ async def fetch_data(self, usageCacheFileName, path= "error", hours=168, site="" _LOGGER.debug(f"SOLCAST - fetch_data code url - {url}") async with async_timeout.timeout(480): - apiCacheFileName = cachedname + "_" + site + ".json" + apiCacheFileName = '/config/' + cachedname + "_" + site + ".json" if self.apiCacheEnabled and file_exists(apiCacheFileName): _LOGGER.debug(f"SOLCAST - Getting cached testing data for site {site}") status = 404 From 5593fa81cd4681cc79c05dfd0e7560d5958dc4d7 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Wed, 19 Jun 2024 10:19:42 +1000 Subject: [PATCH 04/11] Fix typo in function name buildforecastdata() --- custom_components/solcast_solar/solcastapi.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 0057c7c1..5a39b2d2 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -330,7 +330,7 @@ async def load_saved_data(self): del jsonData['siteinfo'][ll] #create an up to date forecast and make sure the TZ fits just in case 
its changed - await self.buildforcastdata() + await self.buildforecastdata() if not self._loaded_data: #no file to load @@ -624,7 +624,7 @@ async def http_data(self, dopast = False): #self._data["weather"] = self._weather self._loaded_data = True - await self.buildforcastdata() + await self.buildforecastdata() await self.serialize_data() async def http_data_call(self, usageCacheFileName, r_id = None, api = None, dopast = False): @@ -851,7 +851,7 @@ def makeenergydict(self) -> dict: return wh_hours - async def buildforcastdata(self): + async def buildforecastdata(self): """build the data needed and convert where needed""" try: today = dt.now(self._tz).date() From a7445ee491805c7451fa17fe763e57a9746b6834 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Wed, 19 Jun 2024 16:50:34 +1000 Subject: [PATCH 05/11] Setting _api_used at UTC midnight properly --- custom_components/solcast_solar/coordinator.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/custom_components/solcast_solar/coordinator.py b/custom_components/solcast_solar/coordinator.py index da9c22e2..92c5ee83 100644 --- a/custom_components/solcast_solar/coordinator.py +++ b/custom_components/solcast_solar/coordinator.py @@ -56,7 +56,8 @@ async def update_integration_listeners(self, *args): async def update_utcmidnight_usage_sensor_data(self, *args): try: - self.solcast._api_used = 0 + for k in self.solcast._api_used.keys(): + self.solcast._api_used[k] = 0 self.async_update_listeners() except Exception: #_LOGGER.error("SOLCAST - update_utcmidnight_usage_sensor_data: %s", traceback.format_exc()) From 65c4c25c0d6675bd884a5fbff07782fb47463a43 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Wed, 19 Jun 2024 17:53:50 +1000 Subject: [PATCH 06/11] Improve forecast fetch logging --- custom_components/solcast_solar/solcastapi.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py 
index 5a39b2d2..9a905961 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -764,7 +764,7 @@ async def fetch_data(self, usageCacheFileName, path= "error", hours=168, site="" counter = 1 backoff = 30 while counter <= 5: - _LOGGER.debug(f"SOLCAST - Fetching forecast") + _LOGGER.info(f"SOLCAST - Fetching forecast") resp: ClientResponse = await self.aiohttp_session.get( url=url, params=params, ssl=False ) @@ -773,7 +773,7 @@ async def fetch_data(self, usageCacheFileName, path= "error", hours=168, site="" if status == 429: # Solcast is busy, so delay (30 seconds * counter), plus a random number of seconds between zero and 30 delay = (counter * backoff) + random.randrange(0,30) - _LOGGER.debug(f"SOLCAST - Solcast API is busy, pausing {delay} seconds before retry") + _LOGGER.warning(f"SOLCAST - Solcast API is busy, pausing {delay} seconds before retry") await asyncio.sleep(delay) counter += 1 @@ -783,6 +783,7 @@ async def fetch_data(self, usageCacheFileName, path= "error", hours=168, site="" _LOGGER.debug(f"SOLCAST - writing usage cache") async with aiofiles.open(usageCacheFileName, 'w') as f: await f.write(json.dumps({"daily_limit": self._api_limit[apikey], "daily_limit_consumed": self._api_used[apikey]}, ensure_ascii=False)) + _LOGGER.info(f"SOLCAST - Fetch successful") else: _LOGGER.warning(f"SOLCAST - API returned status {status}. 
API used {self._api_used[apikey]} to {self._api_used[apikey] + 1}") _LOGGER.warning("This is an error with the data returned from Solcast, not the integration") From 1212c44336925dc3333d7b30e66e0a3dc75e5661 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Thu, 20 Jun 2024 23:27:57 +1000 Subject: [PATCH 07/11] Move json get to status 200 block --- custom_components/solcast_solar/solcastapi.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 9a905961..d18dc21f 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -643,7 +643,7 @@ async def http_data_call(self, usageCacheFileName, r_id = None, api = None, dopa ae = None resp_dict = await self.fetch_data(usageCacheFileName, "estimated_actuals", 168, site=r_id, apikey=api, cachedname="actuals") if not isinstance(resp_dict, dict): - _LOGGER.warning( + _LOGGER.error( f"SOLCAST - No data was returned for estimated_actuals so this WILL cause errors... " f"Either your limit is exhaused, internet down, what ever the case is it is " f"NOT a problem with the integration, and all other problems of sensor values being wrong will be seen" @@ -784,15 +784,15 @@ async def fetch_data(self, usageCacheFileName, path= "error", hours=168, site="" async with aiofiles.open(usageCacheFileName, 'w') as f: await f.write(json.dumps({"daily_limit": self._api_limit[apikey], "daily_limit_consumed": self._api_used[apikey]}, ensure_ascii=False)) _LOGGER.info(f"SOLCAST - Fetch successful") + + resp_json = await resp.json(content_type=None) + + if self.apiCacheEnabled: + async with aiofiles.open(apiCacheFileName, 'w') as f: + await f.write(json.dumps(resp_json, ensure_ascii=False)) else: _LOGGER.warning(f"SOLCAST - API returned status {status}. 
API used {self._api_used[apikey]} to {self._api_used[apikey] + 1}") _LOGGER.warning("This is an error with the data returned from Solcast, not the integration") else: _LOGGER.warning(f"SOLCAST - API limit exceeded, not getting forecast") return None From 81478e8a72457729dacbd150d6b8f2dfe8771831 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Fri, 21 Jun 2024 19:27:36 +1000 Subject: [PATCH 08/11] Update manifest.json --- custom_components/solcast_solar/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/solcast_solar/manifest.json b/custom_components/solcast_solar/manifest.json index 9e2b9b6a..fe49c006 100644 --- a/custom_components/solcast_solar/manifest.json +++ b/custom_components/solcast_solar/manifest.json @@ -10,5 +10,5 @@ "iot_class": "cloud_polling", "issue_tracker": "https://github.com/BJReplay/ha-solcast-solar/issues", "requirements": ["aiohttp>=3.8.5", "datetime>=4.3", "isodate>=0.6.1"], - "version": "4.0.31" + "version": "4.0.32" } From 44749c83a27670dc08a15d9922f8258543581a95 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Fri, 21 Jun 2024 20:54:44 +1000 Subject: [PATCH 09/11] Update README.md --- README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/README.md b/README.md index 18bf69aa..28c57276 100644 --- a/README.md +++ b/README.md @@ -307,6 +307,18 @@ Modified from the great works of ## Changes +v4.0.32 +- Bug fix: Independent API use counter for each Solcast account by @autoSteve +- Bug fix: Force all caches to /config/ for all platforms (fixes Docker deployments) #43 by @autoSteve +- Improve forecast fetch/retry logging debug, info, warning choice by @autoSteve +- Suppression of consecutive forecast fetches within fifteen minutes (fixes strange multiple fetches should a restart occur 
exactly when automation for fetch is triggered) by @autoSteve +- Work-around: Prevent error when 'tally' is unavailable during retry by @autoSteve + +Full Changelog: https://github.com/BJReplay/ha-solcast-solar/compare/v4.0.31...v4.0.32 + +Known issues +- The variable 'tally' should never be unavailable during a forecast fetch retry sequence, but it can be for some reason. This causes site 'forecast today' sensor to show as 'Unknown' until the retries are exhausted, or a successful fetch occurs. + v4.0.31 - docs: Changes to README.md - docs: Add troubleshooting notes. From 21ccf1d1ab61c9030b7c1d11a12ee221979e67ef Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Fri, 21 Jun 2024 21:03:04 +1000 Subject: [PATCH 10/11] Suppress error for tally get during retries #42 --- custom_components/solcast_solar/solcastapi.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index d18dc21f..e92bd648 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -396,7 +396,8 @@ def get_last_updated_datetime(self) -> dt: def get_rooftop_site_total_today(self, rooftopid) -> float: """Return a rooftop sites total kw for today""" - return self._data["siteinfo"][rooftopid]["tally"] + if self._data["siteinfo"][rooftopid].get("tally") == None: _LOGGER.warning(f"SOLCAST - 'Tally' is currently unavailable for rooftop {rooftopid}") + return self._data["siteinfo"][rooftopid].get("tally") def get_rooftop_site_extra_data(self, rooftopid = ""): """Return a rooftop sites information""" From 093adf1578f2d18ef661112bc573388f2e1b1772 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Fri, 21 Jun 2024 21:24:41 +1000 Subject: [PATCH 11/11] Update README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 28c57276..6c7f5de7 100644 --- a/README.md +++ b/README.md @@ -313,6 +313,7 @@ v4.0.32 - 
Improve forecast fetch/retry logging debug, info, warning choice by @autoSteve - Suppression of consecutive forecast fetches within fifteen minutes (fixes strange multiple fetches should a restart occur exactly when automation for fetch is triggered) by @autoSteve - Work-around: Prevent error when 'tally' is unavailable during retry by @autoSteve +- Fix for earlier HA versions not recognising version= for async_update_entry() #40 by @autoSteve Full Changelog: https://github.com/BJReplay/ha-solcast-solar/compare/v4.0.31...v4.0.32