diff --git a/custom_components/solcast_solar/sim/simulate.py b/custom_components/solcast_solar/sim/simulate.py
index 1d50e8e3..89ed25c2 100644
--- a/custom_components/solcast_solar/sim/simulate.py
+++ b/custom_components/solcast_solar/sim/simulate.py
@@ -116,6 +116,10 @@
         ],
         "counter": 0,
     },
+    "no_sites": {
+        "sites": [],
+        "counter": 0,
+    },
 }
 FORECAST = 0.9
 FORECAST_10 = 0.75
@@ -192,7 +196,7 @@ def raw_get_sites(api_key):
     meta = {
         "page_count": 1,
         "current_page": 1,
-        "total_records": 1,
+        "total_records": len(API_KEY_SITES[api_key]["sites"]),
     }
     return sites | meta
 
diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py
index 09ee82b9..56a3ce80 100644
--- a/custom_components/solcast_solar/solcastapi.py
+++ b/custom_components/solcast_solar/solcastapi.py
@@ -552,18 +552,55 @@ async def __sites_data(self):  # noqa: C901
         the sites cannot be loaded then the integration cannot function, and this
         will result in Home Assistant repeatedly trying to initialise.
 
-        If the sites cache exists then it is loaded immediately on first error.
+        If the sites cache exists then it is loaded on API error.
         """
+
+        async def load_cache(cache_filename: str):
+            _LOGGER.info("Loading cached sites for %s", self.__redact_api_key(api_key))
+            async with aiofiles.open(cache_filename) as file:
+                return json.loads(await file.read())
+
+        async def save_cache(cache_filename: str, response_data: dict):
+            _LOGGER.debug("Writing sites cache for %s", self.__redact_api_key(api_key))
+            async with self._serialise_lock, aiofiles.open(cache_filename, "w") as file:
+                await file.write(json.dumps(response_data, ensure_ascii=False))
+
+        def cached_sites_unavailable(at_least_one_only: bool = False):
+            if not at_least_one_only:
+                _LOGGER.error(
+                    "Cached sites are not yet available for %s to cope with API call failure",
+                    self.__redact_api_key(api_key),
+                )
+            _LOGGER.error("At least one successful API 'get sites' call is needed, so the integration will not function correctly")
+
         try:
             self.sites = []
 
             def redact_lat_lon(s) -> str:
                 return re.sub(r"itude\': [0-9\-\.]+", "itude': **.******", s)
 
+            def set_sites(response_json: dict, api_key: str):
+                sites_data = cast(dict, response_json)
+                _LOGGER.debug(
+                    "Sites data: %s",
+                    self.__redact_msg_api_key(redact_lat_lon(str(sites_data)), api_key),
+                )
+                for site in sites_data["sites"]:
+                    site["api_key"] = api_key
+                    site.pop("longitude", None)
+                    site.pop("latitude", None)
+                self.sites = self.sites + sites_data["sites"]
+                self._api_used_reset[api_key] = None
+                if not self.previously_loaded:
+                    _LOGGER.info(
+                        "Sites loaded%s",
+                        (" for " + self.__redact_api_key(api_key)) if self.__is_multi_key() else "",
+                    )
+
             api_keys = self.options.api_key.split(",")
             for api_key in api_keys:
                 api_key = api_key.strip()
-                async with asyncio.timeout(60):
+                async with asyncio.timeout(10):
                     cache_filename = self.__get_sites_cache_filename(api_key)
                     _LOGGER.debug(
                         "%s",
@@ -571,119 +608,68 @@ def redact_lat_lon(s) -> str:
                     )
                     url = f"{self.options.host}/rooftop_sites"
                     params = {"format": "json", "api_key": api_key}
-                    _LOGGER.debug(
-                        "Connecting to %s?format=json&api_key=%s",
-                        url,
-                        self.__redact_api_key(api_key),
-                    )
-                    retries = 3
-                    retry = retries
+                    _LOGGER.debug("Connecting to %s?format=json&api_key=%s", url, self.__redact_api_key(api_key))
+
                     success = False
-                    use_cache_immediate = False
                     cache_exists = Path(cache_filename).is_file()
-                    while retry >= 0:
-                        response: ClientResponse = await self._aiohttp_session.get(url=url, params=params, headers=self.headers, ssl=False)
+                    response: ClientResponse = await self._aiohttp_session.get(url=url, params=params, headers=self.headers, ssl=False)
 
-                        status = response.status
-                        (_LOGGER.debug if status == 200 else _LOGGER.warning)(
-                            "HTTP session returned status %s in __sites_data()%s",
-                            self.__translate(status),
-                            ", trying cache" if status != 200 else "",
-                        )
-                        try:
-                            response_json = await response.json(content_type=None)
-                        except json.decoder.JSONDecodeError:  # pragma: no cover, handle unexpected exceptions
-                            _LOGGER.error("JSONDecodeError in __sites_data(): Solcast could be having problems")
-                        except:  # pragma: no cover, handle unexpected exceptions
-                            raise
+                    status = response.status
+                    (_LOGGER.debug if status == 200 else _LOGGER.warning)(
+                        "HTTP session returned status %s in __sites_data()%s",
+                        self.__translate(status),
+                        ", trying cache" if status != 200 and cache_exists else "",
+                    )
+                    try:
+                        response_json = await response.json(content_type=None)
+                    except json.decoder.JSONDecodeError:  # pragma: no cover, handle unexpected exceptions
+                        _LOGGER.error("JSONDecodeError in __sites_data(): Solcast could be having problems")
+                    except:  # pragma: no cover, handle unexpected exceptions
+                        raise
 
-                        if status == 200:
-                            for site in response_json["sites"]:
-                                site["api_key"] = api_key
-                            if response_json["total_records"] > 0:
-                                _LOGGER.debug("Writing sites cache")
-                                async with (
-                                    self._serialise_lock,
-                                    aiofiles.open(cache_filename, "w") as file,
-                                ):
-                                    await file.write(json.dumps(response_json, ensure_ascii=False))
-                                success = True
-                                break
-                            _LOGGER.error(  # pragma: no cover, simulator always returns sites
-                                "No sites for the API key %s are configured at solcast.com",
-                                self.__redact_api_key(api_key),
-                            )
-                            break  # pragma: no cover, simulator always returns sites
-                        if cache_exists:  # pragma: no cover, simulator always returns sites
-                            use_cache_immediate = True
-                            break
-                        if status == 401:
+                    if status == 200:
+                        for site in response_json["sites"]:
+                            site["api_key"] = api_key
+                        if response_json["total_records"] > 0:
+                            await save_cache(cache_filename, response_json)
+                            success = True
+                        else:
                             _LOGGER.error(
-                                "Error getting sites for the API key %s, is the key correct?",
+                                "No sites for the API key %s are configured at solcast.com",
                                 self.__redact_api_key(api_key),
                             )
-                            break
-                        if retry > 0:  # pragma nocover, simulator always returns sites
-                            _LOGGER.debug(
-                                "Will retry get sites, retry %d",
-                                (retries - retry) + 1,
-                            )
-                            await asyncio.sleep(5)
-                        retry -= 1  # pragma nocover, simulator always returns sites
-                        if status == 401 and not use_cache_immediate:
-                            continue
+                            cache_exists = False  # Prevent cache load if no sites
+
                     if not success:
-                        if not use_cache_immediate:
-                            _LOGGER.warning(
-                                "Retries exhausted gathering sites, last call result: %s, using cached data if it exists",
-                                self.__translate(status),
-                            )
-                            status = 401
                         if cache_exists:
-                            async with aiofiles.open(cache_filename) as file:
-                                response_json = json.loads(await file.read())
-                            status = 200
-                            for site in response_json["sites"]:
-                                if site.get("api_key") is None:  # If the API key is not in the site then assume the key has not changed
-                                    site["api_key"] = api_key
-                                if site["api_key"] not in api_keys:
-                                    status = 429
-                                    _LOGGER.debug("API key has changed so sites cache is invalid, not loading cached data")
-                                    break
-                        else:
                             _LOGGER.error(
-                                "Cached sites are not yet available for %s to cope with API call failure",
-                                self.__redact_api_key(api_key),
+                                "Get sites failed, last call result: %s, using cached data",
+                                self.__translate(status),
                             )
+                        else:
                             _LOGGER.error(
-                                "At least one successful API 'get sites' call is needed, so the integration will not function correctly"
+                                "Get sites failed, last call result: %s",
+                                self.__translate(status),
                             )
+                        if cache_exists:
+                            response_json = await load_cache(cache_filename)
+                            status = 200
+                            # Check for API key change and cache validity
+                            for site in response_json["sites"]:  # pragma: no cover, cache validity not tested
+                                if site.get("api_key") is None:
+                                    site["api_key"] = api_key
+                                if site["api_key"] not in api_keys:
+                                    status = 429
+                                    _LOGGER.debug("API key has changed so sites cache is invalid, not loading cached data")
+                                    break
+                        elif status != 200:
+                            cached_sites_unavailable()
 
-                    if status == 200:
-                        sites_data = cast(dict, response_json)
-                        _LOGGER.debug(
-                            "Sites data: %s",
-                            self.__redact_msg_api_key(redact_lat_lon(str(sites_data)), api_key),
-                        )
-                        for site in sites_data["sites"]:
-                            site["api_key"] = api_key
-                            site.pop("longitude", None)
-                            site.pop("latitude", None)
-                        self.sites = self.sites + sites_data["sites"]
+                    if status == 200 and (success or cache_exists):  # Sites were retrieved fresh, or loaded from cache
+                        set_sites(response_json, api_key)
                         self.sites_loaded = True
-                        self._api_used_reset[api_key] = None
-                        if not self.previously_loaded:
-                            _LOGGER.info(
-                                "Sites loaded%s",
-                                (" for " + self.__redact_api_key(api_key)) if self.__is_multi_key() else "",
-                            )
                     else:
-                        _LOGGER.error(
-                            "%s HTTP status error %s in __sites_data() while gathering sites",
-                            self.options.host,
-                            self.__translate(status),
-                        )
-                        raise Exception("HTTP __sites_data() error: gathering sites")  # noqa: TRY002, TRY301
+                        cached_sites_unavailable(at_least_one_only=True)
         except (ClientConnectionError, ConnectionRefusedError, TimeoutError) as e:
             try:
                 _LOGGER.warning("Error retrieving sites, attempting to continue: %s", e)
@@ -691,36 +677,13 @@ def redact_lat_lon(s) -> str:
             for api_key in api_keys:
                 api_key = api_key.strip()
                 cache_filename = self.__get_sites_cache_filename(api_key)
-                cache_exists = Path(cache_filename).is_file()
-                if cache_exists:
-                    _LOGGER.info(
-                        "Loading cached sites for %s",
-                        self.__redact_api_key(api_key),
-                    )
-                    async with aiofiles.open(cache_filename) as file:
-                        response_json = json.loads(await file.read())
-                    sites_data = cast(dict, response_json)
-                    _LOGGER.debug("Sites data: %s", redact_lat_lon(str(sites_data)))
-                    for site in sites_data["sites"]:
-                        site["api_key"] = api_key
-                        site.pop("longitude", None)
-                        site.pop("latitude", None)
-                    self.sites = self.sites + sites_data["sites"]
-                    self.sites_loaded = True
-                    self._api_used_reset[api_key] = None
-                    _LOGGER.info(
-                        "Sites loaded%s",
-                        (" for " + self.__redact_api_key(api_key)) if self.__is_multi_key() else "",
-                    )
+                if Path(cache_filename).is_file():  # Cache exists, so load it
+                    response_json = await load_cache(cache_filename)
+                    set_sites(response_json, api_key)
+                    self.sites_loaded = True
                 else:
                     error = True
-                    _LOGGER.error(
-                        "Cached sites are not yet available for %s to cope with API call failure",
-                        self.__redact_api_key(api_key),
-                    )
-                    _LOGGER.error(
-                        "At least one successful API 'get sites' call is needed, so the integration will not function correctly"
-                    )
+                    cached_sites_unavailable()
             if error:
                 _LOGGER.error(
                     "Suggestion: Check your overall HA configuration, specifically networking related (Is IPV6 an issue for you? DNS? Proxy?)"
diff --git a/tests/__init__.py b/tests/__init__.py
index 3bb40753..efedb210 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -50,6 +50,7 @@
 
 KEY1: Final = "1"
 KEY2: Final = "2"
+KEY_NO_SITES: Final = "no_sites"
 CUSTOM_HOURS: Final = 2
 DEFAULT_INPUT1: Final = {
     CONF_API_KEY: KEY1,
@@ -81,6 +82,9 @@
 DEFAULT_INPUT2[BRK_HALFHOURLY] = True
 DEFAULT_INPUT2[BRK_SITE_DETAILED] = True
 
+DEFAULT_INPUT_NO_SITES = copy.deepcopy(DEFAULT_INPUT1)
+DEFAULT_INPUT_NO_SITES[CONF_API_KEY] = KEY_NO_SITES
+
 MOCK_SESSION_CONFIG = {
     "api_limit": int(min(DEFAULT_INPUT2[API_QUOTA].split(","))),
     "api_used": {api_key: 0 for api_key in DEFAULT_INPUT2[CONF_API_KEY].split(",")},
diff --git a/tests/test_init.py b/tests/test_init.py
index 4b716a50..c2b541de 100644
--- a/tests/test_init.py
+++ b/tests/test_init.py
@@ -34,6 +34,7 @@
     BAD_INPUT,
     DEFAULT_INPUT1,
     DEFAULT_INPUT2,
+    DEFAULT_INPUT_NO_SITES,
     ZONE,
     async_cleanup_integration_tests,
     async_init_integration,
@@ -69,23 +70,41 @@ async def test_api_failure(
         config_dir = Path(__file__).parents[2] / "testing_config"
         assert config_dir.is_dir()
 
-        def assertions1():
+        def assertions1_busy():
+            assert "Get sites failed, last call result: 429/Try again later" in caplog.text
+            assert "Cached sites are not yet available" in caplog.text
+            assert "integration not ready yet" in caplog.text
+            caplog.clear()
+
+        def assertions1_except():
             assert "Error retrieving sites, attempting to continue" in caplog.text
             assert "Cached sites are not yet available" in caplog.text
             assert "integration not ready yet" in caplog.text
             caplog.clear()
 
-        def assertions2():
+        def assertions2_busy():
+            assert "Get sites failed, last call result: 429/Try again later, using cached data" in caplog.text
+            assert "Sites data:" in caplog.text
+            caplog.clear()
+
+        def assertions2_except():
             assert "Error retrieving sites, attempting to continue" in caplog.text
             assert "Sites data:" in caplog.text
             caplog.clear()
 
+        async def too_busy(assertions: callable):
+            mock_session_set_too_busy()
+            await async_init_integration(hass, DEFAULT_INPUT1)
+            assertions()
+            mock_session_clear_too_busy()
+
         async def exceptions(assertions: callable):
             mock_session_set_exception(ConnectionRefusedError)
             await async_init_integration(hass, DEFAULT_INPUT1)
             assertions()
             mock_session_set_exception(TimeoutError)
             await async_init_integration(hass, DEFAULT_INPUT1)
+            mock_session_clear_exception()
             assertions()
 
         # TODO: Fix this test, it is failing because a mocked exception is not being created successfully
@@ -93,18 +112,21 @@ async def exceptions(assertions: callable):
         #     await async_init_integration(hass, DEFAULT_INPUT1)
         #     assertions()
 
+        # Test API too busy during get sites without cache
+        await too_busy(assertions1_busy)
         # Test exceptions during get sites without cache
-        await exceptions(assertions1)
+        await exceptions(assertions1_except)
 
         # Normal start and teardown to create caches
-        mock_session_clear_exception()
         entry = await async_init_integration(hass, DEFAULT_INPUT1)
         assert hass.data[DOMAIN].get("has_loaded") is True
         assert await hass.config_entries.async_unload(entry.entry_id)
         await hass.async_block_till_done()
 
+        # Test API too busy during get sites with the cache present
+        await too_busy(assertions2_busy)
         # Test exceptions during get sites with the cache present
-        await exceptions(assertions2)
+        await exceptions(assertions2_except)
 
     finally:
         mock_session_clear_exception()
@@ -139,6 +161,7 @@ async def _exec_update(
     "options",
     [
         BAD_INPUT,
+        DEFAULT_INPUT_NO_SITES,
         DEFAULT_INPUT1,
         DEFAULT_INPUT2,
     ],
@@ -157,7 +180,16 @@ async def test_init(
     if options == BAD_INPUT:
         assert hass.data[DOMAIN].get("has_loaded") is None
         assert "Dampening factors corrupt or not found, setting to 1.0" in caplog.text
-        assert "Error getting sites for the API key ******badkey, is the key correct?" in caplog.text
+        assert "Get sites failed, last call result: 401/Unauthorized" in caplog.text
+        assert "Cached sites are not yet available" in caplog.text
+        assert "integration not ready yet" in caplog.text
         return
 
+    if options == DEFAULT_INPUT_NO_SITES:
+        entry = await async_init_integration(hass, DEFAULT_INPUT_NO_SITES)
+        assert "No sites for the API key ******_sites are configured at solcast.com" in caplog.text
+        assert "Get sites failed, last call result: 200/Success" in caplog.text
+        assert "integration not ready yet" in caplog.text
+        return
+
     assert hass.data[DOMAIN].get("has_loaded") is True
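
Note on the reworked flow: __sites_data() now makes a single GET per API key and, on failure, downgrades to the sites cache instead of retrying in a loop. Below is a minimal standalone sketch of that fetch-else-cache pattern for reviewers who want to exercise it in isolation. The names (get_with_cache_fallback, cache_file) are hypothetical, it assumes aiohttp, aiofiles and Python 3.11+ (asyncio.timeout), and it is an illustration only, not part of the patch.

    # Sketch only: one GET, falling back to a cached copy on failure
    # (hypothetical names; assumes aiohttp, aiofiles and Python 3.11+).
    import asyncio
    import json
    from pathlib import Path

    import aiofiles
    import aiohttp


    async def get_with_cache_fallback(url: str, cache_file: Path) -> dict | None:
        """Return fresh JSON when the GET succeeds, else the cached copy, else None."""
        try:
            async with asyncio.timeout(10), aiohttp.ClientSession() as session:
                response = await session.get(url)
                if response.status == 200:
                    data = await response.json(content_type=None)
                    # Refresh the cache only after a fully successful call
                    async with aiofiles.open(cache_file, "w") as file:
                        await file.write(json.dumps(data, ensure_ascii=False))
                    return data
        except (aiohttp.ClientConnectionError, TimeoutError):
            pass  # Fall through to the cache
        if cache_file.is_file():
            async with aiofiles.open(cache_file) as file:
                return json.loads(await file.read())
        return None  # No cache yet: the caller must treat this as "not ready"

As in the patch, a failed call falls back to cached data rather than blocking on retries, and a missing cache is surfaced to the caller so start-up can be reported as not ready.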