Re-write load sites without retry
autoSteve committed Dec 30, 2024
1 parent 2768066 commit ded175f
Showing 4 changed files with 137 additions and 134 deletions.
6 changes: 5 additions & 1 deletion custom_components/solcast_solar/sim/simulate.py
@@ -116,6 +116,10 @@
],
"counter": 0,
},
"no_sites": {
"sites": [],
"counter": 0,
},
}
FORECAST = 0.9
FORECAST_10 = 0.75
@@ -192,7 +196,7 @@ def raw_get_sites(api_key):
meta = {
"page_count": 1,
"current_page": 1,
"total_records": 1,
"total_records": len(API_KEY_SITES[api_key]["sites"]),
}
return sites | meta

217 changes: 90 additions & 127 deletions custom_components/solcast_solar/solcastapi.py
@@ -552,175 +552,138 @@ async def __sites_data(self): # noqa: C901
the sites cannot be loaded then the integration cannot function, and this will
result in Home Assistant repeatedly trying to initialise.
If the sites cache exists then it is loaded immediately on first error.
If the sites cache exists then it is loaded on API error.
"""

async def load_cache(cache_filename: str):
_LOGGER.info("Loading cached sites for %s", self.__redact_api_key(api_key))
async with aiofiles.open(cache_filename) as file:
return json.loads(await file.read())

async def save_cache(cache_filename: str, response_data: dict):
_LOGGER.debug("Writing sites cache for %s", self.__redact_api_key(api_key))
async with self._serialise_lock, aiofiles.open(cache_filename, "w") as file:
await file.write(json.dumps(response_json, ensure_ascii=False))

def cached_sites_unavailable(at_least_one_only: bool = False):
if not at_least_one_only:
_LOGGER.error(
"Cached sites are not yet available for %s to cope with API call failure",
self.__redact_api_key(api_key),
)
_LOGGER.error("At least one successful API 'get sites' call is needed, so the integration will not function correctly")

try:
self.sites = []

def redact_lat_lon(s) -> str:
return re.sub(r"itude\': [0-9\-\.]+", "itude': **.******", s)

def set_sites(response_json: dict, api_key: str):
sites_data = cast(dict, response_json)
_LOGGER.debug(
"Sites data: %s",
self.__redact_msg_api_key(redact_lat_lon(str(sites_data)), api_key),
)
for site in sites_data["sites"]:
site["api_key"] = api_key
site.pop("longitude", None)
site.pop("latitude", None)
self.sites = self.sites + sites_data["sites"]
self._api_used_reset[api_key] = None
if not self.previously_loaded:
_LOGGER.info(
"Sites loaded%s",
(" for " + self.__redact_api_key(api_key)) if self.__is_multi_key() else "",
)

api_keys = self.options.api_key.split(",")
for api_key in api_keys:
api_key = api_key.strip()
async with asyncio.timeout(60):
async with asyncio.timeout(10):
cache_filename = self.__get_sites_cache_filename(api_key)
_LOGGER.debug(
"%s",
"Sites cache " + ("exists" if Path(cache_filename).is_file() else "does not yet exist"),
)
url = f"{self.options.host}/rooftop_sites"
params = {"format": "json", "api_key": api_key}
_LOGGER.debug(
"Connecting to %s?format=json&api_key=%s",
url,
self.__redact_api_key(api_key),
)
retries = 3
retry = retries
_LOGGER.debug("Connecting to %s?format=json&api_key=%s", url, self.__redact_api_key(api_key))

success = False
use_cache_immediate = False
cache_exists = Path(cache_filename).is_file()
while retry >= 0:
response: ClientResponse = await self._aiohttp_session.get(url=url, params=params, headers=self.headers, ssl=False)
response: ClientResponse = await self._aiohttp_session.get(url=url, params=params, headers=self.headers, ssl=False)

status = response.status
(_LOGGER.debug if status == 200 else _LOGGER.warning)(
"HTTP session returned status %s in __sites_data()%s",
self.__translate(status),
", trying cache" if status != 200 else "",
)
try:
response_json = await response.json(content_type=None)
except json.decoder.JSONDecodeError: # pragma: no cover, handle unexpected exceptions
_LOGGER.error("JSONDecodeError in __sites_data(): Solcast could be having problems")
except: # pragma: no cover, handle unexpected exceptions
raise
status = response.status
(_LOGGER.debug if status == 200 else _LOGGER.warning)(
"HTTP session returned status %s in __sites_data()%s",
self.__translate(status),
", trying cache" if status != 200 and cache_exists else "",
)
try:
response_json = await response.json(content_type=None)
except json.decoder.JSONDecodeError: # pragma: no cover, handle unexpected exceptions
_LOGGER.error("JSONDecodeError in __sites_data(): Solcast could be having problems")
except: # pragma: no cover, handle unexpected exceptions
raise

if status == 200:
for site in response_json["sites"]:
site["api_key"] = api_key
if response_json["total_records"] > 0:
_LOGGER.debug("Writing sites cache")
async with (
self._serialise_lock,
aiofiles.open(cache_filename, "w") as file,
):
await file.write(json.dumps(response_json, ensure_ascii=False))
success = True
break
_LOGGER.error( # pragma: no cover, simulator always returns sites
"No sites for the API key %s are configured at solcast.com",
self.__redact_api_key(api_key),
)
break # pragma: no cover, simulator always returns sites
if cache_exists: # pragma: no cover, simulator always returns sites
use_cache_immediate = True
break
if status == 401:
if status == 200:
for site in response_json["sites"]:
site["api_key"] = api_key
if response_json["total_records"] > 0:
await save_cache(cache_filename, response_json)
success = True
else:
_LOGGER.error(
"Error getting sites for the API key %s, is the key correct?",
"No sites for the API key %s are configured at solcast.com",
self.__redact_api_key(api_key),
)
break
if retry > 0: # pragma nocover, simulator always returns sites
_LOGGER.debug(
"Will retry get sites, retry %d",
(retries - retry) + 1,
)
await asyncio.sleep(5)
retry -= 1 # pragma nocover, simulator always returns sites
if status == 401 and not use_cache_immediate:
continue
cache_exists = False # Prevent cache load if no sites

if not success:
if not use_cache_immediate:
_LOGGER.warning(
"Retries exhausted gathering sites, last call result: %s, using cached data if it exists",
self.__translate(status),
)
status = 401
if cache_exists:
async with aiofiles.open(cache_filename) as file:
response_json = json.loads(await file.read())
status = 200
for site in response_json["sites"]:
if site.get("api_key") is None: # If the API key is not in the site then assume the key has not changed
site["api_key"] = api_key
if site["api_key"] not in api_keys:
status = 429
_LOGGER.debug("API key has changed so sites cache is invalid, not loading cached data")
break
else:
_LOGGER.error(
"Cached sites are not yet available for %s to cope with API call failure",
self.__redact_api_key(api_key),
"Get sites failed, last call result: %s, using cached data",
self.__translate(status),
)
else:
_LOGGER.error(
"At least one successful API 'get sites' call is needed, so the integration will not function correctly"
"Get sites failed, last call result: %s",
self.__translate(status),
)
if cache_exists:
response_json = await load_cache(cache_filename)
status = 200
# Check for API key change and cache validity
for site in response_json["sites"]: # pragma: no cover, cache validity not tested
if site.get("api_key") is None:
site["api_key"] = api_key
if site["api_key"] not in api_keys:
status = 429
_LOGGER.debug("API key has changed so sites cache is invalid, not loading cached data")
break
elif status != 200:
cached_sites_unavailable()

if status == 200:
sites_data = cast(dict, response_json)
_LOGGER.debug(
"Sites data: %s",
self.__redact_msg_api_key(redact_lat_lon(str(sites_data)), api_key),
)
for site in sites_data["sites"]:
site["api_key"] = api_key
site.pop("longitude", None)
site.pop("latitude", None)
self.sites = self.sites + sites_data["sites"]
if status == 200 and success:
set_sites(response_json, api_key)
self.sites_loaded = True
self._api_used_reset[api_key] = None
if not self.previously_loaded:
_LOGGER.info(
"Sites loaded%s",
(" for " + self.__redact_api_key(api_key)) if self.__is_multi_key() else "",
)
else:
_LOGGER.error(
"%s HTTP status error %s in __sites_data() while gathering sites",
self.options.host,
self.__translate(status),
)
raise Exception("HTTP __sites_data() error: gathering sites") # noqa: TRY002, TRY301
cached_sites_unavailable(at_least_one_only=True)
except (ClientConnectionError, ConnectionRefusedError, TimeoutError) as e:
try:
_LOGGER.warning("Error retrieving sites, attempting to continue: %s", e)
error = False
for api_key in api_keys:
api_key = api_key.strip()
cache_filename = self.__get_sites_cache_filename(api_key)
cache_exists = Path(cache_filename).is_file()
if cache_exists:
_LOGGER.info(
"Loading cached sites for %s",
self.__redact_api_key(api_key),
)
async with aiofiles.open(cache_filename) as file:
response_json = json.loads(await file.read())
sites_data = cast(dict, response_json)
_LOGGER.debug("Sites data: %s", redact_lat_lon(str(sites_data)))
for site in sites_data["sites"]:
site["api_key"] = api_key
site.pop("longitude", None)
site.pop("latitude", None)
self.sites = self.sites + sites_data["sites"]
self.sites_loaded = True
self._api_used_reset[api_key] = None
_LOGGER.info(
"Sites loaded%s",
(" for " + self.__redact_api_key(api_key)) if self.__is_multi_key() else "",
)
if Path(cache_filename).is_file(): # Cache exists, so load it
response_json = await load_cache(cache_filename)
set_sites(response_json, api_key)
self.sites_loaded = True
else:
error = True
_LOGGER.error(
"Cached sites are not yet available for %s to cope with API call failure",
self.__redact_api_key(api_key),
)
_LOGGER.error(
"At least one successful API 'get sites' call is needed, so the integration will not function correctly"
)
cached_sites_unavailable()
if error:
_LOGGER.error(
"Suggestion: Check your overall HA configuration, specifically networking related (Is IPV6 an issue for you? DNS? Proxy?)"
4 changes: 4 additions & 0 deletions tests/__init__.py
@@ -50,6 +50,7 @@

KEY1: Final = "1"
KEY2: Final = "2"
KEY_NO_SITES = "no_sites"
CUSTOM_HOURS: Final = 2
DEFAULT_INPUT1: Final = {
CONF_API_KEY: KEY1,
@@ -81,6 +82,9 @@
DEFAULT_INPUT2[BRK_HALFHOURLY] = True
DEFAULT_INPUT2[BRK_SITE_DETAILED] = True

DEFAULT_INPUT_NO_SITES = copy.deepcopy(DEFAULT_INPUT1)
DEFAULT_INPUT_NO_SITES[CONF_API_KEY] = KEY_NO_SITES

MOCK_SESSION_CONFIG = {
"api_limit": int(min(DEFAULT_INPUT2[API_QUOTA].split(","))),
"api_used": {api_key: 0 for api_key in DEFAULT_INPUT2[CONF_API_KEY].split(",")},