diff --git a/src/antares/api_conf/api_conf.py b/src/antares/api_conf/api_conf.py index 8b26549a..6835939d 100644 --- a/src/antares/api_conf/api_conf.py +++ b/src/antares/api_conf/api_conf.py @@ -1,65 +1,65 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -import requests - -from antares.config.base_configuration import BaseConfiguration -from antares.exceptions.exceptions import MissingTokenError - - -class APIconf(BaseConfiguration): - """ - APIconf defines host and token to be used for API mode - """ - - def __init__(self, api_host: str, token: str, verify: bool = True) -> None: - self._api_host: str = api_host - self._token: str = token - self._verify: bool = verify - - @property - def token(self) -> str: - return self._token - - @token.setter - def token(self, value: str) -> None: - self._token = value - - @property - def verify(self) -> bool: - return self._verify - - @property - def api_host(self) -> str: - return self._api_host - - @api_host.setter - def api_host(self, value: str) -> None: - self._api_host = value - - def get_host(self) -> str: - return self._api_host - - def get_token(self) -> str: - return self._token - - def checks_token(self) -> None: - if self._api_host not in ["localhost", "127.0.0.1"] and self._token is None: - raise MissingTokenError() - - def set_up_api_conf(self) -> requests.Session: - self.checks_token() - session = requests.Session() - if self._token: - token_bearer = f"Bearer {self._token}" - session.headers.update({"Authorization": token_bearer}) - return session +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla 
Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import requests + +from antares.config.base_configuration import BaseConfiguration +from antares.exceptions.exceptions import MissingTokenError + + +class APIconf(BaseConfiguration): + """ + APIconf defines host and token to be used for API mode + """ + + def __init__(self, api_host: str, token: str, verify: bool = True) -> None: + self._api_host: str = api_host + self._token: str = token + self._verify: bool = verify + + @property + def token(self) -> str: + return self._token + + @token.setter + def token(self, value: str) -> None: + self._token = value + + @property + def verify(self) -> bool: + return self._verify + + @property + def api_host(self) -> str: + return self._api_host + + @api_host.setter + def api_host(self, value: str) -> None: + self._api_host = value + + def get_host(self) -> str: + return self._api_host + + def get_token(self) -> str: + return self._token + + def checks_token(self) -> None: + if self._api_host not in ["localhost", "127.0.0.1"] and self._token is None: + raise MissingTokenError() + + def set_up_api_conf(self) -> requests.Session: + self.checks_token() + session = requests.Session() + if self._token: + token_bearer = f"Bearer {self._token}" + session.headers.update({"Authorization": token_bearer}) + return session diff --git a/src/antares/api_conf/request_wrapper.py b/src/antares/api_conf/request_wrapper.py index a28c6ee2..1655277c 100644 --- a/src/antares/api_conf/request_wrapper.py +++ b/src/antares/api_conf/request_wrapper.py @@ -1,74 +1,64 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. 
If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -import json -from typing import IO, Any, Iterable, Mapping, Optional, Tuple, Union - -import requests - -from antares.exceptions.exceptions import APIError - -DATA_TYPE = Union[ - str, bytes, Mapping[str, Any], Iterable[Tuple[str, Optional[str]]], IO -] - - -def _handle_exceptions(response: requests.Response) -> requests.Response: - """ - If an exception occurred, returns APIError exception containing the AntaresWeb error message. - """ - if response.status_code - 200 < 100: - return response - try: - msg = response.json()["description"] - except (json.decoder.JSONDecodeError, KeyError): - msg = response.reason - raise APIError(msg) - - -class RequestWrapper: - """ - A wrapper around the requests library - """ - - def __init__(self, session: requests.Session): - self.session = session - - def get(self, url: str, **kwargs: Any) -> requests.Response: - response = self.session.get(url, **kwargs) - return _handle_exceptions(response) - - def post( - self, - url: str, - data: Optional[DATA_TYPE] = None, - json: Optional[Any] = None, - **kwargs: Any - ) -> requests.Response: - response = self.session.post(url, data, json, **kwargs) - return _handle_exceptions(response) - - def put( - self, url: str, data: Optional[DATA_TYPE] = None, **kwargs: Any - ) -> requests.Response: - response = self.session.put(url, data, **kwargs) - return _handle_exceptions(response) - - def patch( - self, url: str, data: Optional[DATA_TYPE] = None, **kwargs: Any - ) -> requests.Response: - response = self.session.patch(url, data, **kwargs) - return _handle_exceptions(response) - - def delete(self, url: str, **kwargs: Any) -> requests.Response: - response = self.session.delete(url, **kwargs) - return _handle_exceptions(response) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See 
AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import json +from typing import IO, Any, Iterable, Mapping, Optional, Tuple, Union + +import requests + +from antares.exceptions.exceptions import APIError + +DATA_TYPE = Union[str, bytes, Mapping[str, Any], Iterable[Tuple[str, Optional[str]]], IO] + + +def _handle_exceptions(response: requests.Response) -> requests.Response: + """ + If an exception occurred, returns APIError exception containing the AntaresWeb error message. + """ + if response.status_code - 200 < 100: + return response + try: + msg = response.json()["description"] + except (json.decoder.JSONDecodeError, KeyError): + msg = response.reason + raise APIError(msg) + + +class RequestWrapper: + """ + A wrapper around the requests library + """ + + def __init__(self, session: requests.Session): + self.session = session + + def get(self, url: str, **kwargs: Any) -> requests.Response: + response = self.session.get(url, **kwargs) + return _handle_exceptions(response) + + def post( + self, url: str, data: Optional[DATA_TYPE] = None, json: Optional[Any] = None, **kwargs: Any + ) -> requests.Response: + response = self.session.post(url, data, json, **kwargs) + return _handle_exceptions(response) + + def put(self, url: str, data: Optional[DATA_TYPE] = None, **kwargs: Any) -> requests.Response: + response = self.session.put(url, data, **kwargs) + return _handle_exceptions(response) + + def patch(self, url: str, data: Optional[DATA_TYPE] = None, **kwargs: Any) -> requests.Response: + response = self.session.patch(url, data, **kwargs) + return _handle_exceptions(response) + + def delete(self, url: str, **kwargs: Any) -> requests.Response: + response = self.session.delete(url, **kwargs) + return 
_handle_exceptions(response) diff --git a/src/antares/config/base_configuration.py b/src/antares/config/base_configuration.py index bb8abe20..51fc5891 100644 --- a/src/antares/config/base_configuration.py +++ b/src/antares/config/base_configuration.py @@ -1,17 +1,17 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from abc import ABC - - -class BaseConfiguration(ABC): - pass +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from abc import ABC + + +class BaseConfiguration(ABC): + pass diff --git a/src/antares/config/local_configuration.py b/src/antares/config/local_configuration.py index 725877a0..6e17f1bf 100644 --- a/src/antares/config/local_configuration.py +++ b/src/antares/config/local_configuration.py @@ -1,29 +1,29 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from pathlib import Path - -from antares.config.base_configuration import BaseConfiguration - - -class LocalConfiguration(BaseConfiguration): - def __init__(self, local_path: Path, study_name: str): - self._local_path = local_path - self._study_name = study_name - - @property - def local_path(self) -> Path: - return self._local_path - - @property - def study_path(self) -> Path: - return self._local_path / self._study_name +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from pathlib import Path + +from antares.config.base_configuration import BaseConfiguration + + +class LocalConfiguration(BaseConfiguration): + def __init__(self, local_path: Path, study_name: str): + self._local_path = local_path + self._study_name = study_name + + @property + def local_path(self) -> Path: + return self._local_path + + @property + def study_path(self) -> Path: + return self._local_path / self._study_name diff --git a/src/antares/exceptions/exceptions.py b/src/antares/exceptions/exceptions.py index 35074538..d77fe662 100644 --- a/src/antares/exceptions/exceptions.py +++ b/src/antares/exceptions/exceptions.py @@ -1,324 +1,276 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from typing import List - - -class InvalidChoiceError(ValueError): - def __init__(self, message: str = "Invalid choice") -> None: - self.message = message - super().__init__(self.message) - - -class APIError(Exception): - def __init__(self, message: str) -> None: - self.message = message - super().__init__(self.message) - - -class MissingTokenError(Exception): - def __init__(self) -> None: - super().__init__("Action can't be completed, you need to provide an api_token") - - -class AreaCreationError(Exception): - def __init__(self, area_name: str, message: str) -> None: - self.message = f"Could not create the area {area_name}: " + message - super().__init__(self.message) - - -class AreaPropertiesUpdateError(Exception): - def __init__(self, area_name: str, message: str) -> None: - self.message = f"Could not update properties for area {area_name}: " + message - super().__init__(self.message) - - -class AreaUiUpdateError(Exception): - def __init__(self, area_name: str, message: str) -> None: - self.message = f"Could not update ui for area {area_name}: " + message - super().__init__(self.message) - - -class AreaDeletionError(Exception): - def __init__(self, area_name: str, message: str) -> None: - self.message = f"Could not delete the area {area_name}: " + message - super().__init__(self.message) - - -class LinkCreationError(Exception): - def __init__(self, area_from: str, area_to: str, message: str) -> None: - self.message = f"Could not create the link {area_from} / {area_to}: " + message - super().__init__(self.message) - - -class LinkPropertiesUpdateError(Exception): - def __init__(self, link_name: str, message: str) -> None: - self.message = f"Could not update properties for link {link_name}: " + message - super().__init__(self.message) - - -class LinkUiUpdateError(Exception): - def __init__(self, link_name: str, message: str) -> None: - self.message = f"Could not update ui for link {link_name}: " + message - super().__init__(self.message) - - -class 
LinkDeletionError(Exception): - def __init__(self, link_name: str, message: str) -> None: - self.message = f"Could not delete the link {link_name}: " + message - super().__init__(self.message) - - -class ThermalCreationError(Exception): - def __init__(self, thermal_name: str, area_id: str, message: str) -> None: - self.message = ( - f"Could not create the thermal cluster {thermal_name} inside area {area_id}: " - + message - ) - super().__init__(self.message) - - -class ThermalPropertiesUpdateError(Exception): - def __init__(self, thermal_name: str, area_id: str, message: str) -> None: - self.message = ( - f"Could not update properties for thermal cluster {thermal_name} inside area {area_id}: " - + message - ) - super().__init__(self.message) - - -class ThermalDeletionError(Exception): - def __init__(self, area_id: str, thermal_names: List[str], message: str) -> None: - self.message = ( - f"Could not delete the following thermal clusters: {', '.join(thermal_names)} inside area {area_id}: " - + message - ) - super().__init__(self.message) - - -class HydroCreationError(Exception): - def __init__(self, area_id: str, message: str) -> None: - self.message = f"Could not create hydro inside area {area_id}: " + message - super().__init__(self.message) - - -class RenewableCreationError(Exception): - def __init__(self, renewable_name: str, area_id: str, message: str) -> None: - self.message = ( - f"Could not create the renewable cluster {renewable_name} inside area {area_id}: " - + message - ) - super().__init__(self.message) - - -class RenewablePropertiesUpdateError(Exception): - def __init__(self, renewable_name: str, area_id: str, message: str) -> None: - self.message = ( - f"Could not update properties for renewable cluster {renewable_name} inside area {area_id}: " - + message - ) - super().__init__(self.message) - - -class RenewableDeletionError(Exception): - def __init__(self, area_id: str, renewable_names: List[str], message: str) -> None: - self.message = ( - f"Could 
not delete the following renewable clusters: {', '.join(renewable_names)} inside area {area_id}: " - + message - ) - super().__init__(self.message) - - -class STStorageCreationError(Exception): - def __init__(self, st_storage_name: str, area_id: str, message: str) -> None: - self.message = ( - f"Could not create the short term storage {st_storage_name} inside area {area_id}: " - + message - ) - super().__init__(self.message) - - -class STStoragePropertiesUpdateError(Exception): - def __init__(self, st_storage_name: str, area_id: str, message: str) -> None: - self.message = ( - f"Could not update properties for short term storage {st_storage_name} inside area {area_id}: " - + message - ) - super().__init__(self.message) - - -class STStorageMatrixDownloadError(Exception): - def __init__( - self, area_name: str, storage_name: str, matrix_name: str, message: str - ) -> None: - self.message = ( - f"Could not download {matrix_name} matrix for storage {storage_name} inside area {area_name}: " - + message - ) - super().__init__(self.message) - - -class STStorageMatrixUploadError(Exception): - def __init__( - self, area_name: str, storage_name: str, matrix_name: str, message: str - ) -> None: - self.message = ( - f"Could not upload {matrix_name} matrix for storage {storage_name} inside area {area_name}: " - + message - ) - super().__init__(self.message) - - -class STStorageDeletionError(Exception): - def __init__(self, area_id: str, st_storage_names: List[str], message: str) -> None: - self.message = ( - f"Could not delete the following short term storages: {', '.join(st_storage_names)} inside area {area_id}: " - + message - ) - super().__init__(self.message) - - -class BindingConstraintCreationError(Exception): - def __init__(self, constraint_name: str, message: str) -> None: - self.message = ( - f"Could not create the binding constraint {constraint_name}: " + message - ) - super().__init__(self.message) - - -class ConstraintPropertiesUpdateError(Exception): - def 
__init__(self, constraint_name: str, message: str) -> None: - self.message = ( - f"Could not update properties for binding constraint {constraint_name}: " - + message - ) - super().__init__(self.message) - - -class ConstraintMatrixUpdateError(Exception): - def __init__(self, constraint_name: str, matrix_name: str, message: str) -> None: - self.message = ( - f"Could not update matrix {matrix_name} for binding constraint {constraint_name}: " - + message - ) - super().__init__(self.message) - - -class ConstraintMatrixDownloadError(Exception): - def __init__(self, constraint_name: str, matrix_name: str, message: str) -> None: - self.message = ( - f"Could not download matrix {matrix_name} for binding constraint {constraint_name}: " - + message - ) - super().__init__(self.message) - - -class ConstraintTermAdditionError(Exception): - def __init__( - self, constraint_name: str, terms_ids: List[str], message: str - ) -> None: - self.message = ( - f"Could not add the following constraint terms: {', '.join(terms_ids)} inside constraint {constraint_name}: " - + message - ) - super().__init__(self.message) - - -class BindingConstraintDeletionError(Exception): - def __init__(self, constraint_name: str, message: str) -> None: - self.message = ( - f"Could not delete the binding constraint {constraint_name}: " + message - ) - super().__init__(self.message) - - -class ConstraintTermDeletionError(Exception): - def __init__(self, constraint_id: str, term_id: str, message: str) -> None: - self.message = ( - f"Could not delete the term {term_id} of the binding constraint {constraint_id}: " - + message - ) - super().__init__(self.message) - - -class StudyCreationError(Exception): - def __init__(self, study_name: str, message: str) -> None: - self.message = f"Could not create the study {study_name}: " + message - super().__init__(self.message) - - -class StudySettingsUpdateError(Exception): - def __init__(self, study_name: str, message: str) -> None: - self.message = f"Could not update 
settings for study {study_name}: " + message - super().__init__(self.message) - - -class StudyDeletionError(Exception): - def __init__(self, study_id: str, message: str) -> None: - self.message = f"Could not delete the study {study_id}: " + message - super().__init__(self.message) - - -class LoadMatrixUploadError(Exception): - def __init__(self, area_name: str, message: str) -> None: - self.message = f"Could not upload load matrix for area {area_name}: " + message - super().__init__(self.message) - - -class LoadMatrixDownloadError(Exception): - def __init__(self, area_name: str, message: str) -> None: - self.message = ( - f"Could not download load matrix for area {area_name}: " + message - ) - super().__init__(self.message) - - -class ThermalMatrixDownloadError(Exception): - def __init__( - self, area_name: str, cluster_name: str, matrix_name: str, message: str - ) -> None: - self.message = ( - f"Could not download {matrix_name} for cluster {cluster_name} inside area {area_name}: " - + message - ) - super().__init__(self.message) - - -class RenewableMatrixDownloadError(Exception): - def __init__(self, area_name: str, renewable_name: str, message: str) -> None: - self.message = ( - f"Could not download matrix for cluster {renewable_name} inside area {area_name}: " - + message - ) - super().__init__(self.message) - - -class MatrixUploadError(Exception): - def __init__(self, area_id: str, message: str) -> None: - self.message = f"Error uploading matrix for area {area_id}: {message}" - super().__init__(self.message) - - -class CustomError(Exception): - def __init__(self, message: str = "Error") -> None: - self.message = message - super().__init__(self.message) - - -class ConfigurationError(Exception): - def __init__(self, message: str = "Error") -> None: - self.message = ( - "Unsupported configuration type" + f" {message}" - if message != "Error" - else "" - ) - super().__init__(self.message) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt 
+# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from typing import List + + +class InvalidChoiceError(ValueError): + def __init__(self, message: str = "Invalid choice") -> None: + self.message = message + super().__init__(self.message) + + +class APIError(Exception): + def __init__(self, message: str) -> None: + self.message = message + super().__init__(self.message) + + +class MissingTokenError(Exception): + def __init__(self) -> None: + super().__init__("Action can't be completed, you need to provide an api_token") + + +class AreaCreationError(Exception): + def __init__(self, area_name: str, message: str) -> None: + self.message = f"Could not create the area {area_name}: " + message + super().__init__(self.message) + + +class AreaPropertiesUpdateError(Exception): + def __init__(self, area_name: str, message: str) -> None: + self.message = f"Could not update properties for area {area_name}: " + message + super().__init__(self.message) + + +class AreaUiUpdateError(Exception): + def __init__(self, area_name: str, message: str) -> None: + self.message = f"Could not update ui for area {area_name}: " + message + super().__init__(self.message) + + +class AreaDeletionError(Exception): + def __init__(self, area_name: str, message: str) -> None: + self.message = f"Could not delete the area {area_name}: " + message + super().__init__(self.message) + + +class LinkCreationError(Exception): + def __init__(self, area_from: str, area_to: str, message: str) -> None: + self.message = f"Could not create the link {area_from} / {area_to}: " + message + super().__init__(self.message) + + +class LinkPropertiesUpdateError(Exception): + def __init__(self, link_name: str, message: str) -> None: + self.message = f"Could not update properties for 
link {link_name}: " + message + super().__init__(self.message) + + +class LinkUiUpdateError(Exception): + def __init__(self, link_name: str, message: str) -> None: + self.message = f"Could not update ui for link {link_name}: " + message + super().__init__(self.message) + + +class LinkDeletionError(Exception): + def __init__(self, link_name: str, message: str) -> None: + self.message = f"Could not delete the link {link_name}: " + message + super().__init__(self.message) + + +class ThermalCreationError(Exception): + def __init__(self, thermal_name: str, area_id: str, message: str) -> None: + self.message = f"Could not create the thermal cluster {thermal_name} inside area {area_id}: " + message + super().__init__(self.message) + + +class ThermalPropertiesUpdateError(Exception): + def __init__(self, thermal_name: str, area_id: str, message: str) -> None: + self.message = ( + f"Could not update properties for thermal cluster {thermal_name} inside area {area_id}: " + message + ) + super().__init__(self.message) + + +class ThermalDeletionError(Exception): + def __init__(self, area_id: str, thermal_names: List[str], message: str) -> None: + self.message = ( + f"Could not delete the following thermal clusters: {', '.join(thermal_names)} inside area {area_id}: " + + message + ) + super().__init__(self.message) + + +class HydroCreationError(Exception): + def __init__(self, area_id: str, message: str) -> None: + self.message = f"Could not create hydro inside area {area_id}: " + message + super().__init__(self.message) + + +class RenewableCreationError(Exception): + def __init__(self, renewable_name: str, area_id: str, message: str) -> None: + self.message = f"Could not create the renewable cluster {renewable_name} inside area {area_id}: " + message + super().__init__(self.message) + + +class RenewablePropertiesUpdateError(Exception): + def __init__(self, renewable_name: str, area_id: str, message: str) -> None: + self.message = ( + f"Could not update properties for renewable 
cluster {renewable_name} inside area {area_id}: " + message + ) + super().__init__(self.message) + + +class RenewableDeletionError(Exception): + def __init__(self, area_id: str, renewable_names: List[str], message: str) -> None: + self.message = ( + f"Could not delete the following renewable clusters: {', '.join(renewable_names)} inside area {area_id}: " + + message + ) + super().__init__(self.message) + + +class STStorageCreationError(Exception): + def __init__(self, st_storage_name: str, area_id: str, message: str) -> None: + self.message = f"Could not create the short term storage {st_storage_name} inside area {area_id}: " + message + super().__init__(self.message) + + +class STStoragePropertiesUpdateError(Exception): + def __init__(self, st_storage_name: str, area_id: str, message: str) -> None: + self.message = ( + f"Could not update properties for short term storage {st_storage_name} inside area {area_id}: " + message + ) + super().__init__(self.message) + + +class STStorageMatrixDownloadError(Exception): + def __init__(self, area_name: str, storage_name: str, matrix_name: str, message: str) -> None: + self.message = ( + f"Could not download {matrix_name} matrix for storage {storage_name} inside area {area_name}: " + message + ) + super().__init__(self.message) + + +class STStorageMatrixUploadError(Exception): + def __init__(self, area_name: str, storage_name: str, matrix_name: str, message: str) -> None: + self.message = ( + f"Could not upload {matrix_name} matrix for storage {storage_name} inside area {area_name}: " + message + ) + super().__init__(self.message) + + +class STStorageDeletionError(Exception): + def __init__(self, area_id: str, st_storage_names: List[str], message: str) -> None: + self.message = ( + f"Could not delete the following short term storages: {', '.join(st_storage_names)} inside area {area_id}: " + + message + ) + super().__init__(self.message) + + +class BindingConstraintCreationError(Exception): + def __init__(self, 
constraint_name: str, message: str) -> None: + self.message = f"Could not create the binding constraint {constraint_name}: " + message + super().__init__(self.message) + + +class ConstraintPropertiesUpdateError(Exception): + def __init__(self, constraint_name: str, message: str) -> None: + self.message = f"Could not update properties for binding constraint {constraint_name}: " + message + super().__init__(self.message) + + +class ConstraintMatrixUpdateError(Exception): + def __init__(self, constraint_name: str, matrix_name: str, message: str) -> None: + self.message = f"Could not update matrix {matrix_name} for binding constraint {constraint_name}: " + message + super().__init__(self.message) + + +class ConstraintMatrixDownloadError(Exception): + def __init__(self, constraint_name: str, matrix_name: str, message: str) -> None: + self.message = f"Could not download matrix {matrix_name} for binding constraint {constraint_name}: " + message + super().__init__(self.message) + + +class ConstraintTermAdditionError(Exception): + def __init__(self, constraint_name: str, terms_ids: List[str], message: str) -> None: + self.message = ( + f"Could not add the following constraint terms: {', '.join(terms_ids)} inside constraint {constraint_name}: " + + message + ) + super().__init__(self.message) + + +class BindingConstraintDeletionError(Exception): + def __init__(self, constraint_name: str, message: str) -> None: + self.message = f"Could not delete the binding constraint {constraint_name}: " + message + super().__init__(self.message) + + +class ConstraintTermDeletionError(Exception): + def __init__(self, constraint_id: str, term_id: str, message: str) -> None: + self.message = f"Could not delete the term {term_id} of the binding constraint {constraint_id}: " + message + super().__init__(self.message) + + +class StudyCreationError(Exception): + def __init__(self, study_name: str, message: str) -> None: + self.message = f"Could not create the study {study_name}: " + message + 
super().__init__(self.message) + + +class StudySettingsUpdateError(Exception): + def __init__(self, study_name: str, message: str) -> None: + self.message = f"Could not update settings for study {study_name}: " + message + super().__init__(self.message) + + +class StudyDeletionError(Exception): + def __init__(self, study_id: str, message: str) -> None: + self.message = f"Could not delete the study {study_id}: " + message + super().__init__(self.message) + + +class LoadMatrixUploadError(Exception): + def __init__(self, area_name: str, message: str) -> None: + self.message = f"Could not upload load matrix for area {area_name}: " + message + super().__init__(self.message) + + +class LoadMatrixDownloadError(Exception): + def __init__(self, area_name: str, message: str) -> None: + self.message = f"Could not download load matrix for area {area_name}: " + message + super().__init__(self.message) + + +class ThermalMatrixDownloadError(Exception): + def __init__(self, area_name: str, cluster_name: str, matrix_name: str, message: str) -> None: + self.message = ( + f"Could not download {matrix_name} for cluster {cluster_name} inside area {area_name}: " + message + ) + super().__init__(self.message) + + +class RenewableMatrixDownloadError(Exception): + def __init__(self, area_name: str, renewable_name: str, message: str) -> None: + self.message = f"Could not download matrix for cluster {renewable_name} inside area {area_name}: " + message + super().__init__(self.message) + + +class MatrixUploadError(Exception): + def __init__(self, area_id: str, message: str) -> None: + self.message = f"Error uploading matrix for area {area_id}: {message}" + super().__init__(self.message) + + +class CustomError(Exception): + def __init__(self, message: str = "Error") -> None: + self.message = message + super().__init__(self.message) + + +class ConfigurationError(Exception): + def __init__(self, message: str = "Error") -> None: + self.message = "Unsupported configuration type" + f" {message}" if 
message != "Error" else "" + super().__init__(self.message) diff --git a/src/antares/model/area.py b/src/antares/model/area.py index cb1ce58e..3c7dc3d0 100644 --- a/src/antares/model/area.py +++ b/src/antares/model/area.py @@ -1,446 +1,416 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -""" -The Area model defines the description of the -electrical demand (load), generation fleet (clusters), -//TO_DO to be completed as implementation progress -""" - -from types import MappingProxyType -from typing import Optional, Dict, List, Any, Mapping, Set - -import pandas as pd -from pydantic import BaseModel, computed_field -from pydantic.alias_generators import to_camel - -from antares.model.commons import FilterOption, sort_filter_values -from antares.model.hydro import HydroProperties, HydroMatrixName, Hydro -from antares.model.misc_gen import MiscGen -from antares.model.renewable import RenewableCluster, RenewableClusterProperties -from antares.model.reserves import Reserves -from antares.model.solar import Solar -from antares.model.st_storage import STStorage, STStorageProperties -from antares.model.thermal import ThermalCluster, ThermalClusterProperties -from antares.model.wind import Wind -from antares.tools.contents_tool import transform_name_to_id, EnumIgnoreCase - - -class AdequacyPatchMode(EnumIgnoreCase): - """ - Adequacy patch mode. - - Only available if study version >= 830. - """ - - OUTSIDE = "outside" - INSIDE = "inside" - VIRTUAL = "virtual" - - -# todo: Warning, with filesystem, we want to avoid camel case and use link_aliasing. 
-class AreaProperties( - BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel -): - """ - DTO for updating area properties - """ - - energy_cost_unsupplied: Optional[float] = None - energy_cost_spilled: Optional[float] = None - non_dispatch_power: Optional[bool] = None - dispatch_hydro_power: Optional[bool] = None - other_dispatch_power: Optional[bool] = None - filter_synthesis: Optional[Set[FilterOption]] = None - filter_by_year: Optional[Set[FilterOption]] = None - # version 830 - adequacy_patch_mode: Optional[AdequacyPatchMode] = None - spread_unsupplied_energy_cost: Optional[float] = None - spread_spilled_energy_cost: Optional[float] = None - - -def config_alias_generator(field_name: str) -> str: - return field_name.replace("_", " ") - - -# TODO update to use check_if_none -class AreaPropertiesLocal(BaseModel, alias_generator=config_alias_generator): - def __init__( - self, - input_area_properties: AreaProperties = AreaProperties(), - **kwargs: Optional[Any], - ): - super().__init__(**kwargs) - self._energy_cost_unsupplied = ( - input_area_properties.energy_cost_unsupplied or 0.0 - ) - self._energy_cost_spilled = input_area_properties.energy_cost_spilled or 0.0 - self._non_dispatch_power = ( - input_area_properties.non_dispatch_power - if input_area_properties.non_dispatch_power is not None - else True - ) - self._dispatch_hydro_power = ( - input_area_properties.dispatch_hydro_power - if input_area_properties.dispatch_hydro_power is not None - else True - ) - self._other_dispatch_power = ( - input_area_properties.other_dispatch_power - if input_area_properties.other_dispatch_power is not None - else True - ) - self._filter_synthesis = input_area_properties.filter_synthesis or { - FilterOption.HOURLY, - FilterOption.DAILY, - FilterOption.WEEKLY, - FilterOption.MONTHLY, - FilterOption.ANNUAL, - } - self._filter_by_year = input_area_properties.filter_by_year or { - FilterOption.HOURLY, - FilterOption.DAILY, - FilterOption.WEEKLY, - 
FilterOption.MONTHLY, - FilterOption.ANNUAL, - } - self._adequacy_patch_mode = ( - input_area_properties.adequacy_patch_mode - if input_area_properties.adequacy_patch_mode - else AdequacyPatchMode.OUTSIDE - ) - self._spread_spilled_energy_cost = ( - input_area_properties.spread_spilled_energy_cost or 0.0 - ) - self._spread_unsupplied_energy_cost = ( - input_area_properties.spread_unsupplied_energy_cost or 0.0 - ) - - @computed_field # type: ignore[misc] - @property - def nodal_optimization(self) -> Mapping[str, str]: - return { - "non-dispatchable-power": f"{self._non_dispatch_power}".lower(), - "dispatchable-hydro-power": f"{self._dispatch_hydro_power}".lower(), - "other-dispatchable-power": f"{self._other_dispatch_power}".lower(), - "spread-unsupplied-energy-cost": f"{self._spread_unsupplied_energy_cost:.6f}", - "spread-spilled-energy-cost": f"{self._spread_spilled_energy_cost:.6f}", - "average-unsupplied-energy-cost": f"{self._energy_cost_unsupplied:.6f}", - "average-spilled-energy-cost": f"{self._energy_cost_spilled:.6f}", - } - - @computed_field # type: ignore[misc] - @property - def filtering(self) -> Mapping[str, str]: - return { - "filter-synthesis": ", ".join( - filter_value - for filter_value in sort_filter_values(self._filter_synthesis) - ), - "filter-year-by-year": ", ".join( - filter_value - for filter_value in sort_filter_values(self._filter_by_year) - ), - } - - def adequacy_patch_mode(self) -> dict[str, dict[str, str]]: - return { - "adequacy-patch": {"adequacy-patch-mode": self._adequacy_patch_mode.value} - } - - def yield_area_properties(self) -> AreaProperties: - return AreaProperties( - energy_cost_unsupplied=self._energy_cost_unsupplied, - energy_cost_spilled=self._energy_cost_spilled, - non_dispatch_power=self._non_dispatch_power, - dispatch_hydro_power=self._dispatch_hydro_power, - other_dispatch_power=self._other_dispatch_power, - filter_synthesis=self._filter_synthesis, - filter_by_year=self._filter_by_year, - 
adequacy_patch_mode=self._adequacy_patch_mode, - spread_unsupplied_energy_cost=self._spread_unsupplied_energy_cost, - spread_spilled_energy_cost=self._spread_spilled_energy_cost, - ) - - -class AreaUi( - BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel -): - """ - DTO for updating area UI - """ - - # TODO: What do these fields mean ? - - layer: Optional[int] = None - x: Optional[int] = None - y: Optional[int] = None - color_rgb: Optional[List[int]] = None - - layer_x: Optional[Dict[int, int]] = None - layer_y: Optional[Dict[int, int]] = None - layer_color: Optional[Dict[int, str]] = None - - -class AreaUiLocal(BaseModel): - """ - DTO for updating area UI locally in the ini files - """ - - def __init__( - self, - input_area_ui: AreaUi = AreaUi(), - **kwargs: Optional[Any], - ): - super().__init__(**kwargs) - self._x = input_area_ui.x or 0 - self._y = input_area_ui.y or 0 - self._color_r, self._color_g, self._color_b = input_area_ui.color_rgb or [ - 230, - 108, - 44, - ] - self._layers = input_area_ui.layer or 0 - self._layer_x = input_area_ui.layer_x or {self._layers: self._x} - self._layer_y = input_area_ui.layer_y or {self._layers: self._y} - self._layer_color = input_area_ui.layer_color or { - self._layers: f"{self._color_r} , {self._color_g} , {self._color_b}" - } - - @computed_field # type: ignore[misc] - @property - def ui(self) -> Dict[str, Optional[int]]: - return dict( - x=self._x, - y=self._y, - color_r=self._color_r, - color_g=self._color_g, - color_b=self._color_b, - layers=self._layers, - ) - - @computed_field # type: ignore[misc] - @property - def layerX(self) -> Dict[int, int]: - return self._layer_x - - @computed_field # type: ignore[misc] - @property - def layerY(self) -> Dict[int, int]: - return self._layer_y - - @computed_field # type: ignore[misc] - @property - def layerColor(self) -> Dict[int, str]: - return self._layer_color - - def yield_area_ui(self) -> AreaUi: - return AreaUi( - layer=self._layers, - x=self._x, - 
y=self._y, - color_rgb=[self._color_r, self._color_g, self._color_b], - layer_x=self._layer_x, - layer_y=self._layer_y, - layer_color=self._layer_color, - ) - - -class Area: - def __init__( # type: ignore # TODO: Find a way to avoid circular imports - self, - name: str, - area_service, - storage_service, - thermal_service, - renewable_service, - *, - renewables: Optional[Dict[str, RenewableCluster]] = None, - thermals: Optional[Dict[str, ThermalCluster]] = None, - st_storages: Optional[Dict[str, STStorage]] = None, - hydro: Optional[Hydro] = None, - wind: Optional[Wind] = None, - reserves: Optional[Reserves] = None, - solar: Optional[Solar] = None, - misc_gen: Optional[MiscGen] = None, - properties: Optional[AreaProperties] = None, - ui: Optional[AreaUi] = None, - ): - self._name = name - self._id = transform_name_to_id(name) - self._area_service = area_service - self._storage_service = storage_service - self._thermal_service = thermal_service - self._renewable_service = renewable_service - self._renewables = renewables or dict() - self._thermals = thermals or dict() - self._st_storages = st_storages or dict() - self._hydro = hydro - self._wind = wind - self._reserves = reserves - self._solar = solar - self._misc_gen = misc_gen - self._properties = properties or AreaProperties() - self._ui = ui or AreaUi() - - @property - def name(self) -> str: - return self._name - - @property - def id(self) -> str: - return self._id - - def get_thermals(self) -> MappingProxyType[str, ThermalCluster]: - return MappingProxyType(self._thermals) - - def get_renewables(self) -> MappingProxyType[str, RenewableCluster]: - return MappingProxyType(self._renewables) - - def get_st_storages(self) -> MappingProxyType[str, STStorage]: - return MappingProxyType(self._st_storages) - - @property - def hydro(self) -> Optional[Hydro]: - return self._hydro - - @property - def properties(self) -> AreaProperties: - return self._properties - - @property - def ui(self) -> AreaUi: - return self._ui - - 
def create_thermal_cluster( - self, thermal_name: str, properties: Optional[ThermalClusterProperties] = None - ) -> ThermalCluster: - thermal = self._area_service.create_thermal_cluster( - self.id, thermal_name, properties - ) - self._thermals[thermal.id] = thermal - return thermal - - def create_thermal_cluster_with_matrices( - self, - cluster_name: str, - parameters: ThermalClusterProperties, - prepro: Optional[pd.DataFrame], - modulation: Optional[pd.DataFrame], - series: Optional[pd.DataFrame], - CO2Cost: Optional[pd.DataFrame], - fuelCost: Optional[pd.DataFrame], - ) -> ThermalCluster: - thermal = self._area_service.create_thermal_cluster_with_matrices( - self.id, - cluster_name, - parameters, - prepro, - modulation, - series, - CO2Cost, - fuelCost, - ) - self._thermals[thermal.id] = thermal - return thermal - - def create_renewable_cluster( - self, - renewable_name: str, - properties: Optional[RenewableClusterProperties], - series: Optional[pd.DataFrame], - ) -> RenewableCluster: - renewable = self._area_service.create_renewable_cluster( - self.id, renewable_name, properties, series - ) - self._renewables[renewable.id] = renewable - return renewable - - def create_st_storage( - self, st_storage_name: str, properties: Optional[STStorageProperties] = None - ) -> STStorage: - storage = self._area_service.create_st_storage( - self.id, st_storage_name, properties - ) - self._st_storages[storage.id] = storage - - return storage - - def get_load_matrix(self) -> pd.DataFrame: - return self._area_service.get_load_matrix(self) - - def upload_load_matrix(self, load_matrix: pd.DataFrame) -> None: - self._area_service.upload_load_matrix(self, load_matrix) - - def delete_thermal_clusters(self, thermal_clusters: List[ThermalCluster]) -> None: - self._area_service.delete_thermal_clusters(self, thermal_clusters) - for cluster in thermal_clusters: - self._thermals.pop(cluster.id) - - def delete_thermal_cluster(self, thermal_cluster: ThermalCluster) -> None: - 
self.delete_thermal_clusters([thermal_cluster]) - - def delete_renewable_clusters( - self, renewable_clusters: List[RenewableCluster] - ) -> None: - self._area_service.delete_renewable_clusters(self, renewable_clusters) - for cluster in renewable_clusters: - self._renewables.pop(cluster.id) - - def delete_renewable_cluster(self, renewable_cluster: RenewableCluster) -> None: - self.delete_renewable_clusters([renewable_cluster]) - - def delete_st_storages(self, storages: List[STStorage]) -> None: - self._area_service.delete_st_storages(self, storages) - for storage in storages: - self._st_storages.pop(storage.id) - - def delete_st_storage(self, storage: STStorage) -> None: - self.delete_st_storages([storage]) - - def update_properties(self, properties: AreaProperties) -> None: - new_properties = self._area_service.update_area_properties(self, properties) - self._properties = new_properties - - def update_ui(self, ui: AreaUi) -> None: - new_ui = self._area_service.update_area_ui(self, ui) - self._ui = new_ui - - def create_wind(self, series: Optional[pd.DataFrame]) -> Wind: - wind = self._area_service.create_wind(self, series) - self._wind = wind - return wind - - def create_reserves(self, series: Optional[pd.DataFrame]) -> Reserves: - reserves = self._area_service.create_reserves(self, series) - self._reserves = reserves - return reserves - - def create_solar(self, series: Optional[pd.DataFrame]) -> Solar: - solar = self._area_service.create_solar(self, series) - self._solar = solar - return solar - - def create_misc_gen(self, series: Optional[pd.DataFrame]) -> MiscGen: - misc_gen = self._area_service.create_misc_gen(self, series) - self._misc_gen = misc_gen - return misc_gen - - def create_hydro( - self, - properties: Optional[HydroProperties] = None, - matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]] = None, - ) -> Hydro: - # todo: is it necessary to create allocation or correlation ? 
- hydro = self._area_service.create_hydro(self.id, properties, matrices) - self._hydro = hydro - return hydro +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +""" +The Area model defines the description of the +electrical demand (load), generation fleet (clusters), +//TO_DO to be completed as implementation progress +""" + +from types import MappingProxyType +from typing import Optional, Dict, List, Any, Mapping, Set + +import pandas as pd +from pydantic import BaseModel, computed_field +from pydantic.alias_generators import to_camel + +from antares.model.commons import FilterOption, sort_filter_values +from antares.model.hydro import HydroProperties, HydroMatrixName, Hydro +from antares.model.misc_gen import MiscGen +from antares.model.renewable import RenewableCluster, RenewableClusterProperties +from antares.model.reserves import Reserves +from antares.model.solar import Solar +from antares.model.st_storage import STStorage, STStorageProperties +from antares.model.thermal import ThermalCluster, ThermalClusterProperties +from antares.model.wind import Wind +from antares.tools.contents_tool import transform_name_to_id, EnumIgnoreCase + + +class AdequacyPatchMode(EnumIgnoreCase): + """ + Adequacy patch mode. + + Only available if study version >= 830. + """ + + OUTSIDE = "outside" + INSIDE = "inside" + VIRTUAL = "virtual" + + +# todo: Warning, with filesystem, we want to avoid camel case and use link_aliasing. 
+class AreaProperties(BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel): + """ + DTO for updating area properties + """ + + energy_cost_unsupplied: Optional[float] = None + energy_cost_spilled: Optional[float] = None + non_dispatch_power: Optional[bool] = None + dispatch_hydro_power: Optional[bool] = None + other_dispatch_power: Optional[bool] = None + filter_synthesis: Optional[Set[FilterOption]] = None + filter_by_year: Optional[Set[FilterOption]] = None + # version 830 + adequacy_patch_mode: Optional[AdequacyPatchMode] = None + spread_unsupplied_energy_cost: Optional[float] = None + spread_spilled_energy_cost: Optional[float] = None + + +def config_alias_generator(field_name: str) -> str: + return field_name.replace("_", " ") + + +# TODO update to use check_if_none +class AreaPropertiesLocal(BaseModel, alias_generator=config_alias_generator): + def __init__( + self, + input_area_properties: AreaProperties = AreaProperties(), + **kwargs: Optional[Any], + ): + super().__init__(**kwargs) + self._energy_cost_unsupplied = input_area_properties.energy_cost_unsupplied or 0.0 + self._energy_cost_spilled = input_area_properties.energy_cost_spilled or 0.0 + self._non_dispatch_power = ( + input_area_properties.non_dispatch_power if input_area_properties.non_dispatch_power is not None else True + ) + self._dispatch_hydro_power = ( + input_area_properties.dispatch_hydro_power + if input_area_properties.dispatch_hydro_power is not None + else True + ) + self._other_dispatch_power = ( + input_area_properties.other_dispatch_power + if input_area_properties.other_dispatch_power is not None + else True + ) + self._filter_synthesis = input_area_properties.filter_synthesis or { + FilterOption.HOURLY, + FilterOption.DAILY, + FilterOption.WEEKLY, + FilterOption.MONTHLY, + FilterOption.ANNUAL, + } + self._filter_by_year = input_area_properties.filter_by_year or { + FilterOption.HOURLY, + FilterOption.DAILY, + FilterOption.WEEKLY, + FilterOption.MONTHLY, + 
FilterOption.ANNUAL, + } + self._adequacy_patch_mode = ( + input_area_properties.adequacy_patch_mode + if input_area_properties.adequacy_patch_mode + else AdequacyPatchMode.OUTSIDE + ) + self._spread_spilled_energy_cost = input_area_properties.spread_spilled_energy_cost or 0.0 + self._spread_unsupplied_energy_cost = input_area_properties.spread_unsupplied_energy_cost or 0.0 + + @computed_field # type: ignore[misc] + @property + def nodal_optimization(self) -> Mapping[str, str]: + return { + "non-dispatchable-power": f"{self._non_dispatch_power}".lower(), + "dispatchable-hydro-power": f"{self._dispatch_hydro_power}".lower(), + "other-dispatchable-power": f"{self._other_dispatch_power}".lower(), + "spread-unsupplied-energy-cost": f"{self._spread_unsupplied_energy_cost:.6f}", + "spread-spilled-energy-cost": f"{self._spread_spilled_energy_cost:.6f}", + "average-unsupplied-energy-cost": f"{self._energy_cost_unsupplied:.6f}", + "average-spilled-energy-cost": f"{self._energy_cost_spilled:.6f}", + } + + @computed_field # type: ignore[misc] + @property + def filtering(self) -> Mapping[str, str]: + return { + "filter-synthesis": ", ".join(filter_value for filter_value in sort_filter_values(self._filter_synthesis)), + "filter-year-by-year": ", ".join(filter_value for filter_value in sort_filter_values(self._filter_by_year)), + } + + def adequacy_patch_mode(self) -> dict[str, dict[str, str]]: + return {"adequacy-patch": {"adequacy-patch-mode": self._adequacy_patch_mode.value}} + + def yield_area_properties(self) -> AreaProperties: + return AreaProperties( + energy_cost_unsupplied=self._energy_cost_unsupplied, + energy_cost_spilled=self._energy_cost_spilled, + non_dispatch_power=self._non_dispatch_power, + dispatch_hydro_power=self._dispatch_hydro_power, + other_dispatch_power=self._other_dispatch_power, + filter_synthesis=self._filter_synthesis, + filter_by_year=self._filter_by_year, + adequacy_patch_mode=self._adequacy_patch_mode, + 
spread_unsupplied_energy_cost=self._spread_unsupplied_energy_cost, + spread_spilled_energy_cost=self._spread_spilled_energy_cost, + ) + + +class AreaUi(BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel): + """ + DTO for updating area UI + """ + + # TODO: What do these fields mean ? + + layer: Optional[int] = None + x: Optional[int] = None + y: Optional[int] = None + color_rgb: Optional[List[int]] = None + + layer_x: Optional[Dict[int, int]] = None + layer_y: Optional[Dict[int, int]] = None + layer_color: Optional[Dict[int, str]] = None + + +class AreaUiLocal(BaseModel): + """ + DTO for updating area UI locally in the ini files + """ + + def __init__( + self, + input_area_ui: AreaUi = AreaUi(), + **kwargs: Optional[Any], + ): + super().__init__(**kwargs) + self._x = input_area_ui.x or 0 + self._y = input_area_ui.y or 0 + self._color_r, self._color_g, self._color_b = input_area_ui.color_rgb or [ + 230, + 108, + 44, + ] + self._layers = input_area_ui.layer or 0 + self._layer_x = input_area_ui.layer_x or {self._layers: self._x} + self._layer_y = input_area_ui.layer_y or {self._layers: self._y} + self._layer_color = input_area_ui.layer_color or { + self._layers: f"{self._color_r} , {self._color_g} , {self._color_b}" + } + + @computed_field # type: ignore[misc] + @property + def ui(self) -> Dict[str, Optional[int]]: + return dict( + x=self._x, + y=self._y, + color_r=self._color_r, + color_g=self._color_g, + color_b=self._color_b, + layers=self._layers, + ) + + @computed_field # type: ignore[misc] + @property + def layerX(self) -> Dict[int, int]: + return self._layer_x + + @computed_field # type: ignore[misc] + @property + def layerY(self) -> Dict[int, int]: + return self._layer_y + + @computed_field # type: ignore[misc] + @property + def layerColor(self) -> Dict[int, str]: + return self._layer_color + + def yield_area_ui(self) -> AreaUi: + return AreaUi( + layer=self._layers, + x=self._x, + y=self._y, + color_rgb=[self._color_r, self._color_g, 
self._color_b], + layer_x=self._layer_x, + layer_y=self._layer_y, + layer_color=self._layer_color, + ) + + +class Area: + def __init__( # type: ignore # TODO: Find a way to avoid circular imports + self, + name: str, + area_service, + storage_service, + thermal_service, + renewable_service, + *, + renewables: Optional[Dict[str, RenewableCluster]] = None, + thermals: Optional[Dict[str, ThermalCluster]] = None, + st_storages: Optional[Dict[str, STStorage]] = None, + hydro: Optional[Hydro] = None, + wind: Optional[Wind] = None, + reserves: Optional[Reserves] = None, + solar: Optional[Solar] = None, + misc_gen: Optional[MiscGen] = None, + properties: Optional[AreaProperties] = None, + ui: Optional[AreaUi] = None, + ): + self._name = name + self._id = transform_name_to_id(name) + self._area_service = area_service + self._storage_service = storage_service + self._thermal_service = thermal_service + self._renewable_service = renewable_service + self._renewables = renewables or dict() + self._thermals = thermals or dict() + self._st_storages = st_storages or dict() + self._hydro = hydro + self._wind = wind + self._reserves = reserves + self._solar = solar + self._misc_gen = misc_gen + self._properties = properties or AreaProperties() + self._ui = ui or AreaUi() + + @property + def name(self) -> str: + return self._name + + @property + def id(self) -> str: + return self._id + + def get_thermals(self) -> MappingProxyType[str, ThermalCluster]: + return MappingProxyType(self._thermals) + + def get_renewables(self) -> MappingProxyType[str, RenewableCluster]: + return MappingProxyType(self._renewables) + + def get_st_storages(self) -> MappingProxyType[str, STStorage]: + return MappingProxyType(self._st_storages) + + @property + def hydro(self) -> Optional[Hydro]: + return self._hydro + + @property + def properties(self) -> AreaProperties: + return self._properties + + @property + def ui(self) -> AreaUi: + return self._ui + + def create_thermal_cluster( + self, thermal_name: str, 
properties: Optional[ThermalClusterProperties] = None + ) -> ThermalCluster: + thermal = self._area_service.create_thermal_cluster(self.id, thermal_name, properties) + self._thermals[thermal.id] = thermal + return thermal + + def create_thermal_cluster_with_matrices( + self, + cluster_name: str, + parameters: ThermalClusterProperties, + prepro: Optional[pd.DataFrame], + modulation: Optional[pd.DataFrame], + series: Optional[pd.DataFrame], + CO2Cost: Optional[pd.DataFrame], + fuelCost: Optional[pd.DataFrame], + ) -> ThermalCluster: + thermal = self._area_service.create_thermal_cluster_with_matrices( + self.id, + cluster_name, + parameters, + prepro, + modulation, + series, + CO2Cost, + fuelCost, + ) + self._thermals[thermal.id] = thermal + return thermal + + def create_renewable_cluster( + self, + renewable_name: str, + properties: Optional[RenewableClusterProperties], + series: Optional[pd.DataFrame], + ) -> RenewableCluster: + renewable = self._area_service.create_renewable_cluster(self.id, renewable_name, properties, series) + self._renewables[renewable.id] = renewable + return renewable + + def create_st_storage(self, st_storage_name: str, properties: Optional[STStorageProperties] = None) -> STStorage: + storage = self._area_service.create_st_storage(self.id, st_storage_name, properties) + self._st_storages[storage.id] = storage + + return storage + + def get_load_matrix(self) -> pd.DataFrame: + return self._area_service.get_load_matrix(self) + + def upload_load_matrix(self, load_matrix: pd.DataFrame) -> None: + self._area_service.upload_load_matrix(self, load_matrix) + + def delete_thermal_clusters(self, thermal_clusters: List[ThermalCluster]) -> None: + self._area_service.delete_thermal_clusters(self, thermal_clusters) + for cluster in thermal_clusters: + self._thermals.pop(cluster.id) + + def delete_thermal_cluster(self, thermal_cluster: ThermalCluster) -> None: + self.delete_thermal_clusters([thermal_cluster]) + + def delete_renewable_clusters(self, 
renewable_clusters: List[RenewableCluster]) -> None: + self._area_service.delete_renewable_clusters(self, renewable_clusters) + for cluster in renewable_clusters: + self._renewables.pop(cluster.id) + + def delete_renewable_cluster(self, renewable_cluster: RenewableCluster) -> None: + self.delete_renewable_clusters([renewable_cluster]) + + def delete_st_storages(self, storages: List[STStorage]) -> None: + self._area_service.delete_st_storages(self, storages) + for storage in storages: + self._st_storages.pop(storage.id) + + def delete_st_storage(self, storage: STStorage) -> None: + self.delete_st_storages([storage]) + + def update_properties(self, properties: AreaProperties) -> None: + new_properties = self._area_service.update_area_properties(self, properties) + self._properties = new_properties + + def update_ui(self, ui: AreaUi) -> None: + new_ui = self._area_service.update_area_ui(self, ui) + self._ui = new_ui + + def create_wind(self, series: Optional[pd.DataFrame]) -> Wind: + wind = self._area_service.create_wind(self, series) + self._wind = wind + return wind + + def create_reserves(self, series: Optional[pd.DataFrame]) -> Reserves: + reserves = self._area_service.create_reserves(self, series) + self._reserves = reserves + return reserves + + def create_solar(self, series: Optional[pd.DataFrame]) -> Solar: + solar = self._area_service.create_solar(self, series) + self._solar = solar + return solar + + def create_misc_gen(self, series: Optional[pd.DataFrame]) -> MiscGen: + misc_gen = self._area_service.create_misc_gen(self, series) + self._misc_gen = misc_gen + return misc_gen + + def create_hydro( + self, + properties: Optional[HydroProperties] = None, + matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]] = None, + ) -> Hydro: + # todo: is it necessary to create allocation or correlation ? 
+ hydro = self._area_service.create_hydro(self.id, properties, matrices) + self._hydro = hydro + return hydro diff --git a/src/antares/model/binding_constraint.py b/src/antares/model/binding_constraint.py index 8a35c097..4deef0a4 100644 --- a/src/antares/model/binding_constraint.py +++ b/src/antares/model/binding_constraint.py @@ -1,175 +1,155 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from enum import Enum -from typing import Optional, Union, List, Any, Dict - -import pandas as pd -from pydantic import BaseModel, Field, model_validator -from pydantic.alias_generators import to_camel - -from antares.tools.contents_tool import EnumIgnoreCase, transform_name_to_id - -DEFAULT_GROUP = "default" - - -class BindingConstraintFrequency(EnumIgnoreCase): - HOURLY = "hourly" - DAILY = "daily" - WEEKLY = "weekly" - - -class BindingConstraintOperator(EnumIgnoreCase): - LESS = "less" - GREATER = "greater" - BOTH = "both" - EQUAL = "equal" - - -class ConstraintMatrixName(Enum): - LESS_TERM = "lt" - EQUAL_TERM = "eq" - GREATER_TERM = "gt" - - -class TermOperators(BaseModel): - weight: Optional[float] = None - offset: Optional[int] = None - - -class LinkData(BaseModel): - """ - DTO for a constraint term on a link between two areas. - """ - - area1: str - area2: str - - -class ClusterData(BaseModel): - """ - DTO for a constraint term on a cluster in an area. 
- """ - - area: str - cluster: str - - -class ConstraintTerm(TermOperators): - data: Union[LinkData, ClusterData] - id: str = Field(init=False) - - @model_validator(mode="before") - def fill_id(cls, v: Dict[str, Any]) -> Dict[str, Any]: - v["id"] = cls.generate_id(v["data"]) - return v - - @classmethod - def generate_id(cls, data: Union[Dict[str, str], LinkData, ClusterData]) -> str: - if isinstance(data, dict): - if "area1" in data: - return "%".join(sorted((data["area1"].lower(), data["area2"].lower()))) - return ".".join((data["area"].lower(), data["cluster"].lower())) - elif isinstance(data, LinkData): - return "%".join(sorted((data.area1.lower(), data.area2.lower()))) - return ".".join((data.area.lower(), data.cluster.lower())) - - -class BindingConstraintProperties( - BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel -): - enabled: Optional[bool] = None - time_step: Optional[BindingConstraintFrequency] = None - operator: Optional[BindingConstraintOperator] = None - comments: Optional[str] = None - filter_year_by_year: Optional[str] = None - filter_synthesis: Optional[str] = None - group: Optional[str] = None - - -class BindingConstraint: - def __init__( # type: ignore # TODO: Find a way to avoid circular imports - self, - name: str, - binding_constraint_service, - properties: Optional[BindingConstraintProperties] = None, - terms: Optional[List[ConstraintTerm]] = None, - ): - self._name = name - self._binding_constraint_service = binding_constraint_service - self._id = transform_name_to_id(name) - self._properties = properties or BindingConstraintProperties() - self._terms = {term.id: term for term in terms} if terms else {} - - @property - def name(self) -> str: - return self._name - - @property - def id(self) -> str: - return self._id - - @property - def properties(self) -> BindingConstraintProperties: - return self._properties - - def get_terms(self) -> Dict[str, ConstraintTerm]: - return self._terms - - def add_terms(self, terms: 
List[ConstraintTerm]) -> None: - added_terms = self._binding_constraint_service.add_constraint_terms(self, terms) - for term in added_terms: - self._terms[term.id] = term - - def delete_term(self, term: ConstraintTerm) -> None: - self._binding_constraint_service.delete_binding_constraint_term( - self.id, term.id - ) - self._terms.pop(term.id) - - def update_properties(self, properties: BindingConstraintProperties) -> None: - new_properties = ( - self._binding_constraint_service.update_binding_constraint_properties( - self, properties - ) - ) - self._properties = new_properties - - def get_less_term_matrix(self) -> pd.DataFrame: - return self._binding_constraint_service.get_constraint_matrix( - self, ConstraintMatrixName.LESS_TERM - ) - - def get_equal_term_matrix(self) -> pd.DataFrame: - return self._binding_constraint_service.get_constraint_matrix( - self, ConstraintMatrixName.EQUAL_TERM - ) - - def get_greater_term_matrix(self) -> pd.DataFrame: - return self._binding_constraint_service.get_constraint_matrix( - self, ConstraintMatrixName.GREATER_TERM - ) - - def update_less_term_matrix(self, matrix: pd.DataFrame) -> None: - self._binding_constraint_service.update_constraint_matrix( - self, ConstraintMatrixName.LESS_TERM, matrix - ) - - def update_equal_term_matrix(self, matrix: pd.DataFrame) -> None: - self._binding_constraint_service.update_constraint_matrix( - self, ConstraintMatrixName.EQUAL_TERM, matrix - ) - - def update_greater_term_matrix(self, matrix: pd.DataFrame) -> None: - self._binding_constraint_service.update_constraint_matrix( - self, ConstraintMatrixName.GREATER_TERM, matrix - ) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +from enum import Enum +from typing import Optional, Union, List, Any, Dict + +import pandas as pd +from pydantic import BaseModel, Field, model_validator +from pydantic.alias_generators import to_camel + +from antares.tools.contents_tool import EnumIgnoreCase, transform_name_to_id + +DEFAULT_GROUP = "default" + + +class BindingConstraintFrequency(EnumIgnoreCase): + HOURLY = "hourly" + DAILY = "daily" + WEEKLY = "weekly" + + +class BindingConstraintOperator(EnumIgnoreCase): + LESS = "less" + GREATER = "greater" + BOTH = "both" + EQUAL = "equal" + + +class ConstraintMatrixName(Enum): + LESS_TERM = "lt" + EQUAL_TERM = "eq" + GREATER_TERM = "gt" + + +class TermOperators(BaseModel): + weight: Optional[float] = None + offset: Optional[int] = None + + +class LinkData(BaseModel): + """ + DTO for a constraint term on a link between two areas. + """ + + area1: str + area2: str + + +class ClusterData(BaseModel): + """ + DTO for a constraint term on a cluster in an area. + """ + + area: str + cluster: str + + +class ConstraintTerm(TermOperators): + data: Union[LinkData, ClusterData] + id: str = Field(init=False) + + @model_validator(mode="before") + def fill_id(cls, v: Dict[str, Any]) -> Dict[str, Any]: + v["id"] = cls.generate_id(v["data"]) + return v + + @classmethod + def generate_id(cls, data: Union[Dict[str, str], LinkData, ClusterData]) -> str: + if isinstance(data, dict): + if "area1" in data: + return "%".join(sorted((data["area1"].lower(), data["area2"].lower()))) + return ".".join((data["area"].lower(), data["cluster"].lower())) + elif isinstance(data, LinkData): + return "%".join(sorted((data.area1.lower(), data.area2.lower()))) + return ".".join((data.area.lower(), data.cluster.lower())) + + +class BindingConstraintProperties(BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel): + enabled: Optional[bool] = None + time_step: Optional[BindingConstraintFrequency] = None + operator: Optional[BindingConstraintOperator] = None + comments: 
Optional[str] = None + filter_year_by_year: Optional[str] = None + filter_synthesis: Optional[str] = None + group: Optional[str] = None + + +class BindingConstraint: + def __init__( # type: ignore # TODO: Find a way to avoid circular imports + self, + name: str, + binding_constraint_service, + properties: Optional[BindingConstraintProperties] = None, + terms: Optional[List[ConstraintTerm]] = None, + ): + self._name = name + self._binding_constraint_service = binding_constraint_service + self._id = transform_name_to_id(name) + self._properties = properties or BindingConstraintProperties() + self._terms = {term.id: term for term in terms} if terms else {} + + @property + def name(self) -> str: + return self._name + + @property + def id(self) -> str: + return self._id + + @property + def properties(self) -> BindingConstraintProperties: + return self._properties + + def get_terms(self) -> Dict[str, ConstraintTerm]: + return self._terms + + def add_terms(self, terms: List[ConstraintTerm]) -> None: + added_terms = self._binding_constraint_service.add_constraint_terms(self, terms) + for term in added_terms: + self._terms[term.id] = term + + def delete_term(self, term: ConstraintTerm) -> None: + self._binding_constraint_service.delete_binding_constraint_term(self.id, term.id) + self._terms.pop(term.id) + + def update_properties(self, properties: BindingConstraintProperties) -> None: + new_properties = self._binding_constraint_service.update_binding_constraint_properties(self, properties) + self._properties = new_properties + + def get_less_term_matrix(self) -> pd.DataFrame: + return self._binding_constraint_service.get_constraint_matrix(self, ConstraintMatrixName.LESS_TERM) + + def get_equal_term_matrix(self) -> pd.DataFrame: + return self._binding_constraint_service.get_constraint_matrix(self, ConstraintMatrixName.EQUAL_TERM) + + def get_greater_term_matrix(self) -> pd.DataFrame: + return self._binding_constraint_service.get_constraint_matrix(self, 
ConstraintMatrixName.GREATER_TERM) + + def update_less_term_matrix(self, matrix: pd.DataFrame) -> None: + self._binding_constraint_service.update_constraint_matrix(self, ConstraintMatrixName.LESS_TERM, matrix) + + def update_equal_term_matrix(self, matrix: pd.DataFrame) -> None: + self._binding_constraint_service.update_constraint_matrix(self, ConstraintMatrixName.EQUAL_TERM, matrix) + + def update_greater_term_matrix(self, matrix: pd.DataFrame) -> None: + self._binding_constraint_service.update_constraint_matrix(self, ConstraintMatrixName.GREATER_TERM, matrix) diff --git a/src/antares/model/cluster.py b/src/antares/model/cluster.py index 8f6e1c6c..d34aeb6b 100644 --- a/src/antares/model/cluster.py +++ b/src/antares/model/cluster.py @@ -1,44 +1,42 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from typing import Optional - -from pydantic import BaseModel -from pydantic.alias_generators import to_camel - - -class ClusterProperties( - BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel -): - """ - Common properties for thermal and renewable clusters - """ - - # Activity status: - # - True: the plant may generate. - # - False: not yet commissioned, moth-balled, etc. 
- enabled: Optional[bool] = None - - unit_count: Optional[int] = None - nominal_capacity: Optional[float] = None - - @property - def installed_capacity(self) -> Optional[float]: - if self.unit_count is None or self.nominal_capacity is None: - return None - return self.unit_count * self.nominal_capacity - - @property - def enabled_capacity(self) -> Optional[float]: - if self.enabled is None or self.installed_capacity is None: - return None - return self.enabled * self.installed_capacity +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from typing import Optional + +from pydantic import BaseModel +from pydantic.alias_generators import to_camel + + +class ClusterProperties(BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel): + """ + Common properties for thermal and renewable clusters + """ + + # Activity status: + # - True: the plant may generate. + # - False: not yet commissioned, moth-balled, etc. 
+ enabled: Optional[bool] = None + + unit_count: Optional[int] = None + nominal_capacity: Optional[float] = None + + @property + def installed_capacity(self) -> Optional[float]: + if self.unit_count is None or self.nominal_capacity is None: + return None + return self.unit_count * self.nominal_capacity + + @property + def enabled_capacity(self) -> Optional[float]: + if self.enabled is None or self.installed_capacity is None: + return None + return self.enabled * self.installed_capacity diff --git a/src/antares/model/commons.py b/src/antares/model/commons.py index ed59ef08..077fefda 100644 --- a/src/antares/model/commons.py +++ b/src/antares/model/commons.py @@ -1,28 +1,28 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from enum import Enum -from typing import List, Set - - -class FilterOption(Enum): - HOURLY = "hourly" - DAILY = "daily" - WEEKLY = "weekly" - MONTHLY = "monthly" - ANNUAL = "annual" - - -def sort_filter_values(filter_options: Set[FilterOption]) -> List[str]: - filter_defaults = ["hourly", "daily", "weekly", "monthly", "annual"] - filter_values = [filter_option.value for filter_option in filter_options] - return sorted(filter_values, key=lambda x: filter_defaults.index(x)) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +from enum import Enum +from typing import List, Set + + +class FilterOption(Enum): + HOURLY = "hourly" + DAILY = "daily" + WEEKLY = "weekly" + MONTHLY = "monthly" + ANNUAL = "annual" + + +def sort_filter_values(filter_options: Set[FilterOption]) -> List[str]: + filter_defaults = ["hourly", "daily", "weekly", "monthly", "annual"] + filter_values = [filter_option.value for filter_option in filter_options] + return sorted(filter_values, key=lambda x: filter_defaults.index(x)) diff --git a/src/antares/model/hydro.py b/src/antares/model/hydro.py index eed2e3fe..9e23faae 100644 --- a/src/antares/model/hydro.py +++ b/src/antares/model/hydro.py @@ -1,171 +1,149 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from enum import Enum -from typing import Optional, Dict, Any - -import pandas as pd -from pydantic import BaseModel, computed_field -from pydantic.alias_generators import to_camel - -from antares.tools.ini_tool import check_if_none - - -class HydroMatrixName(Enum): - SERIES_ROR = "ror" - SERIES_MOD = "mod" - SERIES_MIN_GEN = "mingen" - PREPRO_ENERGY = "energy" - COMMON_WATER_VALUES = "waterValues" - COMMON_RESERVOIR = "reservoir" - COMMON_MAX_POWER = "maxpower" - COMMON_INFLOW_PATTERN = "inflowPattern" - COMMON_CREDIT_MODULATIONS = "creditmodulations" - - -class HydroProperties( - BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel -): - """ - Properties of hydro system read from the configuration files. - - All aliases match the name of the corresponding field in the INI files. 
- """ - - inter_daily_breakdown: Optional[float] = None - intra_daily_modulation: Optional[float] = None - inter_monthly_breakdown: Optional[float] = None - reservoir: Optional[bool] = None - reservoir_capacity: Optional[float] = None - follow_load: Optional[bool] = None - use_water: Optional[bool] = None - hard_bounds: Optional[bool] = None - initialize_reservoir_date: Optional[int] = None - use_heuristic: Optional[bool] = None - power_to_level: Optional[bool] = None - use_leeway: Optional[bool] = None - leeway_low: Optional[float] = None - leeway_up: Optional[float] = None - pumping_efficiency: Optional[float] = None - - -class HydroPropertiesLocal(BaseModel): - def __init__( - self, - area_id: str, - hydro_properties: Optional[HydroProperties] = None, - **kwargs: Optional[Any], - ): - super().__init__(**kwargs) - self._area_id = area_id - hydro_properties = hydro_properties or HydroProperties() - self._inter_daily_breakdown = check_if_none( - hydro_properties.inter_daily_breakdown, 1 - ) - self._intra_daily_modulation = check_if_none( - hydro_properties.intra_daily_modulation, 24 - ) - self._inter_monthly_breakdown = check_if_none( - hydro_properties.inter_monthly_breakdown, 1 - ) - self._reservoir = check_if_none(hydro_properties.reservoir, False) - self._reservoir_capacity = check_if_none(hydro_properties.reservoir_capacity, 0) - self._follow_load = check_if_none(hydro_properties.follow_load, True) - self._use_water = check_if_none(hydro_properties.use_water, False) - self._hard_bounds = check_if_none(hydro_properties.hard_bounds, False) - self._initialize_reservoir_date = check_if_none( - hydro_properties.initialize_reservoir_date, 0 - ) - self._use_heuristic = check_if_none(hydro_properties.use_heuristic, True) - self._power_to_level = check_if_none(hydro_properties.power_to_level, False) - self._use_leeway = check_if_none(hydro_properties.use_leeway, False) - self._leeway_low = check_if_none(hydro_properties.leeway_low, 1) - self._leeway_up = 
check_if_none(hydro_properties.leeway_up, 1) - self._pumping_efficiency = check_if_none(hydro_properties.pumping_efficiency, 1) - - @computed_field # type: ignore[misc] - @property - def hydro_ini_fields(self) -> dict[str, dict[str, str]]: - return { - "inter-daily-breakdown": { - f"{self._area_id}": f"{self._inter_daily_breakdown:.6f}" - }, - "intra-daily-modulation": { - f"{self._area_id}": f"{self._intra_daily_modulation:.6f}" - }, - "inter-monthly-breakdown": { - f"{self._area_id}": f"{self._inter_monthly_breakdown:.6f}" - }, - "reservoir": {f"{self._area_id}": f"{self._reservoir}".lower()}, - "reservoir capacity": { - f"{self._area_id}": f"{self._reservoir_capacity:.6f}" - }, - "follow load": {f"{self._area_id}": f"{self._follow_load}".lower()}, - "use water": {f"{self._area_id}": f"{self._use_water}".lower()}, - "hard bounds": {f"{self._area_id}": f"{self._hard_bounds}".lower()}, - "initialize reservoir date": { - f"{self._area_id}": f"{self._initialize_reservoir_date}" - }, - "use heuristic": {f"{self._area_id}": f"{self._use_heuristic}".lower()}, - "power to level": {f"{self._area_id}": f"{self._power_to_level}".lower()}, - "use leeway": {f"{self._area_id}": f"{self._use_leeway}".lower()}, - "leeway low": {f"{self._area_id}": f"{self._leeway_low:.6f}"}, - "leeway up": {f"{self._area_id}": f"{self._leeway_up:.6f}"}, - "pumping efficiency": { - f"{self._area_id}": f"{self._pumping_efficiency:.6f}" - }, - } - - def yield_hydro_properties(self) -> HydroProperties: - return HydroProperties( - inter_daily_breakdown=self._inter_daily_breakdown, - intra_daily_modulation=self._intra_daily_modulation, - inter_monthly_breakdown=self._inter_monthly_breakdown, - reservoir=self._reservoir, - reservoir_capacity=self._reservoir_capacity, - follow_load=self._follow_load, - use_water=self._use_water, - hard_bounds=self._hard_bounds, - initialize_reservoir_date=self._initialize_reservoir_date, - use_heuristic=self._use_heuristic, - power_to_level=self._power_to_level, - 
use_leeway=self._use_leeway, - leeway_low=self._leeway_low, - leeway_up=self._leeway_up, - pumping_efficiency=self._pumping_efficiency, - ) - - -class Hydro: - def __init__( # type: ignore # - self, - service, - area_id: str, - properties: Optional[HydroProperties] = None, - matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]] = None, - ): - self._area_id = area_id - self._service = service - self._properties = properties - self._matrices = matrices - - @property - def area_id(self) -> str: - return self._area_id - - @property - def properties(self) -> Optional[HydroProperties]: - return self._properties - - @property - def matrices(self) -> Optional[Dict[HydroMatrixName, pd.DataFrame]]: - return self._matrices +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from enum import Enum +from typing import Optional, Dict, Any + +import pandas as pd +from pydantic import BaseModel, computed_field +from pydantic.alias_generators import to_camel + +from antares.tools.ini_tool import check_if_none + + +class HydroMatrixName(Enum): + SERIES_ROR = "ror" + SERIES_MOD = "mod" + SERIES_MIN_GEN = "mingen" + PREPRO_ENERGY = "energy" + COMMON_WATER_VALUES = "waterValues" + COMMON_RESERVOIR = "reservoir" + COMMON_MAX_POWER = "maxpower" + COMMON_INFLOW_PATTERN = "inflowPattern" + COMMON_CREDIT_MODULATIONS = "creditmodulations" + + +class HydroProperties(BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel): + """ + Properties of hydro system read from the configuration files. + + All aliases match the name of the corresponding field in the INI files. 
+ """ + + inter_daily_breakdown: Optional[float] = None + intra_daily_modulation: Optional[float] = None + inter_monthly_breakdown: Optional[float] = None + reservoir: Optional[bool] = None + reservoir_capacity: Optional[float] = None + follow_load: Optional[bool] = None + use_water: Optional[bool] = None + hard_bounds: Optional[bool] = None + initialize_reservoir_date: Optional[int] = None + use_heuristic: Optional[bool] = None + power_to_level: Optional[bool] = None + use_leeway: Optional[bool] = None + leeway_low: Optional[float] = None + leeway_up: Optional[float] = None + pumping_efficiency: Optional[float] = None + + +class HydroPropertiesLocal(BaseModel): + def __init__( + self, + area_id: str, + hydro_properties: Optional[HydroProperties] = None, + **kwargs: Optional[Any], + ): + super().__init__(**kwargs) + self._area_id = area_id + hydro_properties = hydro_properties or HydroProperties() + self._inter_daily_breakdown = check_if_none(hydro_properties.inter_daily_breakdown, 1) + self._intra_daily_modulation = check_if_none(hydro_properties.intra_daily_modulation, 24) + self._inter_monthly_breakdown = check_if_none(hydro_properties.inter_monthly_breakdown, 1) + self._reservoir = check_if_none(hydro_properties.reservoir, False) + self._reservoir_capacity = check_if_none(hydro_properties.reservoir_capacity, 0) + self._follow_load = check_if_none(hydro_properties.follow_load, True) + self._use_water = check_if_none(hydro_properties.use_water, False) + self._hard_bounds = check_if_none(hydro_properties.hard_bounds, False) + self._initialize_reservoir_date = check_if_none(hydro_properties.initialize_reservoir_date, 0) + self._use_heuristic = check_if_none(hydro_properties.use_heuristic, True) + self._power_to_level = check_if_none(hydro_properties.power_to_level, False) + self._use_leeway = check_if_none(hydro_properties.use_leeway, False) + self._leeway_low = check_if_none(hydro_properties.leeway_low, 1) + self._leeway_up = 
check_if_none(hydro_properties.leeway_up, 1) + self._pumping_efficiency = check_if_none(hydro_properties.pumping_efficiency, 1) + + @computed_field # type: ignore[misc] + @property + def hydro_ini_fields(self) -> dict[str, dict[str, str]]: + return { + "inter-daily-breakdown": {f"{self._area_id}": f"{self._inter_daily_breakdown:.6f}"}, + "intra-daily-modulation": {f"{self._area_id}": f"{self._intra_daily_modulation:.6f}"}, + "inter-monthly-breakdown": {f"{self._area_id}": f"{self._inter_monthly_breakdown:.6f}"}, + "reservoir": {f"{self._area_id}": f"{self._reservoir}".lower()}, + "reservoir capacity": {f"{self._area_id}": f"{self._reservoir_capacity:.6f}"}, + "follow load": {f"{self._area_id}": f"{self._follow_load}".lower()}, + "use water": {f"{self._area_id}": f"{self._use_water}".lower()}, + "hard bounds": {f"{self._area_id}": f"{self._hard_bounds}".lower()}, + "initialize reservoir date": {f"{self._area_id}": f"{self._initialize_reservoir_date}"}, + "use heuristic": {f"{self._area_id}": f"{self._use_heuristic}".lower()}, + "power to level": {f"{self._area_id}": f"{self._power_to_level}".lower()}, + "use leeway": {f"{self._area_id}": f"{self._use_leeway}".lower()}, + "leeway low": {f"{self._area_id}": f"{self._leeway_low:.6f}"}, + "leeway up": {f"{self._area_id}": f"{self._leeway_up:.6f}"}, + "pumping efficiency": {f"{self._area_id}": f"{self._pumping_efficiency:.6f}"}, + } + + def yield_hydro_properties(self) -> HydroProperties: + return HydroProperties( + inter_daily_breakdown=self._inter_daily_breakdown, + intra_daily_modulation=self._intra_daily_modulation, + inter_monthly_breakdown=self._inter_monthly_breakdown, + reservoir=self._reservoir, + reservoir_capacity=self._reservoir_capacity, + follow_load=self._follow_load, + use_water=self._use_water, + hard_bounds=self._hard_bounds, + initialize_reservoir_date=self._initialize_reservoir_date, + use_heuristic=self._use_heuristic, + power_to_level=self._power_to_level, + use_leeway=self._use_leeway, + 
leeway_low=self._leeway_low, + leeway_up=self._leeway_up, + pumping_efficiency=self._pumping_efficiency, + ) + + +class Hydro: + def __init__( # type: ignore # + self, + service, + area_id: str, + properties: Optional[HydroProperties] = None, + matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]] = None, + ): + self._area_id = area_id + self._service = service + self._properties = properties + self._matrices = matrices + + @property + def area_id(self) -> str: + return self._area_id + + @property + def properties(self) -> Optional[HydroProperties]: + return self._properties + + @property + def matrices(self) -> Optional[Dict[HydroMatrixName, pd.DataFrame]]: + return self._matrices diff --git a/src/antares/model/link.py b/src/antares/model/link.py index c793061e..c4b22c50 100644 --- a/src/antares/model/link.py +++ b/src/antares/model/link.py @@ -1,225 +1,215 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from enum import Enum -from typing import Optional, Set, Any, Mapping - -from pydantic import BaseModel, computed_field - -from antares.model.area import Area -from antares.model.commons import FilterOption, sort_filter_values - - -class TransmissionCapacities(Enum): - ENABLED = "enabled" - DISABLED = "ignore" - INFINITE = "infinite" - - -class AssetType(Enum): - AC = "ac" - DC = "dc" - GAZ = "gaz" - VIRTUAL = "virt" - OTHER = "other" - - -class LinkStyle(Enum): - DOT = "dot" - PLAIN = "plain" - DASH = "dash" - DOT_DASH = "dotdash" - - -def link_aliasing(string: str) -> str: - return string.replace("_", "-") - - -class LinkProperties( - BaseModel, extra="forbid", populate_by_name=True, alias_generator=link_aliasing -): - """ - DTO for updating link properties - """ - - hurdles_cost: Optional[bool] = None - loop_flow: Optional[bool] = None - use_phase_shifter: Optional[bool] = None - transmission_capacities: Optional[TransmissionCapacities] = None - asset_type: Optional[AssetType] = None - display_comments: Optional[bool] = None - filter_synthesis: Optional[Set[FilterOption]] = None - filter_year_by_year: Optional[Set[FilterOption]] = None - - -# TODO update to use check_if_none -class LinkPropertiesLocal(BaseModel): - def __init__( - self, - link_properties: LinkProperties = LinkProperties(), - **kwargs: Optional[Any], - ): - super().__init__(**kwargs) - self._hurdles_cost = link_properties.hurdles_cost or False - self._loop_flow = link_properties.loop_flow or False - self._use_phase_shifter = link_properties.use_phase_shifter or False - self._transmission_capacities = ( - link_properties.transmission_capacities - if link_properties.transmission_capacities - else TransmissionCapacities.ENABLED - ) - self._asset_type = ( - link_properties.asset_type if link_properties.asset_type else AssetType.AC - ) - self._display_comments = link_properties.display_comments or True - self._filter_synthesis = link_properties.filter_synthesis or { - FilterOption.HOURLY, - 
FilterOption.DAILY, - FilterOption.WEEKLY, - FilterOption.MONTHLY, - FilterOption.ANNUAL, - } - self._filter_year_by_year = link_properties.filter_year_by_year or { - FilterOption.HOURLY, - FilterOption.DAILY, - FilterOption.WEEKLY, - FilterOption.MONTHLY, - FilterOption.ANNUAL, - } - - @computed_field # type: ignore[misc] - @property - def ini_fields(self) -> Mapping[str, str]: - return { - "hurdles-cost": f"{self._hurdles_cost}".lower(), - "loop-flow": f"{self._loop_flow}".lower(), - "use-phase-shifter": f"{self._use_phase_shifter}".lower(), - "transmission-capacities": f"{self._transmission_capacities.value}", - "asset-type": f"{self._asset_type.value}", - "display-comments": f"{self._display_comments}".lower(), - "filter-synthesis": ", ".join( - filter_value - for filter_value in sort_filter_values(self._filter_synthesis) - ), - "filter-year-by-year": ", ".join( - filter_value - for filter_value in sort_filter_values(self._filter_year_by_year) - ), - } - - def yield_link_properties(self) -> LinkProperties: - return LinkProperties( - hurdles_cost=self._hurdles_cost, - loop_flow=self._loop_flow, - use_phase_shifter=self._use_phase_shifter, - transmission_capacities=self._transmission_capacities, - asset_type=self._asset_type, - display_comments=self._display_comments, - filter_synthesis=self._filter_synthesis, - filter_year_by_year=self._filter_year_by_year, - ) - - -class LinkUi( - BaseModel, extra="forbid", populate_by_name=True, alias_generator=link_aliasing -): - """ - DTO for updating link UI - """ - - link_style: Optional[LinkStyle] = None - link_width: Optional[float] = None - colorr: Optional[int] = None - colorg: Optional[int] = None - colorb: Optional[int] = None - - -class LinkUiLocal(BaseModel): - def __init__( - self, - link_ui: LinkUi = LinkUi(), - **kwargs: Optional[Any], - ): - super().__init__(**kwargs) - self._link_style = link_ui.link_style if link_ui.link_style else LinkStyle.PLAIN - self._link_width = link_ui.link_width if link_ui.link_width 
is not None else 1 - self._colorr = link_ui.colorr if link_ui.colorr is not None else 112 - self._colorg = link_ui.colorg if link_ui.colorg is not None else 112 - self._colorb = link_ui.colorb if link_ui.colorb is not None else 112 - - @computed_field # type: ignore[misc] - @property - def ini_fields(self) -> Mapping[str, str]: - return { - "link-style": f"{self._link_style.value}", - "link-width": f"{self._link_width}", - "colorr": f"{self._colorr}", - "colorg": f"{self._colorg}", - "colorb": f"{self._colorb}", - } - - def yield_link_ui(self) -> LinkUi: - return LinkUi( - link_style=self._link_style, - link_width=self._link_width, - colorr=self._colorr, - colorg=self._colorg, - colorb=self._colorb, - ) - - -class Link: - def __init__( # type: ignore # TODO: Find a way to avoid circular imports - self, - area_from: Area, - area_to: Area, - link_service, - properties: Optional[LinkProperties] = None, - ui: Optional[LinkUi] = None, - ): - self._area_from = area_from - self._area_to = area_to - self._link_service = link_service - self._properties = properties or LinkProperties() - self._ui = ui or LinkUi() - - @property - def name(self) -> str: - return self._area_from.id + " / " + self._area_to.id - - @property - def area_from(self) -> Area: - return self._area_from - - @property - def area_to(self) -> Area: - return self._area_to - - @property - def properties(self) -> LinkProperties: - return self._properties - - @property - def ui(self) -> LinkUi: - return self._ui - - def update_properties(self, properties: LinkProperties) -> LinkProperties: - new_properties = self._link_service.update_link_properties(self, properties) - self._properties = new_properties - return new_properties - - def update_ui(self, ui: LinkUi) -> LinkUi: - new_ui = self._link_service.update_link_ui(self, ui) - self._ui = new_ui - return new_ui - - # todo: Add matrices +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the 
terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from enum import Enum +from typing import Optional, Set, Any, Mapping + +from pydantic import BaseModel, computed_field + +from antares.model.area import Area +from antares.model.commons import FilterOption, sort_filter_values + + +class TransmissionCapacities(Enum): + ENABLED = "enabled" + DISABLED = "ignore" + INFINITE = "infinite" + + +class AssetType(Enum): + AC = "ac" + DC = "dc" + GAZ = "gaz" + VIRTUAL = "virt" + OTHER = "other" + + +class LinkStyle(Enum): + DOT = "dot" + PLAIN = "plain" + DASH = "dash" + DOT_DASH = "dotdash" + + +def link_aliasing(string: str) -> str: + return string.replace("_", "-") + + +class LinkProperties(BaseModel, extra="forbid", populate_by_name=True, alias_generator=link_aliasing): + """ + DTO for updating link properties + """ + + hurdles_cost: Optional[bool] = None + loop_flow: Optional[bool] = None + use_phase_shifter: Optional[bool] = None + transmission_capacities: Optional[TransmissionCapacities] = None + asset_type: Optional[AssetType] = None + display_comments: Optional[bool] = None + filter_synthesis: Optional[Set[FilterOption]] = None + filter_year_by_year: Optional[Set[FilterOption]] = None + + +# TODO update to use check_if_none +class LinkPropertiesLocal(BaseModel): + def __init__( + self, + link_properties: LinkProperties = LinkProperties(), + **kwargs: Optional[Any], + ): + super().__init__(**kwargs) + self._hurdles_cost = link_properties.hurdles_cost or False + self._loop_flow = link_properties.loop_flow or False + self._use_phase_shifter = link_properties.use_phase_shifter or False + self._transmission_capacities = ( + link_properties.transmission_capacities + if link_properties.transmission_capacities + else TransmissionCapacities.ENABLED + ) + self._asset_type = 
link_properties.asset_type if link_properties.asset_type else AssetType.AC + self._display_comments = link_properties.display_comments or True + self._filter_synthesis = link_properties.filter_synthesis or { + FilterOption.HOURLY, + FilterOption.DAILY, + FilterOption.WEEKLY, + FilterOption.MONTHLY, + FilterOption.ANNUAL, + } + self._filter_year_by_year = link_properties.filter_year_by_year or { + FilterOption.HOURLY, + FilterOption.DAILY, + FilterOption.WEEKLY, + FilterOption.MONTHLY, + FilterOption.ANNUAL, + } + + @computed_field # type: ignore[misc] + @property + def ini_fields(self) -> Mapping[str, str]: + return { + "hurdles-cost": f"{self._hurdles_cost}".lower(), + "loop-flow": f"{self._loop_flow}".lower(), + "use-phase-shifter": f"{self._use_phase_shifter}".lower(), + "transmission-capacities": f"{self._transmission_capacities.value}", + "asset-type": f"{self._asset_type.value}", + "display-comments": f"{self._display_comments}".lower(), + "filter-synthesis": ", ".join(filter_value for filter_value in sort_filter_values(self._filter_synthesis)), + "filter-year-by-year": ", ".join( + filter_value for filter_value in sort_filter_values(self._filter_year_by_year) + ), + } + + def yield_link_properties(self) -> LinkProperties: + return LinkProperties( + hurdles_cost=self._hurdles_cost, + loop_flow=self._loop_flow, + use_phase_shifter=self._use_phase_shifter, + transmission_capacities=self._transmission_capacities, + asset_type=self._asset_type, + display_comments=self._display_comments, + filter_synthesis=self._filter_synthesis, + filter_year_by_year=self._filter_year_by_year, + ) + + +class LinkUi(BaseModel, extra="forbid", populate_by_name=True, alias_generator=link_aliasing): + """ + DTO for updating link UI + """ + + link_style: Optional[LinkStyle] = None + link_width: Optional[float] = None + colorr: Optional[int] = None + colorg: Optional[int] = None + colorb: Optional[int] = None + + +class LinkUiLocal(BaseModel): + def __init__( + self, + link_ui: LinkUi 
= LinkUi(), + **kwargs: Optional[Any], + ): + super().__init__(**kwargs) + self._link_style = link_ui.link_style if link_ui.link_style else LinkStyle.PLAIN + self._link_width = link_ui.link_width if link_ui.link_width is not None else 1 + self._colorr = link_ui.colorr if link_ui.colorr is not None else 112 + self._colorg = link_ui.colorg if link_ui.colorg is not None else 112 + self._colorb = link_ui.colorb if link_ui.colorb is not None else 112 + + @computed_field # type: ignore[misc] + @property + def ini_fields(self) -> Mapping[str, str]: + return { + "link-style": f"{self._link_style.value}", + "link-width": f"{self._link_width}", + "colorr": f"{self._colorr}", + "colorg": f"{self._colorg}", + "colorb": f"{self._colorb}", + } + + def yield_link_ui(self) -> LinkUi: + return LinkUi( + link_style=self._link_style, + link_width=self._link_width, + colorr=self._colorr, + colorg=self._colorg, + colorb=self._colorb, + ) + + +class Link: + def __init__( # type: ignore # TODO: Find a way to avoid circular imports + self, + area_from: Area, + area_to: Area, + link_service, + properties: Optional[LinkProperties] = None, + ui: Optional[LinkUi] = None, + ): + self._area_from = area_from + self._area_to = area_to + self._link_service = link_service + self._properties = properties or LinkProperties() + self._ui = ui or LinkUi() + + @property + def name(self) -> str: + return self._area_from.id + " / " + self._area_to.id + + @property + def area_from(self) -> Area: + return self._area_from + + @property + def area_to(self) -> Area: + return self._area_to + + @property + def properties(self) -> LinkProperties: + return self._properties + + @property + def ui(self) -> LinkUi: + return self._ui + + def update_properties(self, properties: LinkProperties) -> LinkProperties: + new_properties = self._link_service.update_link_properties(self, properties) + self._properties = new_properties + return new_properties + + def update_ui(self, ui: LinkUi) -> LinkUi: + new_ui = 
self._link_service.update_link_ui(self, ui) + self._ui = new_ui + return new_ui + + # todo: Add matrices diff --git a/src/antares/model/misc_gen.py b/src/antares/model/misc_gen.py index b4f11792..7d1aef2f 100644 --- a/src/antares/model/misc_gen.py +++ b/src/antares/model/misc_gen.py @@ -1,18 +1,18 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - - -from antares.tools.time_series_tool import TimeSeries - - -class MiscGen(TimeSeries): - pass +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + + +from antares.tools.time_series_tool import TimeSeries + + +class MiscGen(TimeSeries): + pass diff --git a/src/antares/model/renewable.py b/src/antares/model/renewable.py index 2d36eae5..943b5dcf 100644 --- a/src/antares/model/renewable.py +++ b/src/antares/model/renewable.py @@ -1,169 +1,160 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from enum import Enum -from typing import Optional, Any - -import pandas as pd -from pydantic import BaseModel, computed_field - -from antares.model.cluster import ClusterProperties -from antares.tools.contents_tool import transform_name_to_id - - -class RenewableClusterGroup(Enum): - """ - Renewable cluster groups. - - The group can be any one of the following: - "Wind Onshore", "Wind Offshore", "Solar Thermal", "Solar PV", "Solar Rooftop", - "Other RES 1", "Other RES 2", "Other RES 3", or "Other RES 4". - If not specified, the renewable cluster will be part of the group "Other RES 1". - """ - - THERMAL_SOLAR = "Solar Thermal" - PV_SOLAR = "Solar PV" - ROOFTOP_SOLAR = "Solar Rooftop" - WIND_ON_SHORE = "Wind Onshore" - WIND_OFF_SHORE = "Wind Offshore" - OTHER1 = "Other RES 1" - OTHER2 = "Other RES 2" - OTHER3 = "Other RES 3" - OTHER4 = "Other RES 4" - - -class TimeSeriesInterpretation(Enum): - """ - Timeseries mode: - - - Power generation means that the unit of the timeseries is in MW, - - Production factor means that the unit of the timeseries is in p.u. - (between 0 and 1, 1 meaning the full installed capacity) - """ - - POWER_GENERATION = "power-generation" - PRODUCTION_FACTOR = "production-factor" - - -class RenewableClusterProperties(ClusterProperties): - """ - Properties of a renewable cluster read from the configuration files. 
- """ - - group: Optional[RenewableClusterGroup] = None - ts_interpretation: Optional[TimeSeriesInterpretation] = None - - -# TODO update to use check_if_none -class RenewableClusterPropertiesLocal( - BaseModel, -): - def __init__( - self, - renewable_name: str, - renewable_cluster_properties: Optional[RenewableClusterProperties] = None, - **kwargs: Optional[Any], - ): - super().__init__(**kwargs) - renewable_cluster_properties = ( - renewable_cluster_properties or RenewableClusterProperties() - ) - self._renewable_name = renewable_name - self._enabled = ( - renewable_cluster_properties.enabled - if renewable_cluster_properties.enabled is not None - else True - ) - self._unit_count = ( - renewable_cluster_properties.unit_count - if renewable_cluster_properties.unit_count is not None - else 1 - ) - self._nominal_capacity = ( - renewable_cluster_properties.nominal_capacity - if renewable_cluster_properties.nominal_capacity is not None - else 0 - ) - self._group = renewable_cluster_properties.group or RenewableClusterGroup.OTHER1 - self._ts_interpretation = ( - renewable_cluster_properties.ts_interpretation - or TimeSeriesInterpretation.POWER_GENERATION - ) - - @property - def renewable_name(self) -> str: - return self._renewable_name - - @computed_field # type: ignore[misc] - @property - def ini_fields(self) -> dict[str, dict[str, str]]: - return { - self._renewable_name: { - "name": self._renewable_name, - "group": self._group.value, - "enabled": f"{self._enabled}".lower(), - "nominalcapacity": f"{self._nominal_capacity:.6f}", - "unitcount": f"{self._unit_count}", - "ts-interpretation": self._ts_interpretation.value, - } - } - - def yield_renewable_cluster_properties(self) -> RenewableClusterProperties: - return RenewableClusterProperties( - enabled=self._enabled, - unit_count=self._unit_count, - nominal_capacity=self._nominal_capacity, - group=self._group, - ts_interpretation=self._ts_interpretation, - ) - - -class RenewableCluster: - def __init__( # type: ignore # 
TODO: Find a way to avoid circular imports - self, - renewable_service, - area_id: str, - name: str, - properties: Optional[RenewableClusterProperties] = None, - ): - self._area_id = area_id - self._renewable_service = renewable_service - self._name = name - self._id = transform_name_to_id(name) - self._properties = properties or RenewableClusterProperties() - - # TODO: Add matrices. - - @property - def area_id(self) -> str: - return self._area_id - - @property - def name(self) -> str: - return self._name - - @property - def id(self) -> str: - return self._id - - @property - def properties(self) -> RenewableClusterProperties: - return self._properties - - def update_properties(self, properties: RenewableClusterProperties) -> None: - new_properties = self._renewable_service.update_renewable_properties( - self, properties - ) - self._properties = new_properties - - def get_renewable_matrix(self) -> pd.DataFrame: - return self._renewable_service.get_renewable_matrix(self) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from enum import Enum +from typing import Optional, Any + +import pandas as pd +from pydantic import BaseModel, computed_field + +from antares.model.cluster import ClusterProperties +from antares.tools.contents_tool import transform_name_to_id + + +class RenewableClusterGroup(Enum): + """ + Renewable cluster groups. + + The group can be any one of the following: + "Wind Onshore", "Wind Offshore", "Solar Thermal", "Solar PV", "Solar Rooftop", + "Other RES 1", "Other RES 2", "Other RES 3", or "Other RES 4". + If not specified, the renewable cluster will be part of the group "Other RES 1". 
+ """ + + THERMAL_SOLAR = "Solar Thermal" + PV_SOLAR = "Solar PV" + ROOFTOP_SOLAR = "Solar Rooftop" + WIND_ON_SHORE = "Wind Onshore" + WIND_OFF_SHORE = "Wind Offshore" + OTHER1 = "Other RES 1" + OTHER2 = "Other RES 2" + OTHER3 = "Other RES 3" + OTHER4 = "Other RES 4" + + +class TimeSeriesInterpretation(Enum): + """ + Timeseries mode: + + - Power generation means that the unit of the timeseries is in MW, + - Production factor means that the unit of the timeseries is in p.u. + (between 0 and 1, 1 meaning the full installed capacity) + """ + + POWER_GENERATION = "power-generation" + PRODUCTION_FACTOR = "production-factor" + + +class RenewableClusterProperties(ClusterProperties): + """ + Properties of a renewable cluster read from the configuration files. + """ + + group: Optional[RenewableClusterGroup] = None + ts_interpretation: Optional[TimeSeriesInterpretation] = None + + +# TODO update to use check_if_none +class RenewableClusterPropertiesLocal( + BaseModel, +): + def __init__( + self, + renewable_name: str, + renewable_cluster_properties: Optional[RenewableClusterProperties] = None, + **kwargs: Optional[Any], + ): + super().__init__(**kwargs) + renewable_cluster_properties = renewable_cluster_properties or RenewableClusterProperties() + self._renewable_name = renewable_name + self._enabled = ( + renewable_cluster_properties.enabled if renewable_cluster_properties.enabled is not None else True + ) + self._unit_count = ( + renewable_cluster_properties.unit_count if renewable_cluster_properties.unit_count is not None else 1 + ) + self._nominal_capacity = ( + renewable_cluster_properties.nominal_capacity + if renewable_cluster_properties.nominal_capacity is not None + else 0 + ) + self._group = renewable_cluster_properties.group or RenewableClusterGroup.OTHER1 + self._ts_interpretation = ( + renewable_cluster_properties.ts_interpretation or TimeSeriesInterpretation.POWER_GENERATION + ) + + @property + def renewable_name(self) -> str: + return self._renewable_name + + 
@computed_field # type: ignore[misc] + @property + def ini_fields(self) -> dict[str, dict[str, str]]: + return { + self._renewable_name: { + "name": self._renewable_name, + "group": self._group.value, + "enabled": f"{self._enabled}".lower(), + "nominalcapacity": f"{self._nominal_capacity:.6f}", + "unitcount": f"{self._unit_count}", + "ts-interpretation": self._ts_interpretation.value, + } + } + + def yield_renewable_cluster_properties(self) -> RenewableClusterProperties: + return RenewableClusterProperties( + enabled=self._enabled, + unit_count=self._unit_count, + nominal_capacity=self._nominal_capacity, + group=self._group, + ts_interpretation=self._ts_interpretation, + ) + + +class RenewableCluster: + def __init__( # type: ignore # TODO: Find a way to avoid circular imports + self, + renewable_service, + area_id: str, + name: str, + properties: Optional[RenewableClusterProperties] = None, + ): + self._area_id = area_id + self._renewable_service = renewable_service + self._name = name + self._id = transform_name_to_id(name) + self._properties = properties or RenewableClusterProperties() + + # TODO: Add matrices. 
+ + @property + def area_id(self) -> str: + return self._area_id + + @property + def name(self) -> str: + return self._name + + @property + def id(self) -> str: + return self._id + + @property + def properties(self) -> RenewableClusterProperties: + return self._properties + + def update_properties(self, properties: RenewableClusterProperties) -> None: + new_properties = self._renewable_service.update_renewable_properties(self, properties) + self._properties = new_properties + + def get_renewable_matrix(self) -> pd.DataFrame: + return self._renewable_service.get_renewable_matrix(self) diff --git a/src/antares/model/reserves.py b/src/antares/model/reserves.py index b74a0fab..0d9aee30 100644 --- a/src/antares/model/reserves.py +++ b/src/antares/model/reserves.py @@ -1,18 +1,18 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - - -from antares.tools.time_series_tool import TimeSeries - - -class Reserves(TimeSeries): - pass +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ + +from antares.tools.time_series_tool import TimeSeries + + +class Reserves(TimeSeries): + pass diff --git a/src/antares/model/settings/__init__.py b/src/antares/model/settings/__init__.py index c33bbc12..41c267a7 100644 --- a/src/antares/model/settings/__init__.py +++ b/src/antares/model/settings/__init__.py @@ -1,25 +1,25 @@ -from typing import Optional, Dict - -from pydantic import BaseModel - -from antares.model.settings.adequacy_patch import AdequacyPatchProperties -from antares.model.settings.advanced_parameters import AdvancedProperties -from antares.model.settings.general import GeneralProperties -from antares.model.settings.optimization import OptimizationProperties -from antares.model.settings.thematic_trimming import ThematicTrimming -from antares.model.settings.time_series import TimeSeriesProperties - - -class PlaylistData(BaseModel): - status: bool - weight: float - - -class StudySettings(BaseModel): - general_properties: Optional[GeneralProperties] = None - thematic_trimming: Optional[ThematicTrimming] = None - time_series_properties: Optional[TimeSeriesProperties] = None - adequacy_patch_properties: Optional[AdequacyPatchProperties] = None - advanced_properties: Optional[AdvancedProperties] = None - optimization_properties: Optional[OptimizationProperties] = None - playlist: Optional[Dict[str, PlaylistData]] = None +from typing import Optional, Dict + +from pydantic import BaseModel + +from antares.model.settings.adequacy_patch import AdequacyPatchProperties +from antares.model.settings.advanced_parameters import AdvancedProperties +from antares.model.settings.general import GeneralProperties +from antares.model.settings.optimization import OptimizationProperties +from antares.model.settings.thematic_trimming import ThematicTrimming +from antares.model.settings.time_series import TimeSeriesProperties + + +class PlaylistData(BaseModel): + status: bool + weight: float + + +class StudySettings(BaseModel): + general_properties: 
Optional[GeneralProperties] = None + thematic_trimming: Optional[ThematicTrimming] = None + time_series_properties: Optional[TimeSeriesProperties] = None + adequacy_patch_properties: Optional[AdequacyPatchProperties] = None + advanced_properties: Optional[AdvancedProperties] = None + optimization_properties: Optional[OptimizationProperties] = None + playlist: Optional[Dict[str, PlaylistData]] = None diff --git a/src/antares/model/settings/adequacy_patch.py b/src/antares/model/settings/adequacy_patch.py index 1c6050f2..499e2e62 100644 --- a/src/antares/model/settings/adequacy_patch.py +++ b/src/antares/model/settings/adequacy_patch.py @@ -1,38 +1,36 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from enum import Enum -from typing import Optional - -from pydantic import BaseModel -from pydantic.alias_generators import to_camel - - -class PriceTakingOrder(Enum): - DENS = "DENS" - LOAD = "Load" - - -class AdequacyPatchProperties(BaseModel, alias_generator=to_camel): - # version 830 - enable_adequacy_patch: Optional[bool] = None - ntc_from_physical_areas_out_to_physical_areas_in_adequacy_patch: Optional[bool] = ( - None - ) - ntc_between_physical_areas_out_adequacy_patch: Optional[bool] = None - # version 850 - price_taking_order: Optional[PriceTakingOrder] = None - include_hurdle_cost_csr: Optional[bool] = None - check_csr_cost_function: Optional[bool] = None - threshold_initiate_curtailment_sharing_rule: Optional[int] = None - threshold_display_local_matching_rule_violations: Optional[int] = None - threshold_csr_variable_bounds_relaxation: Optional[int] = None +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +from enum import Enum +from typing import Optional + +from pydantic import BaseModel +from pydantic.alias_generators import to_camel + + +class PriceTakingOrder(Enum): + DENS = "DENS" + LOAD = "Load" + + +class AdequacyPatchProperties(BaseModel, alias_generator=to_camel): + # version 830 + enable_adequacy_patch: Optional[bool] = None + ntc_from_physical_areas_out_to_physical_areas_in_adequacy_patch: Optional[bool] = None + ntc_between_physical_areas_out_adequacy_patch: Optional[bool] = None + # version 850 + price_taking_order: Optional[PriceTakingOrder] = None + include_hurdle_cost_csr: Optional[bool] = None + check_csr_cost_function: Optional[bool] = None + threshold_initiate_curtailment_sharing_rule: Optional[int] = None + threshold_display_local_matching_rule_violations: Optional[int] = None + threshold_csr_variable_bounds_relaxation: Optional[int] = None diff --git a/src/antares/model/settings/advanced_parameters.py b/src/antares/model/settings/advanced_parameters.py index 298397dc..3909ccdf 100644 --- a/src/antares/model/settings/advanced_parameters.py +++ b/src/antares/model/settings/advanced_parameters.py @@ -1,93 +1,93 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from enum import Enum -from typing import Optional - -from pydantic import BaseModel -from pydantic.alias_generators import to_camel - - -class InitialReservoirLevel(Enum): - COLD_START = "cold start" - HOT_START = "hot start" - - -class HydroHeuristicPolicy(Enum): - ACCOMMODATE_RULES_CURVES = "accommodate rule curves" - MAXIMIZE_GENERATION = "maximize generation" - - -class HydroPricingMode(Enum): - FAST = "fast" - ACCURATE = "accurate" - - -class PowerFluctuation(Enum): - FREE_MODULATIONS = "free modulations" - MINIMIZE_EXCURSIONS = "minimize excursions" - MINIMIZE_RAMPING = "minimize ramping" - - -class SheddingPolicy(Enum): - SHAVE_PEAKS = "shave peaks" - MINIMIZE_DURATION = "minimize duration" - - -class ReserveManagement(Enum): - GLOBAL = "global" - - -class UnitCommitmentMode(Enum): - FAST = "fast" - ACCURATE = "accurate" - MILP = "milp" - - -class SimulationCore(Enum): - MINIMUM = "minimum" - LOW = "low" - MEDIUM = "medium" - HIGH = "high" - MAXIMUM = "maximum" - - -class RenewableGenerationModeling(Enum): - AGGREGATED = "aggregated" - CLUSTERS = "clusters" - - -class AdvancedProperties(BaseModel, alias_generator=to_camel): - # Advanced parameters - accuracy_on_correlation: Optional[str] = None - # Other preferences - initial_reservoir_levels: Optional[InitialReservoirLevel] = None - power_fluctuations: Optional[PowerFluctuation] = None - shedding_policy: Optional[SheddingPolicy] = None - hydro_pricing_mode: Optional[HydroPricingMode] = None - hydro_heuristic_policy: Optional[HydroHeuristicPolicy] = None - unit_commitment_mode: Optional[UnitCommitmentMode] = None - number_of_cores_mode: Optional[SimulationCore] = None - day_ahead_reserve_management: Optional[ReserveManagement] = None - renewable_generation_modelling: Optional[RenewableGenerationModeling] = None - # Seeds - seed_tsgen_wind: Optional[int] = None - seed_tsgen_load: Optional[int] = None - seed_tsgen_hydro: Optional[int] = None - seed_tsgen_thermal: Optional[int] = None - seed_tsgen_solar: 
Optional[int] = None - seed_tsnumbers: Optional[int] = None - seed_unsupplied_energy_costs: Optional[int] = None - seed_spilled_energy_costs: Optional[int] = None - seed_thermal_costs: Optional[int] = None - seed_hydro_costs: Optional[int] = None - seed_initial_reservoir_levels: Optional[int] = None +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from enum import Enum +from typing import Optional + +from pydantic import BaseModel +from pydantic.alias_generators import to_camel + + +class InitialReservoirLevel(Enum): + COLD_START = "cold start" + HOT_START = "hot start" + + +class HydroHeuristicPolicy(Enum): + ACCOMMODATE_RULES_CURVES = "accommodate rule curves" + MAXIMIZE_GENERATION = "maximize generation" + + +class HydroPricingMode(Enum): + FAST = "fast" + ACCURATE = "accurate" + + +class PowerFluctuation(Enum): + FREE_MODULATIONS = "free modulations" + MINIMIZE_EXCURSIONS = "minimize excursions" + MINIMIZE_RAMPING = "minimize ramping" + + +class SheddingPolicy(Enum): + SHAVE_PEAKS = "shave peaks" + MINIMIZE_DURATION = "minimize duration" + + +class ReserveManagement(Enum): + GLOBAL = "global" + + +class UnitCommitmentMode(Enum): + FAST = "fast" + ACCURATE = "accurate" + MILP = "milp" + + +class SimulationCore(Enum): + MINIMUM = "minimum" + LOW = "low" + MEDIUM = "medium" + HIGH = "high" + MAXIMUM = "maximum" + + +class RenewableGenerationModeling(Enum): + AGGREGATED = "aggregated" + CLUSTERS = "clusters" + + +class AdvancedProperties(BaseModel, alias_generator=to_camel): + # Advanced parameters + accuracy_on_correlation: Optional[str] = None + # Other preferences + initial_reservoir_levels: Optional[InitialReservoirLevel] = None + 
power_fluctuations: Optional[PowerFluctuation] = None + shedding_policy: Optional[SheddingPolicy] = None + hydro_pricing_mode: Optional[HydroPricingMode] = None + hydro_heuristic_policy: Optional[HydroHeuristicPolicy] = None + unit_commitment_mode: Optional[UnitCommitmentMode] = None + number_of_cores_mode: Optional[SimulationCore] = None + day_ahead_reserve_management: Optional[ReserveManagement] = None + renewable_generation_modelling: Optional[RenewableGenerationModeling] = None + # Seeds + seed_tsgen_wind: Optional[int] = None + seed_tsgen_load: Optional[int] = None + seed_tsgen_hydro: Optional[int] = None + seed_tsgen_thermal: Optional[int] = None + seed_tsgen_solar: Optional[int] = None + seed_tsnumbers: Optional[int] = None + seed_unsupplied_energy_costs: Optional[int] = None + seed_spilled_energy_costs: Optional[int] = None + seed_thermal_costs: Optional[int] = None + seed_hydro_costs: Optional[int] = None + seed_initial_reservoir_levels: Optional[int] = None diff --git a/src/antares/model/settings/general.py b/src/antares/model/settings/general.py index 9a169ca0..ad9a6a78 100644 --- a/src/antares/model/settings/general.py +++ b/src/antares/model/settings/general.py @@ -1,74 +1,74 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from typing import Optional - -from pydantic import BaseModel -from pydantic.alias_generators import to_camel - -from antares.tools.contents_tool import EnumIgnoreCase - - -class Mode(EnumIgnoreCase): - ECONOMY = "economy" - ADEQUACY = "adequacy" - DRAFT = "draft" - - -class Month(EnumIgnoreCase): - JANUARY = "january" - FEBRUARY = "february" - MARCH = "march" - APRIL = "april" - MAY = "may" - JUNE = "june" - JULY = "july" - AUGUST = "august" - SEPTEMBER = "september" - OCTOBER = "october" - NOVEMBER = "november" - DECEMBER = "december" - - -class WeekDay(EnumIgnoreCase): - MONDAY = "monday" - TUESDAY = "tuesday" - WEDNESDAY = "wednesday" - THURSDAY = "thursday" - FRIDAY = "friday" - SATURDAY = "saturday" - SUNDAY = "sunday" - - -class BuildingMode(EnumIgnoreCase): - AUTOMATIC = "automatic" - CUSTOM = "custom" - DERATED = "derated" - - -class GeneralProperties(BaseModel, alias_generator=to_camel): - mode: Optional[Mode] = None - first_day: Optional[int] = None - last_day: Optional[int] = None - horizon: Optional[str] = None - first_month: Optional[Month] = None - first_week_day: Optional[WeekDay] = None - first_january: Optional[WeekDay] = None - leap_year: Optional[bool] = None - nb_years: Optional[int] = None - building_mode: Optional[BuildingMode] = None - selection_mode: Optional[bool] = None - year_by_year: Optional[bool] = None - simulation_synthesis: Optional[bool] = None - mc_scenario: Optional[bool] = None - geographic_trimming: Optional[bool] = None - thematic_trimming: Optional[bool] = None +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +from typing import Optional + +from pydantic import BaseModel +from pydantic.alias_generators import to_camel + +from antares.tools.contents_tool import EnumIgnoreCase + + +class Mode(EnumIgnoreCase): + ECONOMY = "economy" + ADEQUACY = "adequacy" + DRAFT = "draft" + + +class Month(EnumIgnoreCase): + JANUARY = "january" + FEBRUARY = "february" + MARCH = "march" + APRIL = "april" + MAY = "may" + JUNE = "june" + JULY = "july" + AUGUST = "august" + SEPTEMBER = "september" + OCTOBER = "october" + NOVEMBER = "november" + DECEMBER = "december" + + +class WeekDay(EnumIgnoreCase): + MONDAY = "monday" + TUESDAY = "tuesday" + WEDNESDAY = "wednesday" + THURSDAY = "thursday" + FRIDAY = "friday" + SATURDAY = "saturday" + SUNDAY = "sunday" + + +class BuildingMode(EnumIgnoreCase): + AUTOMATIC = "automatic" + CUSTOM = "custom" + DERATED = "derated" + + +class GeneralProperties(BaseModel, alias_generator=to_camel): + mode: Optional[Mode] = None + first_day: Optional[int] = None + last_day: Optional[int] = None + horizon: Optional[str] = None + first_month: Optional[Month] = None + first_week_day: Optional[WeekDay] = None + first_january: Optional[WeekDay] = None + leap_year: Optional[bool] = None + nb_years: Optional[int] = None + building_mode: Optional[BuildingMode] = None + selection_mode: Optional[bool] = None + year_by_year: Optional[bool] = None + simulation_synthesis: Optional[bool] = None + mc_scenario: Optional[bool] = None + geographic_trimming: Optional[bool] = None + thematic_trimming: Optional[bool] = None diff --git a/src/antares/model/settings/optimization.py b/src/antares/model/settings/optimization.py index 712a4500..c9dce02c 100644 --- a/src/antares/model/settings/optimization.py +++ b/src/antares/model/settings/optimization.py @@ -1,58 +1,56 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. 
If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from enum import Enum -from typing import Optional, Union - -from pydantic import BaseModel -from pydantic.alias_generators import to_camel - - -class LegacyTransmissionCapacities(Enum): - INFINITE = "infinite" - - -class TransmissionCapacities(Enum): - LOCAL_VALUES = "local-values" - NULL_FOR_ALL_LINKS = "null-for-all-links" - INFINITE_FOR_ALL_LINKS = "infinite-for-all-links" - NULL_FOR_PHYSICAL_LINKS = "null-for-physical-links" - INFINITE_FOR_PHYSICAL_LINKS = "infinite-for-physical-links" - - -class UnfeasibleProblemBehavior(Enum): - WARNING_DRY = "warning-dry" - WARNING_VERBOSE = "warning-verbose" - ERROR_DRY = "error-dry" - ERROR_VERBOSE = "error-verbose" - - -class SimplexOptimizationRange(Enum): - DAY = "day" - WEEK = "week" - - -class OptimizationProperties(BaseModel, alias_generator=to_camel): - binding_constraints: Optional[bool] = None - hurdle_costs: Optional[bool] = None - transmission_capacities: Union[ - bool, Union[LegacyTransmissionCapacities, TransmissionCapacities], None - ] = None - thermal_clusters_min_stable_power: Optional[bool] = None - thermal_clusters_min_ud_time: Optional[bool] = None - day_ahead_reserve: Optional[bool] = None - primary_reserve: Optional[bool] = None - strategic_reserve: Optional[bool] = None - spinning_reserve: Optional[bool] = None - export_mps: Union[bool, str, None] = None - unfeasible_problem_behavior: Optional[UnfeasibleProblemBehavior] = None - simplex_optimization_range: Optional[SimplexOptimizationRange] = None +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from enum import Enum +from typing import Optional, Union + +from pydantic import BaseModel +from pydantic.alias_generators import to_camel + + +class LegacyTransmissionCapacities(Enum): + INFINITE = "infinite" + + +class TransmissionCapacities(Enum): + LOCAL_VALUES = "local-values" + NULL_FOR_ALL_LINKS = "null-for-all-links" + INFINITE_FOR_ALL_LINKS = "infinite-for-all-links" + NULL_FOR_PHYSICAL_LINKS = "null-for-physical-links" + INFINITE_FOR_PHYSICAL_LINKS = "infinite-for-physical-links" + + +class UnfeasibleProblemBehavior(Enum): + WARNING_DRY = "warning-dry" + WARNING_VERBOSE = "warning-verbose" + ERROR_DRY = "error-dry" + ERROR_VERBOSE = "error-verbose" + + +class SimplexOptimizationRange(Enum): + DAY = "day" + WEEK = "week" + + +class OptimizationProperties(BaseModel, alias_generator=to_camel): + binding_constraints: Optional[bool] = None + hurdle_costs: Optional[bool] = None + transmission_capacities: Union[bool, Union[LegacyTransmissionCapacities, TransmissionCapacities], None] = None + thermal_clusters_min_stable_power: Optional[bool] = None + thermal_clusters_min_ud_time: Optional[bool] = None + day_ahead_reserve: Optional[bool] = None + primary_reserve: Optional[bool] = None + strategic_reserve: Optional[bool] = None + spinning_reserve: Optional[bool] = None + export_mps: Union[bool, str, None] = None + unfeasible_problem_behavior: Optional[UnfeasibleProblemBehavior] = None + simplex_optimization_range: Optional[SimplexOptimizationRange] = None diff --git a/src/antares/model/settings/thematic_trimming.py b/src/antares/model/settings/thematic_trimming.py index 4bc83df6..6f350506 100644 --- a/src/antares/model/settings/thematic_trimming.py +++ b/src/antares/model/settings/thematic_trimming.py @@ -1,123 +1,123 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# 
License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from typing import Optional - -from pydantic import BaseModel -from pydantic.alias_generators import to_camel - - -class ThematicTrimming(BaseModel, alias_generator=to_camel): - """ - This class manages the configuration of result filtering in a simulation. - - This table allows the user to enable or disable specific variables before running a simulation. - """ - - ov_cost: Optional[bool] = None - op_cost: Optional[bool] = None - mrg_price: Optional[bool] = None - co2_emis: Optional[bool] = None - dtg_by_plant: Optional[bool] = None - balance: Optional[bool] = None - row_bal: Optional[bool] = None - psp: Optional[bool] = None - misc_ndg: Optional[bool] = None - load: Optional[bool] = None - h_ror: Optional[bool] = None - wind: Optional[bool] = None - solar: Optional[bool] = None - nuclear: Optional[bool] = None - lignite: Optional[bool] = None - coal: Optional[bool] = None - gas: Optional[bool] = None - oil: Optional[bool] = None - mix_fuel: Optional[bool] = None - misc_dtg: Optional[bool] = None - h_stor: Optional[bool] = None - h_pump: Optional[bool] = None - h_lev: Optional[bool] = None - h_infl: Optional[bool] = None - h_ovfl: Optional[bool] = None - h_val: Optional[bool] = None - h_cost: Optional[bool] = None - unsp_enrg: Optional[bool] = None - spil_enrg: Optional[bool] = None - lold: Optional[bool] = None - lolp: Optional[bool] = None - avl_dtg: Optional[bool] = None - dtg_mrg: Optional[bool] = None - max_mrg: Optional[bool] = None - np_cost: Optional[bool] = None - np_cost_by_plant: Optional[bool] = None - nodu: Optional[bool] = None - nodu_by_plant: Optional[bool] = None - flow_lin: Optional[bool] = None - ucap_lin: Optional[bool] = None - loop_flow: Optional[bool] = None - flow_quad: Optional[bool] = None - cong_fee_alg: Optional[bool] = 
None - cong_fee_abs: Optional[bool] = None - marg_cost: Optional[bool] = None - cong_prob_plus: Optional[bool] = None - cong_prob_minus: Optional[bool] = None - hurdle_cost: Optional[bool] = None - # since v8.1 - res_generation_by_plant: Optional[bool] = None - misc_dtg_2: Optional[bool] = None - misc_dtg_3: Optional[bool] = None - misc_dtg_4: Optional[bool] = None - wind_offshore: Optional[bool] = None - wind_onshore: Optional[bool] = None - solar_concrt: Optional[bool] = None - solar_pv: Optional[bool] = None - solar_rooft: Optional[bool] = None - renw_1: Optional[bool] = None - renw_2: Optional[bool] = None - renw_3: Optional[bool] = None - renw_4: Optional[bool] = None - # since v8.3 - dens: Optional[bool] = None - profit_by_plant: Optional[bool] = None - # since v8.6 - sts_inj_by_plant: Optional[bool] = None - sts_withdrawal_by_plant: Optional[bool] = None - sts_lvl_by_plant: Optional[bool] = None - psp_open_injection: Optional[bool] = None - psp_open_withdrawal: Optional[bool] = None - psp_open_level: Optional[bool] = None - psp_closed_injection: Optional[bool] = None - psp_closed_withdrawal: Optional[bool] = None - psp_closed_level: Optional[bool] = None - pondage_injection: Optional[bool] = None - pondage_withdrawal: Optional[bool] = None - pondage_level: Optional[bool] = None - battery_injection: Optional[bool] = None - battery_withdrawal: Optional[bool] = None - battery_level: Optional[bool] = None - other1_injection: Optional[bool] = None - other1_withdrawal: Optional[bool] = None - other1_level: Optional[bool] = None - other2_injection: Optional[bool] = None - other2_withdrawal: Optional[bool] = None - other2_level: Optional[bool] = None - other3_injection: Optional[bool] = None - other3_withdrawal: Optional[bool] = None - other3_level: Optional[bool] = None - other4_injection: Optional[bool] = None - other4_withdrawal: Optional[bool] = None - other4_level: Optional[bool] = None - other5_injection: Optional[bool] = None - other5_withdrawal: 
Optional[bool] = None - other5_level: Optional[bool] = None - # since v8.8 - sts_cashflow_by_cluster: Optional[bool] = None +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from typing import Optional + +from pydantic import BaseModel +from pydantic.alias_generators import to_camel + + +class ThematicTrimming(BaseModel, alias_generator=to_camel): + """ + This class manages the configuration of result filtering in a simulation. + + This table allows the user to enable or disable specific variables before running a simulation. + """ + + ov_cost: Optional[bool] = None + op_cost: Optional[bool] = None + mrg_price: Optional[bool] = None + co2_emis: Optional[bool] = None + dtg_by_plant: Optional[bool] = None + balance: Optional[bool] = None + row_bal: Optional[bool] = None + psp: Optional[bool] = None + misc_ndg: Optional[bool] = None + load: Optional[bool] = None + h_ror: Optional[bool] = None + wind: Optional[bool] = None + solar: Optional[bool] = None + nuclear: Optional[bool] = None + lignite: Optional[bool] = None + coal: Optional[bool] = None + gas: Optional[bool] = None + oil: Optional[bool] = None + mix_fuel: Optional[bool] = None + misc_dtg: Optional[bool] = None + h_stor: Optional[bool] = None + h_pump: Optional[bool] = None + h_lev: Optional[bool] = None + h_infl: Optional[bool] = None + h_ovfl: Optional[bool] = None + h_val: Optional[bool] = None + h_cost: Optional[bool] = None + unsp_enrg: Optional[bool] = None + spil_enrg: Optional[bool] = None + lold: Optional[bool] = None + lolp: Optional[bool] = None + avl_dtg: Optional[bool] = None + dtg_mrg: Optional[bool] = None + max_mrg: Optional[bool] = None + np_cost: Optional[bool] = None + 
np_cost_by_plant: Optional[bool] = None + nodu: Optional[bool] = None + nodu_by_plant: Optional[bool] = None + flow_lin: Optional[bool] = None + ucap_lin: Optional[bool] = None + loop_flow: Optional[bool] = None + flow_quad: Optional[bool] = None + cong_fee_alg: Optional[bool] = None + cong_fee_abs: Optional[bool] = None + marg_cost: Optional[bool] = None + cong_prob_plus: Optional[bool] = None + cong_prob_minus: Optional[bool] = None + hurdle_cost: Optional[bool] = None + # since v8.1 + res_generation_by_plant: Optional[bool] = None + misc_dtg_2: Optional[bool] = None + misc_dtg_3: Optional[bool] = None + misc_dtg_4: Optional[bool] = None + wind_offshore: Optional[bool] = None + wind_onshore: Optional[bool] = None + solar_concrt: Optional[bool] = None + solar_pv: Optional[bool] = None + solar_rooft: Optional[bool] = None + renw_1: Optional[bool] = None + renw_2: Optional[bool] = None + renw_3: Optional[bool] = None + renw_4: Optional[bool] = None + # since v8.3 + dens: Optional[bool] = None + profit_by_plant: Optional[bool] = None + # since v8.6 + sts_inj_by_plant: Optional[bool] = None + sts_withdrawal_by_plant: Optional[bool] = None + sts_lvl_by_plant: Optional[bool] = None + psp_open_injection: Optional[bool] = None + psp_open_withdrawal: Optional[bool] = None + psp_open_level: Optional[bool] = None + psp_closed_injection: Optional[bool] = None + psp_closed_withdrawal: Optional[bool] = None + psp_closed_level: Optional[bool] = None + pondage_injection: Optional[bool] = None + pondage_withdrawal: Optional[bool] = None + pondage_level: Optional[bool] = None + battery_injection: Optional[bool] = None + battery_withdrawal: Optional[bool] = None + battery_level: Optional[bool] = None + other1_injection: Optional[bool] = None + other1_withdrawal: Optional[bool] = None + other1_level: Optional[bool] = None + other2_injection: Optional[bool] = None + other2_withdrawal: Optional[bool] = None + other2_level: Optional[bool] = None + other3_injection: Optional[bool] = None 
+ other3_withdrawal: Optional[bool] = None + other3_level: Optional[bool] = None + other4_injection: Optional[bool] = None + other4_withdrawal: Optional[bool] = None + other4_level: Optional[bool] = None + other5_injection: Optional[bool] = None + other5_withdrawal: Optional[bool] = None + other5_level: Optional[bool] = None + # since v8.8 + sts_cashflow_by_cluster: Optional[bool] = None diff --git a/src/antares/model/settings/time_series.py b/src/antares/model/settings/time_series.py index bd794ce2..3a3ab169 100644 --- a/src/antares/model/settings/time_series.py +++ b/src/antares/model/settings/time_series.py @@ -1,44 +1,44 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from enum import Enum -from typing import Optional - -from pydantic import BaseModel -from pydantic.alias_generators import to_camel - - -class SeasonCorrelation(Enum): - MONTHLY = "monthly" - ANNUAL = "annual" - - -class _Properties(BaseModel, alias_generator=to_camel): - stochastic_ts_status: Optional[bool] = None - number: Optional[int] = None - refresh: Optional[bool] = None - refresh_interval: Optional[int] = None - season_correlation: Optional[SeasonCorrelation] = None - store_in_input: Optional[bool] = None - store_in_output: Optional[bool] = None - intra_modal: Optional[bool] = None - inter_modal: Optional[bool] = None - - -class TimeSeriesProperties(BaseModel, alias_generator=to_camel): - load: Optional[_Properties] = None - hydro: Optional[_Properties] = None - thermal: Optional[_Properties] = None - wind: Optional[_Properties] = None - solar: Optional[_Properties] = None - renewables: Optional[_Properties] = None - ntc: Optional[_Properties] = None +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +from enum import Enum +from typing import Optional + +from pydantic import BaseModel +from pydantic.alias_generators import to_camel + + +class SeasonCorrelation(Enum): + MONTHLY = "monthly" + ANNUAL = "annual" + + +class _Properties(BaseModel, alias_generator=to_camel): + stochastic_ts_status: Optional[bool] = None + number: Optional[int] = None + refresh: Optional[bool] = None + refresh_interval: Optional[int] = None + season_correlation: Optional[SeasonCorrelation] = None + store_in_input: Optional[bool] = None + store_in_output: Optional[bool] = None + intra_modal: Optional[bool] = None + inter_modal: Optional[bool] = None + + +class TimeSeriesProperties(BaseModel, alias_generator=to_camel): + load: Optional[_Properties] = None + hydro: Optional[_Properties] = None + thermal: Optional[_Properties] = None + wind: Optional[_Properties] = None + solar: Optional[_Properties] = None + renewables: Optional[_Properties] = None + ntc: Optional[_Properties] = None diff --git a/src/antares/model/solar.py b/src/antares/model/solar.py index 57aca133..d8257322 100644 --- a/src/antares/model/solar.py +++ b/src/antares/model/solar.py @@ -1,18 +1,18 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - - -from antares.tools.time_series_tool import TimeSeries - - -class Solar(TimeSeries): - pass +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ + +from antares.tools.time_series_tool import TimeSeries + + +class Solar(TimeSeries): + pass diff --git a/src/antares/model/st_storage.py b/src/antares/model/st_storage.py index bbf50557..63497249 100644 --- a/src/antares/model/st_storage.py +++ b/src/antares/model/st_storage.py @@ -1,206 +1,182 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from enum import Enum -from typing import Optional, Any - -import pandas as pd -from pydantic import BaseModel, computed_field -from pydantic.alias_generators import to_camel - -from antares.tools.contents_tool import transform_name_to_id -from antares.tools.ini_tool import check_if_none - - -class STStorageGroup(Enum): - # todo: this class should disappear with Simulator version 9.1 - PSP_OPEN = "PSP_open" - PSP_CLOSED = "PSP_closed" - PONDAGE = "Pondage" - BATTERY = "Battery" - OTHER1 = "Other1" - OTHER2 = "Other2" - OTHER3 = "Other3" - OTHER4 = "Other4" - OTHER5 = "Other5" - - -class STStorageMatrixName(Enum): - PMAX_INJECTION = "pmax_injection" - PMAX_WITHDRAWAL = "pmax_withdrawal" - LOWER_CURVE_RULE = "lower_rule_curve" - UPPER_RULE_CURVE = "upper_rule_curve" - INFLOWS = "inflows" - - -class STStorageProperties( - BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel -): - """ - Properties of a short-term storage system read from the configuration files. - - All aliases match the name of the corresponding field in the INI files. 
- """ - - group: Optional[STStorageGroup] = None - injection_nominal_capacity: Optional[float] = None - withdrawal_nominal_capacity: Optional[float] = None - reservoir_capacity: Optional[float] = None - efficiency: Optional[float] = None - initial_level: Optional[float] = None - initial_level_optim: Optional[bool] = None - # v880 - enabled: Optional[bool] = None - - -class STStoragePropertiesLocal(BaseModel): - def __init__( - self, - st_storage_name: str, - st_storage_properties: Optional[STStorageProperties] = None, - **kwargs: Optional[Any], - ): - super().__init__(**kwargs) - st_storage_properties = st_storage_properties or STStorageProperties() - self._st_storage_name = st_storage_name - self._group = check_if_none(st_storage_properties.group, STStorageGroup.OTHER1) - self._injection_nominal_capacity = check_if_none( - st_storage_properties.injection_nominal_capacity, 0 - ) - self._withdrawal_nominal_capacity = check_if_none( - st_storage_properties.withdrawal_nominal_capacity, 0 - ) - self._reservoir_capacity = check_if_none( - st_storage_properties.reservoir_capacity, 0 - ) - self._efficiency = check_if_none(st_storage_properties.efficiency, 1) - self._initial_level = check_if_none(st_storage_properties.initial_level, 0.5) - self._initial_level_optim = check_if_none( - st_storage_properties.initial_level_optim, False - ) - self._enabled = check_if_none(st_storage_properties.enabled, True) - - @property - def st_storage_name(self) -> str: - return self._st_storage_name - - @computed_field # type: ignore[misc] - @property - def list_ini_fields(self) -> dict[str, dict[str, str]]: - return { - f"{self._st_storage_name}": { - "name": self._st_storage_name, - "group": self._group.value, - "injectionnominalcapacity": f"{self._injection_nominal_capacity:.6f}", - "withdrawalnominalcapacity": f"{self._withdrawal_nominal_capacity:.6f}", - "reservoircapacity": f"{self._reservoir_capacity:.6f}", - "efficiency": f"{self._efficiency:.6f}", - "initiallevel": 
f"{self._initial_level:.6f}", - "initialleveloptim": f"{self._initial_level_optim}".lower(), - "enabled": f"{self._enabled}".lower(), - } - } - - def yield_st_storage_properties(self) -> STStorageProperties: - return STStorageProperties( - group=self._group, - injection_nominal_capacity=self._injection_nominal_capacity, - withdrawal_nominal_capacity=self._withdrawal_nominal_capacity, - reservoir_capacity=self._reservoir_capacity, - efficiency=self._efficiency, - initial_level=self._initial_level, - initial_level_optim=self._initial_level_optim, - enabled=self._enabled, - ) - - -class STStorage: - def __init__(self, storage_service, area_id: str, name: str, properties: Optional[STStorageProperties] = None): # type: ignore # TODO: Find a way to avoid circular imports - self._area_id = area_id - self._storage_service = storage_service - self._name = name - self._id = transform_name_to_id(name) - self._properties = properties or STStorageProperties() - - # TODO: Add matrices. - - @property - def area_id(self) -> str: - return self._area_id - - @property - def name(self) -> str: - return self._name - - @property - def id(self) -> str: - return self._id - - @property - def properties(self) -> STStorageProperties: - return self._properties - - def update_properties(self, properties: STStorageProperties) -> None: - new_properties = self._storage_service.update_st_storage_properties( - self, properties - ) - self._properties = new_properties - - def get_pmax_injection(self) -> pd.DataFrame: - return self._storage_service.get_storage_matrix( - self, STStorageMatrixName.PMAX_INJECTION - ) - - def get_pmax_withdrawal(self) -> pd.DataFrame: - return self._storage_service.get_storage_matrix( - self, STStorageMatrixName.PMAX_WITHDRAWAL - ) - - def get_lower_rule_curve(self) -> pd.DataFrame: - return self._storage_service.get_storage_matrix( - self, STStorageMatrixName.LOWER_CURVE_RULE - ) - - def get_upper_rule_curve(self) -> pd.DataFrame: - return 
self._storage_service.get_storage_matrix( - self, STStorageMatrixName.UPPER_RULE_CURVE - ) - - def get_storage_inflows(self) -> pd.DataFrame: - return self._storage_service.get_storage_matrix( - self, STStorageMatrixName.INFLOWS - ) - - def upload_pmax_injection(self, p_max_injection_matrix: pd.DataFrame) -> None: - return self._storage_service.upload_storage_matrix( - self, STStorageMatrixName.PMAX_INJECTION, p_max_injection_matrix - ) - - def upload_pmax_withdrawal(self, p_max_withdrawal_matrix: pd.DataFrame) -> None: - return self._storage_service.upload_storage_matrix( - self, STStorageMatrixName.PMAX_WITHDRAWAL, p_max_withdrawal_matrix - ) - - def upload_lower_rule_curve(self, lower_rule_curve_matrix: pd.DataFrame) -> None: - return self._storage_service.upload_storage_matrix( - self, STStorageMatrixName.LOWER_CURVE_RULE, lower_rule_curve_matrix - ) - - def upload_upper_rule_curve(self, upper_rule_curve_matrix: pd.DataFrame) -> None: - return self._storage_service.upload_storage_matrix( - self, STStorageMatrixName.UPPER_RULE_CURVE, upper_rule_curve_matrix - ) - - def upload_storage_inflows(self, inflows_matrix: pd.DataFrame) -> None: - return self._storage_service.upload_storage_matrix( - self, STStorageMatrixName.INFLOWS, inflows_matrix - ) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +from enum import Enum +from typing import Optional, Any + +import pandas as pd +from pydantic import BaseModel, computed_field +from pydantic.alias_generators import to_camel + +from antares.tools.contents_tool import transform_name_to_id +from antares.tools.ini_tool import check_if_none + + +class STStorageGroup(Enum): + # todo: this class should disappear with Simulator version 9.1 + PSP_OPEN = "PSP_open" + PSP_CLOSED = "PSP_closed" + PONDAGE = "Pondage" + BATTERY = "Battery" + OTHER1 = "Other1" + OTHER2 = "Other2" + OTHER3 = "Other3" + OTHER4 = "Other4" + OTHER5 = "Other5" + + +class STStorageMatrixName(Enum): + PMAX_INJECTION = "pmax_injection" + PMAX_WITHDRAWAL = "pmax_withdrawal" + LOWER_CURVE_RULE = "lower_rule_curve" + UPPER_RULE_CURVE = "upper_rule_curve" + INFLOWS = "inflows" + + +class STStorageProperties(BaseModel, extra="forbid", populate_by_name=True, alias_generator=to_camel): + """ + Properties of a short-term storage system read from the configuration files. + + All aliases match the name of the corresponding field in the INI files. 
+ """ + + group: Optional[STStorageGroup] = None + injection_nominal_capacity: Optional[float] = None + withdrawal_nominal_capacity: Optional[float] = None + reservoir_capacity: Optional[float] = None + efficiency: Optional[float] = None + initial_level: Optional[float] = None + initial_level_optim: Optional[bool] = None + # v880 + enabled: Optional[bool] = None + + +class STStoragePropertiesLocal(BaseModel): + def __init__( + self, + st_storage_name: str, + st_storage_properties: Optional[STStorageProperties] = None, + **kwargs: Optional[Any], + ): + super().__init__(**kwargs) + st_storage_properties = st_storage_properties or STStorageProperties() + self._st_storage_name = st_storage_name + self._group = check_if_none(st_storage_properties.group, STStorageGroup.OTHER1) + self._injection_nominal_capacity = check_if_none(st_storage_properties.injection_nominal_capacity, 0) + self._withdrawal_nominal_capacity = check_if_none(st_storage_properties.withdrawal_nominal_capacity, 0) + self._reservoir_capacity = check_if_none(st_storage_properties.reservoir_capacity, 0) + self._efficiency = check_if_none(st_storage_properties.efficiency, 1) + self._initial_level = check_if_none(st_storage_properties.initial_level, 0.5) + self._initial_level_optim = check_if_none(st_storage_properties.initial_level_optim, False) + self._enabled = check_if_none(st_storage_properties.enabled, True) + + @property + def st_storage_name(self) -> str: + return self._st_storage_name + + @computed_field # type: ignore[misc] + @property + def list_ini_fields(self) -> dict[str, dict[str, str]]: + return { + f"{self._st_storage_name}": { + "name": self._st_storage_name, + "group": self._group.value, + "injectionnominalcapacity": f"{self._injection_nominal_capacity:.6f}", + "withdrawalnominalcapacity": f"{self._withdrawal_nominal_capacity:.6f}", + "reservoircapacity": f"{self._reservoir_capacity:.6f}", + "efficiency": f"{self._efficiency:.6f}", + "initiallevel": f"{self._initial_level:.6f}", + 
"initialleveloptim": f"{self._initial_level_optim}".lower(), + "enabled": f"{self._enabled}".lower(), + } + } + + def yield_st_storage_properties(self) -> STStorageProperties: + return STStorageProperties( + group=self._group, + injection_nominal_capacity=self._injection_nominal_capacity, + withdrawal_nominal_capacity=self._withdrawal_nominal_capacity, + reservoir_capacity=self._reservoir_capacity, + efficiency=self._efficiency, + initial_level=self._initial_level, + initial_level_optim=self._initial_level_optim, + enabled=self._enabled, + ) + + +class STStorage: + def __init__(self, storage_service, area_id: str, name: str, properties: Optional[STStorageProperties] = None): # type: ignore # TODO: Find a way to avoid circular imports + self._area_id = area_id + self._storage_service = storage_service + self._name = name + self._id = transform_name_to_id(name) + self._properties = properties or STStorageProperties() + + # TODO: Add matrices. + + @property + def area_id(self) -> str: + return self._area_id + + @property + def name(self) -> str: + return self._name + + @property + def id(self) -> str: + return self._id + + @property + def properties(self) -> STStorageProperties: + return self._properties + + def update_properties(self, properties: STStorageProperties) -> None: + new_properties = self._storage_service.update_st_storage_properties(self, properties) + self._properties = new_properties + + def get_pmax_injection(self) -> pd.DataFrame: + return self._storage_service.get_storage_matrix(self, STStorageMatrixName.PMAX_INJECTION) + + def get_pmax_withdrawal(self) -> pd.DataFrame: + return self._storage_service.get_storage_matrix(self, STStorageMatrixName.PMAX_WITHDRAWAL) + + def get_lower_rule_curve(self) -> pd.DataFrame: + return self._storage_service.get_storage_matrix(self, STStorageMatrixName.LOWER_CURVE_RULE) + + def get_upper_rule_curve(self) -> pd.DataFrame: + return self._storage_service.get_storage_matrix(self, STStorageMatrixName.UPPER_RULE_CURVE) + 
+ def get_storage_inflows(self) -> pd.DataFrame: + return self._storage_service.get_storage_matrix(self, STStorageMatrixName.INFLOWS) + + def upload_pmax_injection(self, p_max_injection_matrix: pd.DataFrame) -> None: + return self._storage_service.upload_storage_matrix( + self, STStorageMatrixName.PMAX_INJECTION, p_max_injection_matrix + ) + + def upload_pmax_withdrawal(self, p_max_withdrawal_matrix: pd.DataFrame) -> None: + return self._storage_service.upload_storage_matrix( + self, STStorageMatrixName.PMAX_WITHDRAWAL, p_max_withdrawal_matrix + ) + + def upload_lower_rule_curve(self, lower_rule_curve_matrix: pd.DataFrame) -> None: + return self._storage_service.upload_storage_matrix( + self, STStorageMatrixName.LOWER_CURVE_RULE, lower_rule_curve_matrix + ) + + def upload_upper_rule_curve(self, upper_rule_curve_matrix: pd.DataFrame) -> None: + return self._storage_service.upload_storage_matrix( + self, STStorageMatrixName.UPPER_RULE_CURVE, upper_rule_curve_matrix + ) + + def upload_storage_inflows(self, inflows_matrix: pd.DataFrame) -> None: + return self._storage_service.upload_storage_matrix(self, STStorageMatrixName.INFLOWS, inflows_matrix) diff --git a/src/antares/model/study.py b/src/antares/model/study.py index a709d494..4f6c9953 100644 --- a/src/antares/model/study.py +++ b/src/antares/model/study.py @@ -1,315 +1,305 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -import logging -import os -import time -from pathlib import Path -from types import MappingProxyType -from typing import Optional, Dict, List - -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.api_conf.request_wrapper import RequestWrapper -from antares.config.local_configuration import LocalConfiguration -from antares.exceptions.exceptions import APIError, StudyCreationError -from antares.model.area import Area, AreaProperties, AreaUi -from antares.model.binding_constraint import ( - BindingConstraint, - BindingConstraintProperties, - ConstraintTerm, -) -from antares.model.link import Link, LinkUi, LinkProperties -from antares.model.settings import StudySettings -from antares.service.api_services.study_api import _returns_study_settings -from antares.service.base_services import BaseStudyService -from antares.service.service_factory import ServiceFactory, ServiceReader - -""" -The study module defines the data model for antares study. -It represents a power system involving areas and power flows -between these areas. -Optional attribute _api_id defined for studies being stored in web -_study_path if stored in a disk -""" - - -def create_study_api( - study_name: str, - version: str, - api_config: APIconf, - settings: Optional[StudySettings] = None, -) -> "Study": - """ - Args: - - study_name: antares study name to be created - version: antares version - api_config: host and token config for API - settings: study settings. If not provided, AntaresWeb will use its default values. 
- - Raises: - MissingTokenError if api_token is missing - StudyCreationError if an HTTP Exception occurs - """ - - session = api_config.set_up_api_conf() - wrapper = RequestWrapper(session) - base_url = f"{api_config.get_host()}/api/v1" - - try: - url = f"{base_url}/studies?name={study_name}&version={version}" - response = wrapper.post(url) - study_id = response.json() - - study_settings = _returns_study_settings( - base_url, study_id, wrapper, False, settings - ) - - except APIError as e: - raise StudyCreationError(study_name, e.message) from e - return Study( - study_name, version, ServiceFactory(api_config, study_id), study_settings - ) - - -def _verify_study_already_exists(study_directory: Path) -> None: - if os.path.exists(study_directory): - raise FileExistsError(f"Study {study_directory} already exists.") - - -def _directories_can_be_read(local_path: Path) -> None: - if local_path.is_dir(): - try: - for item in local_path.iterdir(): - if item.is_dir(): - next(item.iterdir()) - except PermissionError: - raise PermissionError(f"Some content cannot be accessed in {local_path}") - - -def create_study_local( - study_name: str, - version: str, - local_config: LocalConfiguration, - settings: Optional[StudySettings] = None, -) -> "Study": - """ - Create a directory structure for the study with empty files. - Args: - study_name: antares study name to be created - version: antares version for study - local_config: Local configuration options for example directory in which to story the study - settings: study settings. If not provided, AntaresWeb will use its default values. 
- - Raises: - FileExistsError if the study already exists in the given location - ValueError if the provided directory does not exist - - """ - - def _directory_not_exists(local_path: Path) -> None: - if local_path is None or not os.path.exists(local_path): - raise ValueError(f"Provided directory {local_path} does not exist.") - - _directory_not_exists(local_config.local_path) - - study_directory = local_config.local_path / study_name - - _verify_study_already_exists(study_directory) - - # Create the main study directory - os.makedirs(study_directory, exist_ok=True) - - # Create study.antares file with timestamps and study_name - antares_file_path = os.path.join(study_directory, "study.antares") - current_time = int(time.time()) - antares_content = f"""[antares] -version = {version} -caption = {study_name} -created = {current_time} -lastsave = {current_time} -author = Unknown -""" - with open(antares_file_path, "w") as antares_file: - antares_file.write(antares_content) - - # Create Desktop.ini file - desktop_ini_path = study_directory / "Desktop.ini" - desktop_ini_content = f"""[.ShellClassInfo] -IconFile = settings/resources/study.ico -IconIndex = 0 -InfoTip = Antares Study {version}: {study_name} -""" - with open(desktop_ini_path, "w") as desktop_ini_file: - desktop_ini_file.write(desktop_ini_content) - - # Create subdirectories - subdirectories = ["input", "layers", "output", "setting", "user"] - for subdirectory in subdirectories: - subdirectory_path = os.path.join(study_directory, subdirectory) - os.makedirs(subdirectory_path, exist_ok=True) - - logging.info(f"Study successfully created: {study_name}") - return Study( - name=study_name, - version=version, - service_factory=ServiceFactory(config=local_config, study_name=study_name), - settings=settings, - mode="create", - ) - - -def read_study_local( - study_name: str, version: str, local_config: LocalConfiguration -) -> "Study": - """ - Create a directory structure for the study with empty files. 
- Args: - study_name: antares study name to read - version: antares version for study - settings: study settings. If not provided, AntaresWeb will use its default values. - - Raises: - PermissionError if the study cannot be read - ValueError if the provided directory does not exist - - """ - - def _directory_not_exists(local_path: Path) -> None: - if local_path is None or not os.path.exists(local_path): - raise ValueError(f"Provided directory {local_path} does not exist.") - - _directory_not_exists(local_config.local_path) - study_directory = local_config.local_path / study_name - _directories_can_be_read(study_directory) - - return Study( - name=study_name, - version=version, - service_factory=ServiceReader(config=local_config, study_name=study_name), - mode="read", - ) - - -class Study: - def __init__( - self, - name: str, - version: str, - service_factory, - settings: Optional[StudySettings] = None, - mode: str = "create", - ): - self.name = name - self.version = version - if mode != "read": - self._study_service = service_factory.create_study_service() - self._area_service = service_factory.create_area_service() - self._link_service = service_factory.create_link_service() - self._binding_constraints_service = ( - service_factory.create_binding_constraints_service() - ) - self._settings = settings or StudySettings() - self._areas: Dict[str, Area] = dict() - self._links: Dict[str, Link] = dict() - self._binding_constraints: Dict[str, BindingConstraint] = dict() - else: - self._study_service = service_factory.read_study_service() - self._binding_constraints: Dict[str, BindingConstraint] = dict() - self._link_service = service_factory.create_link_service() - self._areas: Dict[str, Area] = dict() - self._links: Dict[str, Link] = dict() - - @property - def service(self) -> BaseStudyService: - return self._study_service - - def get_areas(self) -> MappingProxyType[str, Area]: - return MappingProxyType(dict(sorted(self._areas.items()))) - - def get_links(self) -> 
MappingProxyType[str, Link]: - return MappingProxyType(self._links) - - def get_settings(self) -> StudySettings: - return self._settings - - def get_binding_constraints(self) -> MappingProxyType[str, BindingConstraint]: - return MappingProxyType(self._binding_constraints) - - def create_area( - self, - area_name: str, - *, - properties: Optional[AreaProperties] = None, - ui: Optional[AreaUi] = None, - ) -> Area: - area = self._area_service.create_area(area_name, properties, ui) - self._areas[area.id] = area - return area - - def delete_area(self, area: Area) -> None: - self._area_service.delete_area(area) - self._areas.pop(area.id) - - def create_link( - self, - *, - area_from: Area, - area_to: Area, - properties: Optional[LinkProperties] = None, - ui: Optional[LinkUi] = None, - existing_areas: Optional[MappingProxyType[str, Area]] = None, - ) -> Link: - link = self._link_service.create_link( - area_from, area_to, properties, ui, existing_areas - ) - self._links[link.name] = link - return link - - def delete_link(self, link: Link) -> None: - self._link_service.delete_link(link) - self._links.pop(link.name) - - def create_binding_constraint( - self, - *, - name: str, - properties: Optional[BindingConstraintProperties] = None, - terms: Optional[List[ConstraintTerm]] = None, - less_term_matrix: Optional[pd.DataFrame] = None, - equal_term_matrix: Optional[pd.DataFrame] = None, - greater_term_matrix: Optional[pd.DataFrame] = None, - ) -> BindingConstraint: - constraint = self._binding_constraints_service.create_binding_constraint( - name, - properties, - terms, - less_term_matrix, - equal_term_matrix, - greater_term_matrix, - ) - self._binding_constraints[constraint.id] = constraint - return constraint - - def update_settings(self, settings: StudySettings) -> None: - new_settings = self._study_service.update_study_settings(settings) - if new_settings: - self._settings = new_settings - - def delete_binding_constraint(self, constraint: BindingConstraint) -> None: - 
self._study_service.delete_binding_constraint(constraint) - self._binding_constraints.pop(constraint.id) - - def delete(self, children: bool = False) -> None: - self._study_service.delete(children) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import logging +import os +import time +from pathlib import Path +from types import MappingProxyType +from typing import Optional, Dict, List + +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.api_conf.request_wrapper import RequestWrapper +from antares.config.local_configuration import LocalConfiguration +from antares.exceptions.exceptions import APIError, StudyCreationError +from antares.model.area import Area, AreaProperties, AreaUi +from antares.model.binding_constraint import ( + BindingConstraint, + BindingConstraintProperties, + ConstraintTerm, +) +from antares.model.link import Link, LinkUi, LinkProperties +from antares.model.settings import StudySettings +from antares.service.api_services.study_api import _returns_study_settings +from antares.service.base_services import BaseStudyService +from antares.service.service_factory import ServiceFactory, ServiceReader + +""" +The study module defines the data model for antares study. +It represents a power system involving areas and power flows +between these areas. 
+Optional attribute _api_id defined for studies being stored in web +_study_path if stored in a disk +""" + + +def create_study_api( + study_name: str, + version: str, + api_config: APIconf, + settings: Optional[StudySettings] = None, +) -> "Study": + """ + Args: + + study_name: antares study name to be created + version: antares version + api_config: host and token config for API + settings: study settings. If not provided, AntaresWeb will use its default values. + + Raises: + MissingTokenError if api_token is missing + StudyCreationError if an HTTP Exception occurs + """ + + session = api_config.set_up_api_conf() + wrapper = RequestWrapper(session) + base_url = f"{api_config.get_host()}/api/v1" + + try: + url = f"{base_url}/studies?name={study_name}&version={version}" + response = wrapper.post(url) + study_id = response.json() + + study_settings = _returns_study_settings(base_url, study_id, wrapper, False, settings) + + except APIError as e: + raise StudyCreationError(study_name, e.message) from e + return Study(study_name, version, ServiceFactory(api_config, study_id), study_settings) + + +def _verify_study_already_exists(study_directory: Path) -> None: + if os.path.exists(study_directory): + raise FileExistsError(f"Study {study_directory} already exists.") + + +def _directories_can_be_read(local_path: Path) -> None: + if local_path.is_dir(): + try: + for item in local_path.iterdir(): + if item.is_dir(): + next(item.iterdir()) + except PermissionError: + raise PermissionError(f"Some content cannot be accessed in {local_path}") + + +def create_study_local( + study_name: str, + version: str, + local_config: LocalConfiguration, + settings: Optional[StudySettings] = None, +) -> "Study": + """ + Create a directory structure for the study with empty files. + Args: + study_name: antares study name to be created + version: antares version for study + local_config: Local configuration options for example directory in which to story the study + settings: study settings. 
If not provided, AntaresWeb will use its default values. + + Raises: + FileExistsError if the study already exists in the given location + ValueError if the provided directory does not exist + + """ + + def _directory_not_exists(local_path: Path) -> None: + if local_path is None or not os.path.exists(local_path): + raise ValueError(f"Provided directory {local_path} does not exist.") + + _directory_not_exists(local_config.local_path) + + study_directory = local_config.local_path / study_name + + _verify_study_already_exists(study_directory) + + # Create the main study directory + os.makedirs(study_directory, exist_ok=True) + + # Create study.antares file with timestamps and study_name + antares_file_path = os.path.join(study_directory, "study.antares") + current_time = int(time.time()) + antares_content = f"""[antares] +version = {version} +caption = {study_name} +created = {current_time} +lastsave = {current_time} +author = Unknown +""" + with open(antares_file_path, "w") as antares_file: + antares_file.write(antares_content) + + # Create Desktop.ini file + desktop_ini_path = study_directory / "Desktop.ini" + desktop_ini_content = f"""[.ShellClassInfo] +IconFile = settings/resources/study.ico +IconIndex = 0 +InfoTip = Antares Study {version}: {study_name} +""" + with open(desktop_ini_path, "w") as desktop_ini_file: + desktop_ini_file.write(desktop_ini_content) + + # Create subdirectories + subdirectories = ["input", "layers", "output", "setting", "user"] + for subdirectory in subdirectories: + subdirectory_path = os.path.join(study_directory, subdirectory) + os.makedirs(subdirectory_path, exist_ok=True) + + logging.info(f"Study successfully created: {study_name}") + return Study( + name=study_name, + version=version, + service_factory=ServiceFactory(config=local_config, study_name=study_name), + settings=settings, + mode="create", + ) + + +def read_study_local(study_name: str, version: str, local_config: LocalConfiguration) -> "Study": + """ + Create a directory 
structure for the study with empty files. + Args: + study_name: antares study name to read + version: antares version for study + settings: study settings. If not provided, AntaresWeb will use its default values. + + Raises: + PermissionError if the study cannot be read + ValueError if the provided directory does not exist + + """ + + def _directory_not_exists(local_path: Path) -> None: + if local_path is None or not os.path.exists(local_path): + raise ValueError(f"Provided directory {local_path} does not exist.") + + _directory_not_exists(local_config.local_path) + study_directory = local_config.local_path / study_name + _directories_can_be_read(study_directory) + + return Study( + name=study_name, + version=version, + service_factory=ServiceReader(config=local_config, study_name=study_name), + mode="read", + ) + + +class Study: + def __init__( + self, + name: str, + version: str, + service_factory, + settings: Optional[StudySettings] = None, + mode: str = "create", + ): + self.name = name + self.version = version + if mode != "read": + self._study_service = service_factory.create_study_service() + self._area_service = service_factory.create_area_service() + self._link_service = service_factory.create_link_service() + self._binding_constraints_service = service_factory.create_binding_constraints_service() + self._settings = settings or StudySettings() + self._areas: Dict[str, Area] = dict() + self._links: Dict[str, Link] = dict() + self._binding_constraints: Dict[str, BindingConstraint] = dict() + else: + self._study_service = service_factory.read_study_service() + self._binding_constraints: Dict[str, BindingConstraint] = dict() + self._link_service = service_factory.create_link_service() + self._areas: Dict[str, Area] = dict() + self._links: Dict[str, Link] = dict() + + @property + def service(self) -> BaseStudyService: + return self._study_service + + def get_areas(self) -> MappingProxyType[str, Area]: + return MappingProxyType(dict(sorted(self._areas.items()))) 
+ + def get_links(self) -> MappingProxyType[str, Link]: + return MappingProxyType(self._links) + + def get_settings(self) -> StudySettings: + return self._settings + + def get_binding_constraints(self) -> MappingProxyType[str, BindingConstraint]: + return MappingProxyType(self._binding_constraints) + + def create_area( + self, + area_name: str, + *, + properties: Optional[AreaProperties] = None, + ui: Optional[AreaUi] = None, + ) -> Area: + area = self._area_service.create_area(area_name, properties, ui) + self._areas[area.id] = area + return area + + def delete_area(self, area: Area) -> None: + self._area_service.delete_area(area) + self._areas.pop(area.id) + + def create_link( + self, + *, + area_from: Area, + area_to: Area, + properties: Optional[LinkProperties] = None, + ui: Optional[LinkUi] = None, + existing_areas: Optional[MappingProxyType[str, Area]] = None, + ) -> Link: + link = self._link_service.create_link(area_from, area_to, properties, ui, existing_areas) + self._links[link.name] = link + return link + + def delete_link(self, link: Link) -> None: + self._link_service.delete_link(link) + self._links.pop(link.name) + + def create_binding_constraint( + self, + *, + name: str, + properties: Optional[BindingConstraintProperties] = None, + terms: Optional[List[ConstraintTerm]] = None, + less_term_matrix: Optional[pd.DataFrame] = None, + equal_term_matrix: Optional[pd.DataFrame] = None, + greater_term_matrix: Optional[pd.DataFrame] = None, + ) -> BindingConstraint: + constraint = self._binding_constraints_service.create_binding_constraint( + name, + properties, + terms, + less_term_matrix, + equal_term_matrix, + greater_term_matrix, + ) + self._binding_constraints[constraint.id] = constraint + return constraint + + def update_settings(self, settings: StudySettings) -> None: + new_settings = self._study_service.update_study_settings(settings) + if new_settings: + self._settings = new_settings + + def delete_binding_constraint(self, constraint: 
BindingConstraint) -> None: + self._study_service.delete_binding_constraint(constraint) + self._binding_constraints.pop(constraint.id) + + def delete(self, children: bool = False) -> None: + self._study_service.delete(children) diff --git a/src/antares/model/thermal.py b/src/antares/model/thermal.py index 6f08c4e0..671cb954 100644 --- a/src/antares/model/thermal.py +++ b/src/antares/model/thermal.py @@ -1,330 +1,297 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from enum import Enum -from typing import Optional, Any - -import pandas as pd -from pydantic import BaseModel, computed_field - -from antares.model.cluster import ClusterProperties -from antares.tools.contents_tool import transform_name_to_id -from antares.tools.ini_tool import check_if_none - - -class LawOption(Enum): - """ - Law options used for series generation. - The UNIFORM `law` is used by default. - """ - - UNIFORM = "uniform" - GEOMETRIC = "geometric" - - -class ThermalClusterGroup(Enum): - NUCLEAR = "Nuclear" - LIGNITE = "Lignite" - HARD_COAL = "Hard Coal" - GAS = "Gas" - OIL = "Oil" - MIXED_FUEL = "Mixed Fuel" - OTHER1 = "Other 1" - OTHER2 = "Other 2" - OTHER3 = "Other 3" - OTHER4 = "Other 4" - - -class LocalTSGenerationBehavior(Enum): - """ - Options related to time series generation. - The option `USE_GLOBAL` is used by default. - - Attributes: - USE_GLOBAL: Use the global time series parameters. - FORCE_NO_GENERATION: Do not generate time series. - FORCE_GENERATION: Force the generation of time series. 
- """ - - USE_GLOBAL = "use global" - FORCE_NO_GENERATION = "force no generation" - FORCE_GENERATION = "force generation" - - -class ThermalCostGeneration(Enum): - SET_MANUALLY = "SetManually" - USE_COST_TIME_SERIES = "useCostTimeseries" - - -class ThermalClusterProperties(ClusterProperties): - """ - Thermal cluster configuration model. - This model describes the configuration parameters for a thermal cluster. - """ - - group: Optional[ThermalClusterGroup] = None - gen_ts: Optional[LocalTSGenerationBehavior] = None - min_stable_power: Optional[float] = None - min_up_time: Optional[int] = None - min_down_time: Optional[int] = None - must_run: Optional[bool] = None - spinning: Optional[float] = None - volatility_forced: Optional[float] = None - volatility_planned: Optional[float] = None - law_forced: Optional[LawOption] = None - law_planned: Optional[LawOption] = None - marginal_cost: Optional[float] = None - spread_cost: Optional[float] = None - fixed_cost: Optional[float] = None - startup_cost: Optional[float] = None - market_bid_cost: Optional[float] = None - co2: Optional[float] = None - # version 860 - nh3: Optional[float] = None - so2: Optional[float] = None - nox: Optional[float] = None - pm2_5: Optional[float] = None - pm5: Optional[float] = None - pm10: Optional[float] = None - nmvoc: Optional[float] = None - op1: Optional[float] = None - op2: Optional[float] = None - op3: Optional[float] = None - op4: Optional[float] = None - op5: Optional[float] = None - # version 870 - cost_generation: Optional[ThermalCostGeneration] = None - efficiency: Optional[float] = None - variable_o_m_cost: Optional[float] = None - - -class ThermalClusterPropertiesLocal(BaseModel): - def __init__( - self, - thermal_name: str, - thermal_cluster_properties: Optional[ThermalClusterProperties] = None, - **kwargs: Optional[Any], - ): - super().__init__(**kwargs) - thermal_cluster_properties = ( - thermal_cluster_properties or ThermalClusterProperties() - ) - self._thermal_name = 
thermal_name - self._enabled = check_if_none(thermal_cluster_properties.enabled, True) - self._unit_count = check_if_none(thermal_cluster_properties.unit_count, 1) - self._nominal_capacity = check_if_none( - thermal_cluster_properties.nominal_capacity, 0 - ) - self._group = ( - # The value OTHER1 matches AntaresWeb if a cluster is created via API without providing a group - thermal_cluster_properties.group - or ThermalClusterGroup.OTHER1 - ) - self._gen_ts = check_if_none( - thermal_cluster_properties.gen_ts, LocalTSGenerationBehavior.USE_GLOBAL - ) - self._min_stable_power = check_if_none( - thermal_cluster_properties.min_stable_power, 0 - ) - self._min_up_time = check_if_none(thermal_cluster_properties.min_up_time, 1) - self._min_down_time = check_if_none(thermal_cluster_properties.min_down_time, 1) - self._must_run = check_if_none(thermal_cluster_properties.must_run, False) - self._spinning = check_if_none(thermal_cluster_properties.spinning, 0) - self._volatility_forced = check_if_none( - thermal_cluster_properties.volatility_forced, 0 - ) - self._volatility_planned = check_if_none( - thermal_cluster_properties.volatility_planned, 0 - ) - self._law_forced = check_if_none( - thermal_cluster_properties.law_forced, LawOption.UNIFORM - ) - self._law_planned = check_if_none( - thermal_cluster_properties.law_planned, LawOption.UNIFORM - ) - self._marginal_cost = check_if_none(thermal_cluster_properties.marginal_cost, 0) - self._spread_cost = check_if_none(thermal_cluster_properties.spread_cost, 0) - self._fixed_cost = check_if_none(thermal_cluster_properties.fixed_cost, 0) - self._startup_cost = check_if_none(thermal_cluster_properties.startup_cost, 0) - self._market_bid_cost = check_if_none( - thermal_cluster_properties.market_bid_cost, 0 - ) - self._co2 = check_if_none(thermal_cluster_properties.co2, 0) - self._nh3 = check_if_none(thermal_cluster_properties.nh3, 0) - self._so2 = check_if_none(thermal_cluster_properties.so2, 0) - self._nox = 
check_if_none(thermal_cluster_properties.nox, 0) - self._pm2_5 = check_if_none(thermal_cluster_properties.pm2_5, 0) - self._pm5 = check_if_none(thermal_cluster_properties.pm5, 0) - self._pm10 = check_if_none(thermal_cluster_properties.pm10, 0) - self._nmvoc = check_if_none(thermal_cluster_properties.nmvoc, 0) - self._op1 = check_if_none(thermal_cluster_properties.op1, 0) - self._op2 = check_if_none(thermal_cluster_properties.op2, 0) - self._op3 = check_if_none(thermal_cluster_properties.op3, 0) - self._op4 = check_if_none(thermal_cluster_properties.op4, 0) - self._op5 = check_if_none(thermal_cluster_properties.op5, 0) - self._cost_generation = check_if_none( - thermal_cluster_properties.cost_generation, - ThermalCostGeneration.SET_MANUALLY, - ) - self._efficiency = check_if_none(thermal_cluster_properties.efficiency, 100) - self._variable_o_m_cost = check_if_none( - thermal_cluster_properties.variable_o_m_cost, 0 - ) - - @computed_field # type: ignore[misc] - @property - def list_ini_fields(self) -> dict[str, dict[str, str]]: - return { - f"{self._thermal_name}": { - "group": self._group.value, - "name": self._thermal_name, - "enabled": f"{self._enabled}", - "unitcount": f"{self._unit_count}", - "nominalcapacity": f"{self._nominal_capacity:.6f}", - "gen-ts": self._gen_ts.value, - "min-stable-power": f"{self._min_stable_power:.6f}", - "min-up-time": f"{self._min_up_time}", - "min-down-time": f"{self._min_down_time}", - "must-run": f"{self._must_run}", - "spinning": f"{self._spinning:.6f}", - "volatility.forced": f"{self._volatility_forced:.6f}", - "volatility.planned": f"{self._volatility_planned:.6f}", - "law.forced": self._law_forced.value, - "law.planned": self._law_planned.value, - "marginal-cost": f"{self._marginal_cost:.6f}", - "spread-cost": f"{self._spread_cost:.6f}", - "fixed-cost": f"{self._fixed_cost:.6f}", - "startup-cost": f"{self._startup_cost:.6f}", - "market-bid-cost": f"{self._market_bid_cost:.6f}", - "co2": f"{self._co2:.6f}", - "nh3": 
f"{self._nh3:.6f}", - "so2": f"{self._so2:.6f}", - "nox": f"{self._nox:.6f}", - "pm2_5": f"{self._pm2_5:.6f}", - "pm5": f"{self._pm5:.6f}", - "pm10": f"{self._pm10:.6f}", - "nmvoc": f"{self._nmvoc:.6f}", - "op1": f"{self._op1:.6f}", - "op2": f"{self._op2:.6f}", - "op3": f"{self._op3:.6f}", - "op4": f"{self._op4:.6f}", - "op5": f"{self._op5:.6f}", - "costgeneration": self._cost_generation.value, - "efficiency": f"{self._efficiency:.6f}", - "variableomcost": f"{self._variable_o_m_cost:.6f}", - } - } - - def yield_thermal_cluster_properties(self) -> ThermalClusterProperties: - return ThermalClusterProperties( - group=self._group, - enabled=self._enabled, - unit_count=self._unit_count, - nominal_capacity=self._nominal_capacity, - gen_ts=self._gen_ts, - min_stable_power=self._min_stable_power, - min_up_time=self._min_up_time, - min_down_time=self._min_down_time, - must_run=self._must_run, - spinning=self._spinning, - volatility_forced=self._volatility_forced, - volatility_planned=self._volatility_planned, - law_forced=self._law_forced, - law_planned=self._law_planned, - marginal_cost=self._marginal_cost, - spread_cost=self._spread_cost, - fixed_cost=self._fixed_cost, - startup_cost=self._startup_cost, - market_bid_cost=self._market_bid_cost, - co2=self._co2, - nh3=self._nh3, - so2=self._so2, - nox=self._nox, - pm2_5=self._pm2_5, - pm5=self._pm5, - pm10=self._pm10, - nmvoc=self._nmvoc, - op1=self._op1, - op2=self._op2, - op3=self._op3, - op4=self._op4, - op5=self._op5, - cost_generation=self._cost_generation, - efficiency=self._efficiency, - variable_o_m_cost=self._variable_o_m_cost, - ) - - -class ThermalClusterMatrixName(Enum): - PREPRO_DATA = "data" - PREPRO_MODULATION = "modulation" - SERIES = "series" - SERIES_CO2_COST = "CO2Cost" - SERIES_FUEL_COST = "fuelCost" - - -class ThermalCluster: - def __init__(self, thermal_service, area_id: str, name: str, properties: Optional[ThermalClusterProperties] = None): # type: ignore # TODO: Find a way to avoid circular imports - 
self._area_id = area_id - self._thermal_service = thermal_service - self._name = name - self._id = transform_name_to_id(name) - self._properties = properties or ThermalClusterProperties() - - # TODO: Add matrices. - - @property - def area_id(self) -> str: - return self._area_id - - @property - def name(self) -> str: - return self._name - - @property - def id(self) -> str: - return self._id - - @property - def properties(self) -> ThermalClusterProperties: - return self._properties - - def update_properties(self, properties: ThermalClusterProperties) -> None: - new_properties = self._thermal_service.update_thermal_properties( - self, properties - ) - self._properties = new_properties - - def get_prepro_data_matrix(self) -> pd.DataFrame: - return self._thermal_service.get_thermal_matrix( - self, ThermalClusterMatrixName.PREPRO_DATA - ) - - def get_prepro_modulation_matrix(self) -> pd.DataFrame: - return self._thermal_service.get_thermal_matrix( - self, ThermalClusterMatrixName.PREPRO_MODULATION - ) - - def get_series_matrix(self) -> pd.DataFrame: - return self._thermal_service.get_thermal_matrix( - self, ThermalClusterMatrixName.SERIES - ) - - def get_co2_cost_matrix(self) -> pd.DataFrame: - return self._thermal_service.get_thermal_matrix( - self, ThermalClusterMatrixName.SERIES_CO2_COST - ) - - def get_fuel_cost_matrix(self) -> pd.DataFrame: - return self._thermal_service.get_thermal_matrix( - self, ThermalClusterMatrixName.SERIES_FUEL_COST - ) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +from enum import Enum +from typing import Optional, Any + +import pandas as pd +from pydantic import BaseModel, computed_field + +from antares.model.cluster import ClusterProperties +from antares.tools.contents_tool import transform_name_to_id +from antares.tools.ini_tool import check_if_none + + +class LawOption(Enum): + """ + Law options used for series generation. + The UNIFORM `law` is used by default. + """ + + UNIFORM = "uniform" + GEOMETRIC = "geometric" + + +class ThermalClusterGroup(Enum): + NUCLEAR = "Nuclear" + LIGNITE = "Lignite" + HARD_COAL = "Hard Coal" + GAS = "Gas" + OIL = "Oil" + MIXED_FUEL = "Mixed Fuel" + OTHER1 = "Other 1" + OTHER2 = "Other 2" + OTHER3 = "Other 3" + OTHER4 = "Other 4" + + +class LocalTSGenerationBehavior(Enum): + """ + Options related to time series generation. + The option `USE_GLOBAL` is used by default. + + Attributes: + USE_GLOBAL: Use the global time series parameters. + FORCE_NO_GENERATION: Do not generate time series. + FORCE_GENERATION: Force the generation of time series. + """ + + USE_GLOBAL = "use global" + FORCE_NO_GENERATION = "force no generation" + FORCE_GENERATION = "force generation" + + +class ThermalCostGeneration(Enum): + SET_MANUALLY = "SetManually" + USE_COST_TIME_SERIES = "useCostTimeseries" + + +class ThermalClusterProperties(ClusterProperties): + """ + Thermal cluster configuration model. + This model describes the configuration parameters for a thermal cluster. 
+ """ + + group: Optional[ThermalClusterGroup] = None + gen_ts: Optional[LocalTSGenerationBehavior] = None + min_stable_power: Optional[float] = None + min_up_time: Optional[int] = None + min_down_time: Optional[int] = None + must_run: Optional[bool] = None + spinning: Optional[float] = None + volatility_forced: Optional[float] = None + volatility_planned: Optional[float] = None + law_forced: Optional[LawOption] = None + law_planned: Optional[LawOption] = None + marginal_cost: Optional[float] = None + spread_cost: Optional[float] = None + fixed_cost: Optional[float] = None + startup_cost: Optional[float] = None + market_bid_cost: Optional[float] = None + co2: Optional[float] = None + # version 860 + nh3: Optional[float] = None + so2: Optional[float] = None + nox: Optional[float] = None + pm2_5: Optional[float] = None + pm5: Optional[float] = None + pm10: Optional[float] = None + nmvoc: Optional[float] = None + op1: Optional[float] = None + op2: Optional[float] = None + op3: Optional[float] = None + op4: Optional[float] = None + op5: Optional[float] = None + # version 870 + cost_generation: Optional[ThermalCostGeneration] = None + efficiency: Optional[float] = None + variable_o_m_cost: Optional[float] = None + + +class ThermalClusterPropertiesLocal(BaseModel): + def __init__( + self, + thermal_name: str, + thermal_cluster_properties: Optional[ThermalClusterProperties] = None, + **kwargs: Optional[Any], + ): + super().__init__(**kwargs) + thermal_cluster_properties = thermal_cluster_properties or ThermalClusterProperties() + self._thermal_name = thermal_name + self._enabled = check_if_none(thermal_cluster_properties.enabled, True) + self._unit_count = check_if_none(thermal_cluster_properties.unit_count, 1) + self._nominal_capacity = check_if_none(thermal_cluster_properties.nominal_capacity, 0) + self._group = ( + # The value OTHER1 matches AntaresWeb if a cluster is created via API without providing a group + thermal_cluster_properties.group or 
ThermalClusterGroup.OTHER1 + ) + self._gen_ts = check_if_none(thermal_cluster_properties.gen_ts, LocalTSGenerationBehavior.USE_GLOBAL) + self._min_stable_power = check_if_none(thermal_cluster_properties.min_stable_power, 0) + self._min_up_time = check_if_none(thermal_cluster_properties.min_up_time, 1) + self._min_down_time = check_if_none(thermal_cluster_properties.min_down_time, 1) + self._must_run = check_if_none(thermal_cluster_properties.must_run, False) + self._spinning = check_if_none(thermal_cluster_properties.spinning, 0) + self._volatility_forced = check_if_none(thermal_cluster_properties.volatility_forced, 0) + self._volatility_planned = check_if_none(thermal_cluster_properties.volatility_planned, 0) + self._law_forced = check_if_none(thermal_cluster_properties.law_forced, LawOption.UNIFORM) + self._law_planned = check_if_none(thermal_cluster_properties.law_planned, LawOption.UNIFORM) + self._marginal_cost = check_if_none(thermal_cluster_properties.marginal_cost, 0) + self._spread_cost = check_if_none(thermal_cluster_properties.spread_cost, 0) + self._fixed_cost = check_if_none(thermal_cluster_properties.fixed_cost, 0) + self._startup_cost = check_if_none(thermal_cluster_properties.startup_cost, 0) + self._market_bid_cost = check_if_none(thermal_cluster_properties.market_bid_cost, 0) + self._co2 = check_if_none(thermal_cluster_properties.co2, 0) + self._nh3 = check_if_none(thermal_cluster_properties.nh3, 0) + self._so2 = check_if_none(thermal_cluster_properties.so2, 0) + self._nox = check_if_none(thermal_cluster_properties.nox, 0) + self._pm2_5 = check_if_none(thermal_cluster_properties.pm2_5, 0) + self._pm5 = check_if_none(thermal_cluster_properties.pm5, 0) + self._pm10 = check_if_none(thermal_cluster_properties.pm10, 0) + self._nmvoc = check_if_none(thermal_cluster_properties.nmvoc, 0) + self._op1 = check_if_none(thermal_cluster_properties.op1, 0) + self._op2 = check_if_none(thermal_cluster_properties.op2, 0) + self._op3 = 
check_if_none(thermal_cluster_properties.op3, 0) + self._op4 = check_if_none(thermal_cluster_properties.op4, 0) + self._op5 = check_if_none(thermal_cluster_properties.op5, 0) + self._cost_generation = check_if_none( + thermal_cluster_properties.cost_generation, + ThermalCostGeneration.SET_MANUALLY, + ) + self._efficiency = check_if_none(thermal_cluster_properties.efficiency, 100) + self._variable_o_m_cost = check_if_none(thermal_cluster_properties.variable_o_m_cost, 0) + + @computed_field # type: ignore[misc] + @property + def list_ini_fields(self) -> dict[str, dict[str, str]]: + return { + f"{self._thermal_name}": { + "group": self._group.value, + "name": self._thermal_name, + "enabled": f"{self._enabled}", + "unitcount": f"{self._unit_count}", + "nominalcapacity": f"{self._nominal_capacity:.6f}", + "gen-ts": self._gen_ts.value, + "min-stable-power": f"{self._min_stable_power:.6f}", + "min-up-time": f"{self._min_up_time}", + "min-down-time": f"{self._min_down_time}", + "must-run": f"{self._must_run}", + "spinning": f"{self._spinning:.6f}", + "volatility.forced": f"{self._volatility_forced:.6f}", + "volatility.planned": f"{self._volatility_planned:.6f}", + "law.forced": self._law_forced.value, + "law.planned": self._law_planned.value, + "marginal-cost": f"{self._marginal_cost:.6f}", + "spread-cost": f"{self._spread_cost:.6f}", + "fixed-cost": f"{self._fixed_cost:.6f}", + "startup-cost": f"{self._startup_cost:.6f}", + "market-bid-cost": f"{self._market_bid_cost:.6f}", + "co2": f"{self._co2:.6f}", + "nh3": f"{self._nh3:.6f}", + "so2": f"{self._so2:.6f}", + "nox": f"{self._nox:.6f}", + "pm2_5": f"{self._pm2_5:.6f}", + "pm5": f"{self._pm5:.6f}", + "pm10": f"{self._pm10:.6f}", + "nmvoc": f"{self._nmvoc:.6f}", + "op1": f"{self._op1:.6f}", + "op2": f"{self._op2:.6f}", + "op3": f"{self._op3:.6f}", + "op4": f"{self._op4:.6f}", + "op5": f"{self._op5:.6f}", + "costgeneration": self._cost_generation.value, + "efficiency": f"{self._efficiency:.6f}", + "variableomcost": 
f"{self._variable_o_m_cost:.6f}", + } + } + + def yield_thermal_cluster_properties(self) -> ThermalClusterProperties: + return ThermalClusterProperties( + group=self._group, + enabled=self._enabled, + unit_count=self._unit_count, + nominal_capacity=self._nominal_capacity, + gen_ts=self._gen_ts, + min_stable_power=self._min_stable_power, + min_up_time=self._min_up_time, + min_down_time=self._min_down_time, + must_run=self._must_run, + spinning=self._spinning, + volatility_forced=self._volatility_forced, + volatility_planned=self._volatility_planned, + law_forced=self._law_forced, + law_planned=self._law_planned, + marginal_cost=self._marginal_cost, + spread_cost=self._spread_cost, + fixed_cost=self._fixed_cost, + startup_cost=self._startup_cost, + market_bid_cost=self._market_bid_cost, + co2=self._co2, + nh3=self._nh3, + so2=self._so2, + nox=self._nox, + pm2_5=self._pm2_5, + pm5=self._pm5, + pm10=self._pm10, + nmvoc=self._nmvoc, + op1=self._op1, + op2=self._op2, + op3=self._op3, + op4=self._op4, + op5=self._op5, + cost_generation=self._cost_generation, + efficiency=self._efficiency, + variable_o_m_cost=self._variable_o_m_cost, + ) + + +class ThermalClusterMatrixName(Enum): + PREPRO_DATA = "data" + PREPRO_MODULATION = "modulation" + SERIES = "series" + SERIES_CO2_COST = "CO2Cost" + SERIES_FUEL_COST = "fuelCost" + + +class ThermalCluster: + def __init__(self, thermal_service, area_id: str, name: str, properties: Optional[ThermalClusterProperties] = None): # type: ignore # TODO: Find a way to avoid circular imports + self._area_id = area_id + self._thermal_service = thermal_service + self._name = name + self._id = transform_name_to_id(name) + self._properties = properties or ThermalClusterProperties() + + # TODO: Add matrices. 
+ + @property + def area_id(self) -> str: + return self._area_id + + @property + def name(self) -> str: + return self._name + + @property + def id(self) -> str: + return self._id + + @property + def properties(self) -> ThermalClusterProperties: + return self._properties + + def update_properties(self, properties: ThermalClusterProperties) -> None: + new_properties = self._thermal_service.update_thermal_properties(self, properties) + self._properties = new_properties + + def get_prepro_data_matrix(self) -> pd.DataFrame: + return self._thermal_service.get_thermal_matrix(self, ThermalClusterMatrixName.PREPRO_DATA) + + def get_prepro_modulation_matrix(self) -> pd.DataFrame: + return self._thermal_service.get_thermal_matrix(self, ThermalClusterMatrixName.PREPRO_MODULATION) + + def get_series_matrix(self) -> pd.DataFrame: + return self._thermal_service.get_thermal_matrix(self, ThermalClusterMatrixName.SERIES) + + def get_co2_cost_matrix(self) -> pd.DataFrame: + return self._thermal_service.get_thermal_matrix(self, ThermalClusterMatrixName.SERIES_CO2_COST) + + def get_fuel_cost_matrix(self) -> pd.DataFrame: + return self._thermal_service.get_thermal_matrix(self, ThermalClusterMatrixName.SERIES_FUEL_COST) diff --git a/src/antares/model/wind.py b/src/antares/model/wind.py index 6ccd6a81..748dcc19 100644 --- a/src/antares/model/wind.py +++ b/src/antares/model/wind.py @@ -1,18 +1,18 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- - -from antares.tools.time_series_tool import TimeSeries - - -class Wind(TimeSeries): - pass +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + + +from antares.tools.time_series_tool import TimeSeries + + +class Wind(TimeSeries): + pass diff --git a/src/antares/service/api_services/area_api.py b/src/antares/service/api_services/area_api.py index 6d75e3ff..73ddca38 100644 --- a/src/antares/service/api_services/area_api.py +++ b/src/antares/service/api_services/area_api.py @@ -1,598 +1,554 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -import json -from pathlib import PurePosixPath -from typing import Optional, Dict, Union, List - -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.api_conf.request_wrapper import RequestWrapper -from antares.exceptions.exceptions import ( - APIError, - AreaCreationError, - AreaPropertiesUpdateError, - AreaDeletionError, - AreaUiUpdateError, - MatrixUploadError, - HydroCreationError, - ThermalCreationError, - RenewableCreationError, - STStorageCreationError, - ThermalDeletionError, - RenewableDeletionError, - STStorageDeletionError, - LoadMatrixUploadError, - LoadMatrixDownloadError, -) -from antares.model.area import AreaProperties, AreaUi, Area -from antares.model.hydro import HydroProperties, HydroMatrixName, Hydro -from antares.model.misc_gen import MiscGen -from antares.model.renewable import RenewableClusterProperties, RenewableCluster -from antares.model.reserves import Reserves -from antares.model.solar import Solar -from antares.model.st_storage import STStorageProperties, STStorage -from antares.model.thermal import ThermalClusterProperties, ThermalCluster -from antares.model.wind import Wind -from antares.service.base_services import ( - BaseAreaService, - BaseShortTermStorageService, - BaseThermalService, - BaseRenewableService, -) -from antares.tools.contents_tool import AreaUiResponse -from antares.tools.matrix_tool import prepare_args_replace_matrix - - -class AreaApiService(BaseAreaService): - def __init__(self, config: APIconf, study_id: str) -> None: - super().__init__() - self.api_config = config - self.study_id = study_id - self._wrapper = RequestWrapper(self.api_config.set_up_api_conf()) - self._base_url = f"{self.api_config.get_host()}/api/v1" - self.storage_service: Optional[BaseShortTermStorageService] = None - self.thermal_service: Optional[BaseThermalService] = None - self.renewable_service: Optional[BaseRenewableService] = None - - def set_storage_service(self, storage_service: 
BaseShortTermStorageService) -> None: - self.storage_service = storage_service - - def set_thermal_service(self, thermal_service: BaseThermalService) -> None: - self.thermal_service = thermal_service - - def set_renewable_service(self, renewable_service: BaseRenewableService) -> None: - self.renewable_service = renewable_service - - def create_area( - self, - area_name: str, - properties: Optional[AreaProperties] = None, - ui: Optional[AreaUi] = None, - ) -> Area: - """ - Args: - area_name: area's name to be created. - properties: area's properties. If not provided, AntaresWeb will use its own default values. - ui: area's ui characteristics. If not provided, AntaresWeb will use its own default values. - - Returns: - The created area - - Raises: - MissingTokenError if api_token is missing - AreaCreationError if an HTTP Exception occurs - """ - # todo: AntaresWeb is stupid and x, y and color_rgb fields are mandatory ... - base_area_url = f"{self._base_url}/studies/{self.study_id}/areas" - - try: - response = self._wrapper.post( - base_area_url, json={"name": area_name, "type": "AREA"} - ) - area_id = response.json()["id"] - - if properties: - url = f"{base_area_url}/{area_id}/properties/form" - body = json.loads(properties.model_dump_json(exclude_none=True)) - if body: - self._wrapper.put(url, json=body) - if ui: - json_content = ui.model_dump(exclude_none=True) - url = f"{base_area_url}/{area_id}/ui" - if "layer" in json_content: - layer = json_content["layer"] - url += f"?layer={layer}" - del json_content["layer"] - if json_content: - # Gets current UI - response = self._wrapper.get(f"{base_area_url}?type=AREA&ui=true") - json_ui = response.json()[area_id] - ui_response = AreaUiResponse.model_validate(json_ui) - current_ui = ui_response.to_craft() - del current_ui["layer"] - # Updates the UI - current_ui.update(json_content) - self._wrapper.put(url, json=current_ui) - - url = f"{base_area_url}/{area_id}/properties/form" - response = self._wrapper.get(url) - 
area_properties = AreaProperties.model_validate(response.json()) - - # TODO: Ask AntaresWeb to do the same endpoint for only one area - url = f"{base_area_url}?type=AREA&ui=true" - response = self._wrapper.get(url) - json_ui = response.json()[area_id] - ui_response = AreaUiResponse.model_validate(json_ui) - ui_properties = AreaUi.model_validate(ui_response.to_craft()) - - except APIError as e: - raise AreaCreationError(area_name, e.message) from e - - return Area( - area_name, - self, - self.storage_service, - self.thermal_service, - self.renewable_service, - properties=area_properties, - ui=ui_properties, - ) - - def create_thermal_cluster( - self, - area_id: str, - thermal_name: str, - properties: Optional[ThermalClusterProperties] = None, - ) -> ThermalCluster: - """ - Args: - area_id: the area id of the thermal cluster - thermal_name: the name of the thermal cluster - properties: the properties of the thermal cluster. If not provided, AntaresWeb will use its own default values. - - Returns: - The created thermal cluster - - Raises: - MissingTokenError if api_token is missing - ThermalCreationError if an HTTP Exception occurs - """ - - try: - url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}/clusters/thermal" - body = {"name": thermal_name.lower()} - if properties: - camel_properties = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - body = {**body, **camel_properties} - response = self._wrapper.post(url, json=body) - json_response = response.json() - name = json_response["name"] - del json_response["name"] - del json_response["id"] - properties = ThermalClusterProperties.model_validate(json_response) - - except APIError as e: - raise ThermalCreationError(thermal_name, area_id, e.message) from e - - return ThermalCluster(self.thermal_service, area_id, name, properties) - - def create_thermal_cluster_with_matrices( - self, - area_id: str, - cluster_name: str, - parameters: ThermalClusterProperties, - prepro: 
Optional[pd.DataFrame] = None, - modulation: Optional[pd.DataFrame] = None, - series: Optional[pd.DataFrame] = None, - CO2Cost: Optional[pd.DataFrame] = None, - fuelCost: Optional[pd.DataFrame] = None, - ) -> ThermalCluster: - """ - Args: - - area_id: the area id of the thermal cluster - cluster_name: the name of the thermal cluster - parameters: the properties of the thermal cluster. - prepro: prepro matrix as a pandas DataFrame. - modulation: modulation matrix as a pandas DataFrame. - series: matrix for series at input/thermal/series/series.txt (optional). - CO2Cost: matrix for CO2Cost at input/thermal/series/CO2Cost.txt (optional). - fuelCost: matrix for CO2Cost at input/thermal/series/fuelCost.txt (optional). - - Returns: - The created thermal cluster with matrices. - - Raises: - MissingTokenError if api_token is missing - ThermalCreationError if an HTTP Exception occurs - """ - - try: - url = f"{self._base_url}/studies/{self.study_id}/commands" - body = { - "action": "create_cluster", - "args": { - "area_id": area_id, - "cluster_name": cluster_name, - "parameters": {}, - }, - } - args = body.get("args") - - if not isinstance(args, dict): - raise TypeError("body['args'] must be a dictionary") - - if parameters: - camel_properties = json.loads( - parameters.model_dump_json(by_alias=True, exclude_none=True) - ) - args["parameters"].update(camel_properties) - - if prepro is not None: - args["prepro"] = prepro.to_numpy().tolist() - if modulation is not None: - args["modulation"] = modulation.to_numpy().tolist() - - payload = [body] - response = self._wrapper.post(url, json=payload) - response.raise_for_status() - - if series is not None or CO2Cost is not None or fuelCost is not None: - self._create_thermal_series( - area_id, cluster_name, series, CO2Cost, fuelCost - ) - - except APIError as e: - raise ThermalCreationError(cluster_name, area_id, e.message) from e - - return ThermalCluster(self.thermal_service, area_id, cluster_name, parameters) - - def 
_create_thermal_series( - self, - area_id: str, - cluster_name: str, - series: Optional[pd.DataFrame], - CO2Cost: Optional[pd.DataFrame], - fuelCost: Optional[pd.DataFrame], - ) -> None: - command_body = [] - if series is not None: - series_path = ( - f"input/thermal/series/{area_id}/{cluster_name.lower()}/series" - ) - command_body.append(prepare_args_replace_matrix(series, series_path)) - - if CO2Cost is not None: - co2_cost_path = ( - f"input/thermal/series/{area_id}/{cluster_name.lower()}/CO2Cost" - ) - command_body.append(prepare_args_replace_matrix(CO2Cost, co2_cost_path)) - - if fuelCost is not None: - fuel_cost_path = ( - f"input/thermal/series/{area_id}/{cluster_name.lower()}/fuelCost" - ) - command_body.append(prepare_args_replace_matrix(fuelCost, fuel_cost_path)) - - if command_body: - json_payload = command_body - - self._replace_matrix_request(json_payload) - - def _replace_matrix_request(self, json_payload: Union[Dict, List[Dict]]) -> None: - """ - Send a POST request with the given JSON payload to commands endpoint. - - Args: Dict or List([Dict] with action = "replace_matrix" and matrix values - """ - - url = f"{self._base_url}/studies/{self.study_id}/commands" - response = self._wrapper.post(url, json=json_payload) - response.raise_for_status() - - def create_renewable_cluster( - self, - area_id: str, - renewable_name: str, - properties: Optional[RenewableClusterProperties], - series: Optional[pd.DataFrame], - ) -> RenewableCluster: - """ - Args: - area_id: the area id of the renewable cluster - renewable_name: the name of the renewable cluster - properties: the properties of the renewable cluster. 
If not provided, AntaresWeb will use its own default values - series: matrix for series.txt - - Returns: - The created renewable cluster - - Raises: - MissingTokenError if api_token is missing - RenewableCreationError if an HTTP Exception occurs - """ - try: - url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}/clusters/renewable" - body = {"name": renewable_name.lower()} - if properties: - camel_properties = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - body = {**body, **camel_properties} - response = self._wrapper.post(url, json=body) - json_response = response.json() - name = json_response["name"] - del json_response["name"] - del json_response["id"] - properties = RenewableClusterProperties.model_validate(json_response) - - if series is not None: - series_path = ( - f"input/renewables/series/{area_id}/{renewable_name.lower()}/series" - ) - command_body = [prepare_args_replace_matrix(series, series_path)] - self._replace_matrix_request(command_body) - - except APIError as e: - raise RenewableCreationError(renewable_name, area_id, e.message) from e - - return RenewableCluster(self.renewable_service, area_id, name, properties) - - def create_st_storage( - self, - area_id: str, - st_storage_name: str, - properties: Optional[STStorageProperties] = None, - ) -> STStorage: - """ - Args: - area_id: the area id of the short term storage - st_storage_name: the name of the short term storage - properties: the properties of the short term storage. If not provided, AntaresWeb will use its own default values. 
- - Returns: - The created renewable cluster - - Raises: - MissingTokenError if api_token is missing - STStorageCreationError if an HTTP Exception occurs - """ - try: - url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}/storages" - body = {"name": st_storage_name} - if properties: - camel_properties = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - body = {**body, **camel_properties} - response = self._wrapper.post(url, json=body) - json_response = response.json() - name = json_response["name"] - del json_response["name"] - del json_response["id"] - properties = STStorageProperties.model_validate(json_response) - - except APIError as e: - raise STStorageCreationError(st_storage_name, area_id, e.message) from e - - return STStorage(self.storage_service, area_id, name, properties) - - def _upload_series( - self, area: Area, series: Optional[pd.DataFrame], path: str - ) -> None: - try: - url = f"{self._base_url}/studies/{self.study_id}/raw?path={path}" - if series is not None: - array_data = series.to_numpy().tolist() - self._wrapper.post(url, json=array_data) - except APIError as e: - raise MatrixUploadError(area.id, e.message) from e - - def create_wind(self, area: Area, series: Optional[pd.DataFrame]) -> Wind: - series = series if series is not None else pd.DataFrame([]) - series_path = f"input/wind/series/wind_{area.id}" - self._upload_series(area, series, series_path) - return Wind(series) - - def create_reserves(self, area: Area, series: Optional[pd.DataFrame]) -> Reserves: - series = series if series is not None else pd.DataFrame([]) - series_path = f"input/reserves/{area.id}" - self._upload_series(area, series, series_path) - return Reserves(series) - - def create_solar(self, area: Area, series: Optional[pd.DataFrame]) -> Solar: - series = series if series is not None else pd.DataFrame([]) - series_path = f"input/solar/series/solar_{area.id}" - self._upload_series(area, series, series_path) - return Solar(series) 
- - def create_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: - series = series if series is not None else pd.DataFrame([]) - series_path = f"input/misc-gen/miscgen-{area.id}" - self._upload_series(area, series, series_path) - return MiscGen(series) - - def create_hydro( - self, - area_id: str, - properties: Optional[HydroProperties], - matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]], - ) -> Hydro: - # todo: not model validation because endpoint does not return anything - # properties = HydroProperties.model_validate(json_response) not possible - - try: - url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}/hydro/form" - body = {} - if properties: - camel_properties = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - body = {**camel_properties} - self._wrapper.put(url, json=body) - - if matrices is not None: - self._create_hydro_series(area_id, matrices) - - except APIError as e: - raise HydroCreationError(area_id, e.message) from e - - return Hydro(self, area_id, properties) - - def _create_hydro_series( - self, area_id: str, matrices: Dict[HydroMatrixName, pd.DataFrame] - ) -> None: - command_body = [] - for matrix_name, series in matrices.items(): - if "SERIES" in matrix_name.name: - series_path = f"input/hydro/series/{area_id}/{matrix_name.value}" - command_body.append(prepare_args_replace_matrix(series, series_path)) - if "PREPRO" in matrix_name.name: - series_path = f"input/hydro/prepro/{area_id}/{matrix_name.value}" - command_body.append(prepare_args_replace_matrix(series, series_path)) - if "COMMON" in matrix_name.name: - series_path = ( - f"input/hydro/common/capacity/{matrix_name.value}_{area_id}" - ) - command_body.append(prepare_args_replace_matrix(series, series_path)) - if command_body: - json_payload = command_body - - self._replace_matrix_request(json_payload) - - def update_area_properties( - self, area: Area, properties: AreaProperties - ) -> AreaProperties: - url = ( - 
f"{self._base_url}/studies/{self.study_id}/areas/{area.id}/properties/form" - ) - try: - body = json.loads(properties.model_dump_json(exclude_none=True)) - if not body: - return area.properties - - self._wrapper.put(url, json=body) - response = self._wrapper.get(url) - area_properties = AreaProperties.model_validate(response.json()) - - except APIError as e: - raise AreaPropertiesUpdateError(area.id, e.message) from e - - return area_properties - - def update_area_ui(self, area: Area, ui: AreaUi) -> AreaUi: - base_url = f"{self._base_url}/studies/{self.study_id}/areas" - try: - url = f"{base_url}/{area.id}/ui" - json_content = ui.model_dump(exclude_none=True) - if "layer" in json_content: - layer = json_content["layer"] - url += f"?layer={layer}" - del json_content["layer"] - if not json_content: - return area.ui - - # Gets current UI - response = self._wrapper.get(f"{base_url}?type=AREA&ui=true") - json_ui = response.json()[area.id] - ui_response = AreaUiResponse.model_validate(json_ui) - current_ui = ui_response.to_craft() - del current_ui["layer"] - # Updates the UI - current_ui.update(json_content) - self._wrapper.put(url, json=current_ui) - - url = f"{base_url}?type=AREA&ui=true" - response = self._wrapper.get(url) - json_ui = response.json()[area.id] - ui_response = AreaUiResponse.model_validate(json_ui) - area_ui = AreaUi.model_validate(ui_response.to_craft()) - - except APIError as e: - raise AreaUiUpdateError(area.id, e.message) from e - - return area_ui - - def delete_area(self, area: Area) -> None: - area_id = area.id - url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}" - try: - self._wrapper.delete(url) - except APIError as e: - raise AreaDeletionError(area_id, e.message) from e - - def delete_thermal_clusters( - self, area: Area, clusters: List[ThermalCluster] - ) -> None: - url = ( - f"{self._base_url}/studies/{self.study_id}/areas/{area.id}/clusters/thermal" - ) - body = [cluster.id for cluster in clusters] - try: - 
self._wrapper.delete(url, json=body) - except APIError as e: - raise ThermalDeletionError(area.id, body, e.message) from e - - def delete_renewable_clusters( - self, area: Area, clusters: List[RenewableCluster] - ) -> None: - url = f"{self._base_url}/studies/{self.study_id}/areas/{area.id}/clusters/renewable" - body = [cluster.id for cluster in clusters] - try: - self._wrapper.delete(url, json=body) - except APIError as e: - raise RenewableDeletionError(area.id, body, e.message) from e - - def delete_st_storages(self, area: Area, storages: List[STStorage]) -> None: - url = f"{self._base_url}/studies/{self.study_id}/areas/{area.id}/storages" - body = [storage.id for storage in storages] - try: - self._wrapper.delete(url, json=body) - except APIError as e: - raise STStorageDeletionError(area.id, body, e.message) from e - - def upload_load_matrix(self, area: Area, load_matrix: pd.DataFrame) -> None: - path = PurePosixPath("input") / "load" / "series" / f"load_{area.id}" - url = f"{self._base_url}/studies/{self.study_id}/raw?path={path}" - - try: - rows_number = load_matrix.shape[0] - expected_rows = 8760 - if rows_number < expected_rows: - raise APIError( - f"Expected {expected_rows} rows and received {rows_number}." 
- ) - array_data = load_matrix.to_numpy().tolist() - self._wrapper.post(url, json=array_data) - except APIError as e: - raise LoadMatrixUploadError(area.id, e.message) from e - - def get_matrix(self, path: PurePosixPath) -> pd.DataFrame: - raw_url = f"{self._base_url}/studies/{self.study_id}/raw?path={path}" - response = self._wrapper.get(raw_url) - json_df = response.json() - dataframe = pd.DataFrame( - data=json_df["data"], index=json_df["index"], columns=json_df["columns"] - ) - return dataframe - - def get_load_matrix(self, area: Area) -> pd.DataFrame: - try: - return self.get_matrix( - PurePosixPath("input") / "load" / "series" / f"load_{area.id}" - ) - except APIError as e: - raise LoadMatrixDownloadError(area.id, e.message) from e +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +import json +from pathlib import PurePosixPath +from typing import Optional, Dict, Union, List + +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.api_conf.request_wrapper import RequestWrapper +from antares.exceptions.exceptions import ( + APIError, + AreaCreationError, + AreaPropertiesUpdateError, + AreaDeletionError, + AreaUiUpdateError, + MatrixUploadError, + HydroCreationError, + ThermalCreationError, + RenewableCreationError, + STStorageCreationError, + ThermalDeletionError, + RenewableDeletionError, + STStorageDeletionError, + LoadMatrixUploadError, + LoadMatrixDownloadError, +) +from antares.model.area import AreaProperties, AreaUi, Area +from antares.model.hydro import HydroProperties, HydroMatrixName, Hydro +from antares.model.misc_gen import MiscGen +from antares.model.renewable import RenewableClusterProperties, RenewableCluster +from antares.model.reserves import Reserves +from antares.model.solar import Solar +from antares.model.st_storage import STStorageProperties, STStorage +from antares.model.thermal import ThermalClusterProperties, ThermalCluster +from antares.model.wind import Wind +from antares.service.base_services import ( + BaseAreaService, + BaseShortTermStorageService, + BaseThermalService, + BaseRenewableService, +) +from antares.tools.contents_tool import AreaUiResponse +from antares.tools.matrix_tool import prepare_args_replace_matrix + + +class AreaApiService(BaseAreaService): + def __init__(self, config: APIconf, study_id: str) -> None: + super().__init__() + self.api_config = config + self.study_id = study_id + self._wrapper = RequestWrapper(self.api_config.set_up_api_conf()) + self._base_url = f"{self.api_config.get_host()}/api/v1" + self.storage_service: Optional[BaseShortTermStorageService] = None + self.thermal_service: Optional[BaseThermalService] = None + self.renewable_service: Optional[BaseRenewableService] = None + + def set_storage_service(self, storage_service: 
BaseShortTermStorageService) -> None: + self.storage_service = storage_service + + def set_thermal_service(self, thermal_service: BaseThermalService) -> None: + self.thermal_service = thermal_service + + def set_renewable_service(self, renewable_service: BaseRenewableService) -> None: + self.renewable_service = renewable_service + + def create_area( + self, + area_name: str, + properties: Optional[AreaProperties] = None, + ui: Optional[AreaUi] = None, + ) -> Area: + """ + Args: + area_name: area's name to be created. + properties: area's properties. If not provided, AntaresWeb will use its own default values. + ui: area's ui characteristics. If not provided, AntaresWeb will use its own default values. + + Returns: + The created area + + Raises: + MissingTokenError if api_token is missing + AreaCreationError if an HTTP Exception occurs + """ + # todo: AntaresWeb is stupid and x, y and color_rgb fields are mandatory ... + base_area_url = f"{self._base_url}/studies/{self.study_id}/areas" + + try: + response = self._wrapper.post(base_area_url, json={"name": area_name, "type": "AREA"}) + area_id = response.json()["id"] + + if properties: + url = f"{base_area_url}/{area_id}/properties/form" + body = json.loads(properties.model_dump_json(exclude_none=True)) + if body: + self._wrapper.put(url, json=body) + if ui: + json_content = ui.model_dump(exclude_none=True) + url = f"{base_area_url}/{area_id}/ui" + if "layer" in json_content: + layer = json_content["layer"] + url += f"?layer={layer}" + del json_content["layer"] + if json_content: + # Gets current UI + response = self._wrapper.get(f"{base_area_url}?type=AREA&ui=true") + json_ui = response.json()[area_id] + ui_response = AreaUiResponse.model_validate(json_ui) + current_ui = ui_response.to_craft() + del current_ui["layer"] + # Updates the UI + current_ui.update(json_content) + self._wrapper.put(url, json=current_ui) + + url = f"{base_area_url}/{area_id}/properties/form" + response = self._wrapper.get(url) + 
area_properties = AreaProperties.model_validate(response.json()) + + # TODO: Ask AntaresWeb to do the same endpoint for only one area + url = f"{base_area_url}?type=AREA&ui=true" + response = self._wrapper.get(url) + json_ui = response.json()[area_id] + ui_response = AreaUiResponse.model_validate(json_ui) + ui_properties = AreaUi.model_validate(ui_response.to_craft()) + + except APIError as e: + raise AreaCreationError(area_name, e.message) from e + + return Area( + area_name, + self, + self.storage_service, + self.thermal_service, + self.renewable_service, + properties=area_properties, + ui=ui_properties, + ) + + def create_thermal_cluster( + self, + area_id: str, + thermal_name: str, + properties: Optional[ThermalClusterProperties] = None, + ) -> ThermalCluster: + """ + Args: + area_id: the area id of the thermal cluster + thermal_name: the name of the thermal cluster + properties: the properties of the thermal cluster. If not provided, AntaresWeb will use its own default values. + + Returns: + The created thermal cluster + + Raises: + MissingTokenError if api_token is missing + ThermalCreationError if an HTTP Exception occurs + """ + + try: + url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}/clusters/thermal" + body = {"name": thermal_name.lower()} + if properties: + camel_properties = json.loads(properties.model_dump_json(by_alias=True, exclude_none=True)) + body = {**body, **camel_properties} + response = self._wrapper.post(url, json=body) + json_response = response.json() + name = json_response["name"] + del json_response["name"] + del json_response["id"] + properties = ThermalClusterProperties.model_validate(json_response) + + except APIError as e: + raise ThermalCreationError(thermal_name, area_id, e.message) from e + + return ThermalCluster(self.thermal_service, area_id, name, properties) + + def create_thermal_cluster_with_matrices( + self, + area_id: str, + cluster_name: str, + parameters: ThermalClusterProperties, + prepro: 
Optional[pd.DataFrame] = None,
+        modulation: Optional[pd.DataFrame] = None,
+        series: Optional[pd.DataFrame] = None,
+        CO2Cost: Optional[pd.DataFrame] = None,
+        fuelCost: Optional[pd.DataFrame] = None,
+    ) -> ThermalCluster:
+        """
+        Args:
+
+        area_id: the area id of the thermal cluster
+        cluster_name: the name of the thermal cluster
+        parameters: the properties of the thermal cluster.
+        prepro: prepro matrix as a pandas DataFrame.
+        modulation: modulation matrix as a pandas DataFrame.
+        series: matrix for series at input/thermal/series/series.txt (optional).
+        CO2Cost: matrix for CO2Cost at input/thermal/series/CO2Cost.txt (optional).
+        fuelCost: matrix for fuelCost at input/thermal/series/fuelCost.txt (optional).
+
+        Returns:
+            The created thermal cluster with matrices.
+
+        Raises:
+            MissingTokenError if api_token is missing
+            ThermalCreationError if an HTTP Exception occurs
+        """
+
+        try:
+            url = f"{self._base_url}/studies/{self.study_id}/commands"
+            body = {
+                "action": "create_cluster",
+                "args": {
+                    "area_id": area_id,
+                    "cluster_name": cluster_name,
+                    "parameters": {},
+                },
+            }
+            args = body.get("args")
+
+            if not isinstance(args, dict):
+                raise TypeError("body['args'] must be a dictionary")
+
+            if parameters:
+                camel_properties = json.loads(parameters.model_dump_json(by_alias=True, exclude_none=True))
+                args["parameters"].update(camel_properties)
+
+            if prepro is not None:
+                args["prepro"] = prepro.to_numpy().tolist()
+            if modulation is not None:
+                args["modulation"] = modulation.to_numpy().tolist()
+
+            payload = [body]
+            response = self._wrapper.post(url, json=payload)
+            response.raise_for_status()
+
+            if series is not None or CO2Cost is not None or fuelCost is not None:
+                self._create_thermal_series(area_id, cluster_name, series, CO2Cost, fuelCost)
+
+        except APIError as e:
+            raise ThermalCreationError(cluster_name, area_id, e.message) from e
+
+        return ThermalCluster(self.thermal_service, area_id, cluster_name, parameters)
+
+    def _create_thermal_series(
+        
self, + area_id: str, + cluster_name: str, + series: Optional[pd.DataFrame], + CO2Cost: Optional[pd.DataFrame], + fuelCost: Optional[pd.DataFrame], + ) -> None: + command_body = [] + if series is not None: + series_path = f"input/thermal/series/{area_id}/{cluster_name.lower()}/series" + command_body.append(prepare_args_replace_matrix(series, series_path)) + + if CO2Cost is not None: + co2_cost_path = f"input/thermal/series/{area_id}/{cluster_name.lower()}/CO2Cost" + command_body.append(prepare_args_replace_matrix(CO2Cost, co2_cost_path)) + + if fuelCost is not None: + fuel_cost_path = f"input/thermal/series/{area_id}/{cluster_name.lower()}/fuelCost" + command_body.append(prepare_args_replace_matrix(fuelCost, fuel_cost_path)) + + if command_body: + json_payload = command_body + + self._replace_matrix_request(json_payload) + + def _replace_matrix_request(self, json_payload: Union[Dict, List[Dict]]) -> None: + """ + Send a POST request with the given JSON payload to commands endpoint. + + Args: Dict or List([Dict] with action = "replace_matrix" and matrix values + """ + + url = f"{self._base_url}/studies/{self.study_id}/commands" + response = self._wrapper.post(url, json=json_payload) + response.raise_for_status() + + def create_renewable_cluster( + self, + area_id: str, + renewable_name: str, + properties: Optional[RenewableClusterProperties], + series: Optional[pd.DataFrame], + ) -> RenewableCluster: + """ + Args: + area_id: the area id of the renewable cluster + renewable_name: the name of the renewable cluster + properties: the properties of the renewable cluster. 
If not provided, AntaresWeb will use its own default values + series: matrix for series.txt + + Returns: + The created renewable cluster + + Raises: + MissingTokenError if api_token is missing + RenewableCreationError if an HTTP Exception occurs + """ + try: + url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}/clusters/renewable" + body = {"name": renewable_name.lower()} + if properties: + camel_properties = json.loads(properties.model_dump_json(by_alias=True, exclude_none=True)) + body = {**body, **camel_properties} + response = self._wrapper.post(url, json=body) + json_response = response.json() + name = json_response["name"] + del json_response["name"] + del json_response["id"] + properties = RenewableClusterProperties.model_validate(json_response) + + if series is not None: + series_path = f"input/renewables/series/{area_id}/{renewable_name.lower()}/series" + command_body = [prepare_args_replace_matrix(series, series_path)] + self._replace_matrix_request(command_body) + + except APIError as e: + raise RenewableCreationError(renewable_name, area_id, e.message) from e + + return RenewableCluster(self.renewable_service, area_id, name, properties) + + def create_st_storage( + self, + area_id: str, + st_storage_name: str, + properties: Optional[STStorageProperties] = None, + ) -> STStorage: + """ + Args: + area_id: the area id of the short term storage + st_storage_name: the name of the short term storage + properties: the properties of the short term storage. If not provided, AntaresWeb will use its own default values. 
+
+        Returns:
+            The created short term storage
+
+        Raises:
+            MissingTokenError if api_token is missing
+            STStorageCreationError if an HTTP Exception occurs
+        """
+        try:
+            url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}/storages"
+            body = {"name": st_storage_name}
+            if properties:
+                camel_properties = json.loads(properties.model_dump_json(by_alias=True, exclude_none=True))
+                body = {**body, **camel_properties}
+            response = self._wrapper.post(url, json=body)
+            json_response = response.json()
+            name = json_response["name"]
+            del json_response["name"]
+            del json_response["id"]
+            properties = STStorageProperties.model_validate(json_response)
+
+        except APIError as e:
+            raise STStorageCreationError(st_storage_name, area_id, e.message) from e
+
+        return STStorage(self.storage_service, area_id, name, properties)
+
+    def _upload_series(self, area: Area, series: Optional[pd.DataFrame], path: str) -> None:
+        try:
+            url = f"{self._base_url}/studies/{self.study_id}/raw?path={path}"
+            if series is not None:
+                array_data = series.to_numpy().tolist()
+                self._wrapper.post(url, json=array_data)
+        except APIError as e:
+            raise MatrixUploadError(area.id, e.message) from e
+
+    def create_wind(self, area: Area, series: Optional[pd.DataFrame]) -> Wind:
+        series = series if series is not None else pd.DataFrame([])
+        series_path = f"input/wind/series/wind_{area.id}"
+        self._upload_series(area, series, series_path)
+        return Wind(series)
+
+    def create_reserves(self, area: Area, series: Optional[pd.DataFrame]) -> Reserves:
+        series = series if series is not None else pd.DataFrame([])
+        series_path = f"input/reserves/{area.id}"
+        self._upload_series(area, series, series_path)
+        return Reserves(series)
+
+    def create_solar(self, area: Area, series: Optional[pd.DataFrame]) -> Solar:
+        series = series if series is not None else pd.DataFrame([])
+        series_path = f"input/solar/series/solar_{area.id}"
+        self._upload_series(area, series, series_path)
+        return Solar(series)
+
+    def 
create_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: + series = series if series is not None else pd.DataFrame([]) + series_path = f"input/misc-gen/miscgen-{area.id}" + self._upload_series(area, series, series_path) + return MiscGen(series) + + def create_hydro( + self, + area_id: str, + properties: Optional[HydroProperties], + matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]], + ) -> Hydro: + # todo: not model validation because endpoint does not return anything + # properties = HydroProperties.model_validate(json_response) not possible + + try: + url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}/hydro/form" + body = {} + if properties: + camel_properties = json.loads(properties.model_dump_json(by_alias=True, exclude_none=True)) + body = {**camel_properties} + self._wrapper.put(url, json=body) + + if matrices is not None: + self._create_hydro_series(area_id, matrices) + + except APIError as e: + raise HydroCreationError(area_id, e.message) from e + + return Hydro(self, area_id, properties) + + def _create_hydro_series(self, area_id: str, matrices: Dict[HydroMatrixName, pd.DataFrame]) -> None: + command_body = [] + for matrix_name, series in matrices.items(): + if "SERIES" in matrix_name.name: + series_path = f"input/hydro/series/{area_id}/{matrix_name.value}" + command_body.append(prepare_args_replace_matrix(series, series_path)) + if "PREPRO" in matrix_name.name: + series_path = f"input/hydro/prepro/{area_id}/{matrix_name.value}" + command_body.append(prepare_args_replace_matrix(series, series_path)) + if "COMMON" in matrix_name.name: + series_path = f"input/hydro/common/capacity/{matrix_name.value}_{area_id}" + command_body.append(prepare_args_replace_matrix(series, series_path)) + if command_body: + json_payload = command_body + + self._replace_matrix_request(json_payload) + + def update_area_properties(self, area: Area, properties: AreaProperties) -> AreaProperties: + url = 
f"{self._base_url}/studies/{self.study_id}/areas/{area.id}/properties/form" + try: + body = json.loads(properties.model_dump_json(exclude_none=True)) + if not body: + return area.properties + + self._wrapper.put(url, json=body) + response = self._wrapper.get(url) + area_properties = AreaProperties.model_validate(response.json()) + + except APIError as e: + raise AreaPropertiesUpdateError(area.id, e.message) from e + + return area_properties + + def update_area_ui(self, area: Area, ui: AreaUi) -> AreaUi: + base_url = f"{self._base_url}/studies/{self.study_id}/areas" + try: + url = f"{base_url}/{area.id}/ui" + json_content = ui.model_dump(exclude_none=True) + if "layer" in json_content: + layer = json_content["layer"] + url += f"?layer={layer}" + del json_content["layer"] + if not json_content: + return area.ui + + # Gets current UI + response = self._wrapper.get(f"{base_url}?type=AREA&ui=true") + json_ui = response.json()[area.id] + ui_response = AreaUiResponse.model_validate(json_ui) + current_ui = ui_response.to_craft() + del current_ui["layer"] + # Updates the UI + current_ui.update(json_content) + self._wrapper.put(url, json=current_ui) + + url = f"{base_url}?type=AREA&ui=true" + response = self._wrapper.get(url) + json_ui = response.json()[area.id] + ui_response = AreaUiResponse.model_validate(json_ui) + area_ui = AreaUi.model_validate(ui_response.to_craft()) + + except APIError as e: + raise AreaUiUpdateError(area.id, e.message) from e + + return area_ui + + def delete_area(self, area: Area) -> None: + area_id = area.id + url = f"{self._base_url}/studies/{self.study_id}/areas/{area_id}" + try: + self._wrapper.delete(url) + except APIError as e: + raise AreaDeletionError(area_id, e.message) from e + + def delete_thermal_clusters(self, area: Area, clusters: List[ThermalCluster]) -> None: + url = f"{self._base_url}/studies/{self.study_id}/areas/{area.id}/clusters/thermal" + body = [cluster.id for cluster in clusters] + try: + self._wrapper.delete(url, json=body) 
+ except APIError as e: + raise ThermalDeletionError(area.id, body, e.message) from e + + def delete_renewable_clusters(self, area: Area, clusters: List[RenewableCluster]) -> None: + url = f"{self._base_url}/studies/{self.study_id}/areas/{area.id}/clusters/renewable" + body = [cluster.id for cluster in clusters] + try: + self._wrapper.delete(url, json=body) + except APIError as e: + raise RenewableDeletionError(area.id, body, e.message) from e + + def delete_st_storages(self, area: Area, storages: List[STStorage]) -> None: + url = f"{self._base_url}/studies/{self.study_id}/areas/{area.id}/storages" + body = [storage.id for storage in storages] + try: + self._wrapper.delete(url, json=body) + except APIError as e: + raise STStorageDeletionError(area.id, body, e.message) from e + + def upload_load_matrix(self, area: Area, load_matrix: pd.DataFrame) -> None: + path = PurePosixPath("input") / "load" / "series" / f"load_{area.id}" + url = f"{self._base_url}/studies/{self.study_id}/raw?path={path}" + + try: + rows_number = load_matrix.shape[0] + expected_rows = 8760 + if rows_number < expected_rows: + raise APIError(f"Expected {expected_rows} rows and received {rows_number}.") + array_data = load_matrix.to_numpy().tolist() + self._wrapper.post(url, json=array_data) + except APIError as e: + raise LoadMatrixUploadError(area.id, e.message) from e + + def get_matrix(self, path: PurePosixPath) -> pd.DataFrame: + raw_url = f"{self._base_url}/studies/{self.study_id}/raw?path={path}" + response = self._wrapper.get(raw_url) + json_df = response.json() + dataframe = pd.DataFrame(data=json_df["data"], index=json_df["index"], columns=json_df["columns"]) + return dataframe + + def get_load_matrix(self, area: Area) -> pd.DataFrame: + try: + return self.get_matrix(PurePosixPath("input") / "load" / "series" / f"load_{area.id}") + except APIError as e: + raise LoadMatrixDownloadError(area.id, e.message) from e diff --git a/src/antares/service/api_services/binding_constraint_api.py 
b/src/antares/service/api_services/binding_constraint_api.py index dd1c5711..9132c942 100644 --- a/src/antares/service/api_services/binding_constraint_api.py +++ b/src/antares/service/api_services/binding_constraint_api.py @@ -1,209 +1,179 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -import json -from pathlib import PurePosixPath -from typing import Optional, List - -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.api_conf.request_wrapper import RequestWrapper -from antares.exceptions.exceptions import ( - APIError, - BindingConstraintCreationError, - ConstraintTermDeletionError, - ConstraintPropertiesUpdateError, - ConstraintMatrixDownloadError, - ConstraintMatrixUpdateError, - ConstraintTermAdditionError, -) -from antares.model.binding_constraint import ( - BindingConstraint, - BindingConstraintProperties, - ConstraintTerm, - ConstraintMatrixName, -) -from antares.service.api_services.utils import get_matrix -from antares.service.base_services import BaseBindingConstraintService - - -class BindingConstraintApiService(BaseBindingConstraintService): - def __init__(self, config: APIconf, study_id: str) -> None: - super().__init__() - self.api_config = config - self.study_id = study_id - self._wrapper = RequestWrapper(self.api_config.set_up_api_conf()) - self._base_url = f"{self.api_config.get_host()}/api/v1" - - def create_binding_constraint( - self, - name: str, - properties: Optional[BindingConstraintProperties] = None, - terms: Optional[List[ConstraintTerm]] = None, - less_term_matrix: Optional[pd.DataFrame] = None, - equal_term_matrix: Optional[pd.DataFrame] = None, - greater_term_matrix: 
Optional[pd.DataFrame] = None, - ) -> BindingConstraint: - """ - Args: - name: the binding constraint name - properties: the properties of the constraint. If not provided, AntaresWeb will use its own default values. - terms: the terms of the constraint. If not provided, no term will be created. - less_term_matrix: matrix corresponding to the lower bound of the constraint. If not provided, no matrix will be created. - equal_term_matrix: matrix corresponding to the equality bound of the constraint. If not provided, no matrix will be created. - greater_term_matrix: matrix corresponding to the upper bound of the constraint. If not provided, no matrix will be created. - - Returns: - The created binding constraint - - Raises: - MissingTokenError if api_token is missing - BindingConstraintCreationError if an HTTP Exception occurs - """ - base_url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints" - - try: - body = {"name": name} - if properties: - camel_properties = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - body = {**body, **camel_properties} - for matrix, matrix_name in zip( - [less_term_matrix, equal_term_matrix, greater_term_matrix], - ["lessTermMatrix", "equalTermMatrix", "greaterTermMatrix"], - ): - if matrix is not None: - body[matrix_name] = matrix.to_numpy().tolist() - response = self._wrapper.post(base_url, json=body) - created_properties = response.json() - bc_id = created_properties["id"] - for key in ["terms", "id", "name"]: - del created_properties[key] - bc_properties = BindingConstraintProperties.model_validate( - created_properties - ) - bc_terms: List[ConstraintTerm] = [] - - if terms: - json_terms = [term.model_dump() for term in terms] - url = f"{base_url}/{bc_id}/terms" - self._wrapper.post(url, json=json_terms) - - url = f"{base_url}/{bc_id}" - response = self._wrapper.get(url) - created_terms = response.json()["terms"] - bc_terms = [ - ConstraintTerm.model_validate(term) for term in created_terms - 
] - - except APIError as e: - raise BindingConstraintCreationError(name, e.message) from e - - return BindingConstraint(name, self, bc_properties, bc_terms) - - def delete_binding_constraint_term(self, constraint_id: str, term_id: str) -> None: - url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{constraint_id}/term/{term_id}" - try: - self._wrapper.delete(url) - except APIError as e: - raise ConstraintTermDeletionError(constraint_id, term_id, e.message) from e - - def update_binding_constraint_properties( - self, - binding_constraint: BindingConstraint, - properties: BindingConstraintProperties, - ) -> BindingConstraintProperties: - url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{binding_constraint.id}" - try: - body = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - if not body: - return binding_constraint.properties - - response = self._wrapper.put(url, json=body) - json_response = response.json() - for key in ["terms", "id", "name"]: - del json_response[key] - new_properties = BindingConstraintProperties.model_validate(json_response) - - except APIError as e: - raise ConstraintPropertiesUpdateError( - binding_constraint.id, e.message - ) from e - - return new_properties - - def get_constraint_matrix( - self, constraint: BindingConstraint, matrix_name: ConstraintMatrixName - ) -> pd.DataFrame: - try: - path = ( - PurePosixPath("input") - / "bindingconstraints" - / f"{constraint.id}_{matrix_name.value}" - ) - return get_matrix( - f"{self._base_url}/studies/{self.study_id}/raw?path={path}", - self._wrapper, - ) - except APIError as e: - raise ConstraintMatrixDownloadError( - constraint.id, matrix_name.value, e.message - ) from e - - def update_constraint_matrix( - self, - constraint: BindingConstraint, - matrix_name: ConstraintMatrixName, - matrix: pd.DataFrame, - ) -> None: - mapping = { - ConstraintMatrixName.LESS_TERM: "lessTermMatrix", - ConstraintMatrixName.GREATER_TERM: 
"greaterTermMatrix", - ConstraintMatrixName.EQUAL_TERM: "equalTermMatrix", - } - url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{constraint.id}" - try: - body = {mapping[matrix_name]: matrix.to_numpy().tolist()} - self._wrapper.put(url, json=body) - except APIError as e: - raise ConstraintMatrixUpdateError( - constraint.id, matrix_name.value, e.message - ) from e - - def add_constraint_terms( - self, constraint: BindingConstraint, terms: List[ConstraintTerm] - ) -> List[ConstraintTerm]: - url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{constraint.id}" - try: - json_terms = [term.model_dump() for term in terms] - self._wrapper.post(f"{url}/terms", json=json_terms) - response = self._wrapper.get(url) - all_terms = response.json()["terms"] - validated_terms = [ - ConstraintTerm.model_validate(term) for term in all_terms - ] - new_terms = [ - term - for term in validated_terms - if term.id not in constraint.get_terms() - ] - - except APIError as e: - raise ConstraintTermAdditionError( - constraint.id, [term.id for term in terms], e.message - ) from e - - return new_terms +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +import json +from pathlib import PurePosixPath +from typing import Optional, List + +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.api_conf.request_wrapper import RequestWrapper +from antares.exceptions.exceptions import ( + APIError, + BindingConstraintCreationError, + ConstraintTermDeletionError, + ConstraintPropertiesUpdateError, + ConstraintMatrixDownloadError, + ConstraintMatrixUpdateError, + ConstraintTermAdditionError, +) +from antares.model.binding_constraint import ( + BindingConstraint, + BindingConstraintProperties, + ConstraintTerm, + ConstraintMatrixName, +) +from antares.service.api_services.utils import get_matrix +from antares.service.base_services import BaseBindingConstraintService + + +class BindingConstraintApiService(BaseBindingConstraintService): + def __init__(self, config: APIconf, study_id: str) -> None: + super().__init__() + self.api_config = config + self.study_id = study_id + self._wrapper = RequestWrapper(self.api_config.set_up_api_conf()) + self._base_url = f"{self.api_config.get_host()}/api/v1" + + def create_binding_constraint( + self, + name: str, + properties: Optional[BindingConstraintProperties] = None, + terms: Optional[List[ConstraintTerm]] = None, + less_term_matrix: Optional[pd.DataFrame] = None, + equal_term_matrix: Optional[pd.DataFrame] = None, + greater_term_matrix: Optional[pd.DataFrame] = None, + ) -> BindingConstraint: + """ + Args: + name: the binding constraint name + properties: the properties of the constraint. If not provided, AntaresWeb will use its own default values. + terms: the terms of the constraint. If not provided, no term will be created. + less_term_matrix: matrix corresponding to the lower bound of the constraint. If not provided, no matrix will be created. + equal_term_matrix: matrix corresponding to the equality bound of the constraint. If not provided, no matrix will be created. + greater_term_matrix: matrix corresponding to the upper bound of the constraint. 
If not provided, no matrix will be created. + + Returns: + The created binding constraint + + Raises: + MissingTokenError if api_token is missing + BindingConstraintCreationError if an HTTP Exception occurs + """ + base_url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints" + + try: + body = {"name": name} + if properties: + camel_properties = json.loads(properties.model_dump_json(by_alias=True, exclude_none=True)) + body = {**body, **camel_properties} + for matrix, matrix_name in zip( + [less_term_matrix, equal_term_matrix, greater_term_matrix], + ["lessTermMatrix", "equalTermMatrix", "greaterTermMatrix"], + ): + if matrix is not None: + body[matrix_name] = matrix.to_numpy().tolist() + response = self._wrapper.post(base_url, json=body) + created_properties = response.json() + bc_id = created_properties["id"] + for key in ["terms", "id", "name"]: + del created_properties[key] + bc_properties = BindingConstraintProperties.model_validate(created_properties) + bc_terms: List[ConstraintTerm] = [] + + if terms: + json_terms = [term.model_dump() for term in terms] + url = f"{base_url}/{bc_id}/terms" + self._wrapper.post(url, json=json_terms) + + url = f"{base_url}/{bc_id}" + response = self._wrapper.get(url) + created_terms = response.json()["terms"] + bc_terms = [ConstraintTerm.model_validate(term) for term in created_terms] + + except APIError as e: + raise BindingConstraintCreationError(name, e.message) from e + + return BindingConstraint(name, self, bc_properties, bc_terms) + + def delete_binding_constraint_term(self, constraint_id: str, term_id: str) -> None: + url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{constraint_id}/term/{term_id}" + try: + self._wrapper.delete(url) + except APIError as e: + raise ConstraintTermDeletionError(constraint_id, term_id, e.message) from e + + def update_binding_constraint_properties( + self, + binding_constraint: BindingConstraint, + properties: BindingConstraintProperties, + ) -> 
BindingConstraintProperties: + url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{binding_constraint.id}" + try: + body = json.loads(properties.model_dump_json(by_alias=True, exclude_none=True)) + if not body: + return binding_constraint.properties + + response = self._wrapper.put(url, json=body) + json_response = response.json() + for key in ["terms", "id", "name"]: + del json_response[key] + new_properties = BindingConstraintProperties.model_validate(json_response) + + except APIError as e: + raise ConstraintPropertiesUpdateError(binding_constraint.id, e.message) from e + + return new_properties + + def get_constraint_matrix(self, constraint: BindingConstraint, matrix_name: ConstraintMatrixName) -> pd.DataFrame: + try: + path = PurePosixPath("input") / "bindingconstraints" / f"{constraint.id}_{matrix_name.value}" + return get_matrix( + f"{self._base_url}/studies/{self.study_id}/raw?path={path}", + self._wrapper, + ) + except APIError as e: + raise ConstraintMatrixDownloadError(constraint.id, matrix_name.value, e.message) from e + + def update_constraint_matrix( + self, + constraint: BindingConstraint, + matrix_name: ConstraintMatrixName, + matrix: pd.DataFrame, + ) -> None: + mapping = { + ConstraintMatrixName.LESS_TERM: "lessTermMatrix", + ConstraintMatrixName.GREATER_TERM: "greaterTermMatrix", + ConstraintMatrixName.EQUAL_TERM: "equalTermMatrix", + } + url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{constraint.id}" + try: + body = {mapping[matrix_name]: matrix.to_numpy().tolist()} + self._wrapper.put(url, json=body) + except APIError as e: + raise ConstraintMatrixUpdateError(constraint.id, matrix_name.value, e.message) from e + + def add_constraint_terms(self, constraint: BindingConstraint, terms: List[ConstraintTerm]) -> List[ConstraintTerm]: + url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{constraint.id}" + try: + json_terms = [term.model_dump() for term in terms] + 
self._wrapper.post(f"{url}/terms", json=json_terms) + response = self._wrapper.get(url) + all_terms = response.json()["terms"] + validated_terms = [ConstraintTerm.model_validate(term) for term in all_terms] + new_terms = [term for term in validated_terms if term.id not in constraint.get_terms()] + + except APIError as e: + raise ConstraintTermAdditionError(constraint.id, [term.id for term in terms], e.message) from e + + return new_terms diff --git a/src/antares/service/api_services/link_api.py b/src/antares/service/api_services/link_api.py index f35973e4..7d97524f 100644 --- a/src/antares/service/api_services/link_api.py +++ b/src/antares/service/api_services/link_api.py @@ -1,186 +1,176 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -import json -from types import MappingProxyType -from typing import Optional - -from antares.api_conf.api_conf import APIconf -from antares.api_conf.request_wrapper import RequestWrapper -from antares.exceptions.exceptions import ( - APIError, - LinkCreationError, - LinkDeletionError, - LinkPropertiesUpdateError, - LinkUiUpdateError, -) -from antares.model.area import Area -from antares.model.link import LinkProperties, LinkUi, Link -from antares.service.base_services import BaseLinkService -from antares.tools.ini_tool import check_if_none - - -class LinkApiService(BaseLinkService): - def __init__(self, config: APIconf, study_id: str): - super().__init__() - self.config = config - self.study_id = study_id - self._base_url = f"{self.config.get_host()}/api/v1" - self._wrapper = RequestWrapper(self.config.set_up_api_conf()) - - def create_link( - self, - area_from: Area, - area_to: Area, - properties: Optional[LinkProperties] = None, - ui: Optional[LinkUi] = None, - existing_areas: Optional[MappingProxyType[str, Area]] = None, - ) -> Link: - """ - Args: - area_from: area where the link goes from - area_to: area where the link goes to - properties: link's properties. If not provided, AntaresWeb will use its own default values. - ui: link's ui characteristics. If not provided, AntaresWeb will use its own default values. - existing_areas: existing areas from study - - Returns: - The created link - - Raises: - MissingTokenError if api_token is missing - LinkCreationError if an HTTP Exception occurs - """ - base_url = f"{self._base_url}/studies/{self.study_id}" - # TODO: Currently, AntaresWeb does not have a specific endpoint for links. Once it will, we should change this logic. 
- area1_id, area2_id = sorted([area_from.id, area_to.id]) - raw_url = f"{base_url}/raw?path=input/links/{area1_id}/properties/{area2_id}" - - try: - url = f"{base_url}/links" - self._wrapper.post(url, json={"area1": area1_id, "area2": area2_id}) - - response = self._wrapper.get(raw_url) - json_file = response.json() - # TODO update to use check_if_none or similar - if properties or ui: - link_properties = json.loads( - check_if_none(properties, LinkProperties()).model_dump_json( - by_alias=True, exclude_none=True - ) - ) - link_ui = json.loads( - check_if_none(ui, LinkUi()).model_dump_json( - by_alias=True, exclude_none=True - ) - ) - body = {**link_properties, **link_ui} - if body: - json_file = _join_filter_values_for_json(json_file, body) - self._wrapper.post(raw_url, json=json_file) - - properties_keys = LinkProperties().model_dump(by_alias=True).keys() - json_properties = {} - for key in properties_keys: - # TODO: This is ugly but the web structure sucks. - value = json_file[key] - if key in ["filter-synthesis", "filter-year-by-year"]: - json_properties[key] = value.split(", ") if value else value - else: - json_properties[key] = value - del json_file[key] - link_ui = LinkUi.model_validate(json_file) - link_properties = LinkProperties.model_validate(json_properties) - - except APIError as e: - raise LinkCreationError(area_from.id, area_to.id, e.message) from e - - return Link(area_from, area_to, self, link_properties, link_ui) - - def delete_link(self, link: Link) -> None: - area_from_id = link.area_from.id - area_to_id = link.area_to.id - url = f"{self._base_url}/studies/{self.study_id}/links/{area_from_id}/{area_to_id}" - try: - self._wrapper.delete(url) - except APIError as e: - raise LinkDeletionError(link.name, e.message) from e - - def update_link_properties( - self, link: Link, properties: LinkProperties - ) -> LinkProperties: - # todo: change this code when AntaresWeb will have a real endpoint - area1_id, area2_id = sorted([link.area_from.id, 
link.area_to.id]) - raw_url = f"{self._base_url}/studies/{self.study_id}/raw?path=input/links/{area1_id}/properties/{area2_id}" - try: - new_properties = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - if not new_properties: - return link.properties - - response = self._wrapper.get(raw_url) - json_response = response.json() - for key in new_properties: - if key in ["filter-synthesis", "filter-year-by-year"]: - json_response[key] = ",".join(new_properties[key]) - else: - json_response[key] = new_properties[key] - self._wrapper.post(raw_url, json=json_response) - - keys_to_remove = set(LinkUi().model_dump(by_alias=True).keys()) - for key in keys_to_remove: - del json_response[key] - for key in json_response: - if key in ["filter-synthesis", "filter-year-by-year"]: - json_response[key] = json_response[key].split(", ") - - link_properties = LinkProperties.model_validate(json_response) - - except APIError as e: - raise LinkPropertiesUpdateError(link.name, e.message) from e - - return link_properties - - def update_link_ui(self, link: Link, ui: LinkUi) -> LinkUi: - # todo: change this code when AntaresWeb will have a real endpoint - area1_id, area2_id = sorted([link.area_from.id, link.area_to.id]) - raw_url = f"{self._base_url}/studies/{self.study_id}/raw?path=input/links/{area1_id}/properties/{area2_id}" - try: - new_ui = json.loads(ui.model_dump_json(by_alias=True, exclude_none=True)) - if not new_ui: - return link.ui - - response = self._wrapper.get(raw_url) - json_response = response.json() - json_response.update(new_ui) - self._wrapper.post(raw_url, json=json_response) - - keys_to_remove = set(LinkProperties().model_dump(by_alias=True).keys()) - for key in keys_to_remove: - del json_response[key] - - link_ui = LinkUi.model_validate(json_response) - - except APIError as e: - raise LinkUiUpdateError(link.name, e.message) from e - - return link_ui - - -def _join_filter_values_for_json(json_dict: dict, dict_to_extract: dict) -> dict: - 
for key in dict_to_extract: - if key in ["filter-synthesis", "filter-year-by-year"]: - json_dict[key] = ",".join(dict_to_extract[key]) - else: - json_dict[key] = dict_to_extract[key] - return json_dict +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import json +from types import MappingProxyType +from typing import Optional + +from antares.api_conf.api_conf import APIconf +from antares.api_conf.request_wrapper import RequestWrapper +from antares.exceptions.exceptions import ( + APIError, + LinkCreationError, + LinkDeletionError, + LinkPropertiesUpdateError, + LinkUiUpdateError, +) +from antares.model.area import Area +from antares.model.link import LinkProperties, LinkUi, Link +from antares.service.base_services import BaseLinkService +from antares.tools.ini_tool import check_if_none + + +class LinkApiService(BaseLinkService): + def __init__(self, config: APIconf, study_id: str): + super().__init__() + self.config = config + self.study_id = study_id + self._base_url = f"{self.config.get_host()}/api/v1" + self._wrapper = RequestWrapper(self.config.set_up_api_conf()) + + def create_link( + self, + area_from: Area, + area_to: Area, + properties: Optional[LinkProperties] = None, + ui: Optional[LinkUi] = None, + existing_areas: Optional[MappingProxyType[str, Area]] = None, + ) -> Link: + """ + Args: + area_from: area where the link goes from + area_to: area where the link goes to + properties: link's properties. If not provided, AntaresWeb will use its own default values. + ui: link's ui characteristics. If not provided, AntaresWeb will use its own default values. 
+ existing_areas: existing areas from study + + Returns: + The created link + + Raises: + MissingTokenError if api_token is missing + LinkCreationError if an HTTP Exception occurs + """ + base_url = f"{self._base_url}/studies/{self.study_id}" + # TODO: Currently, AntaresWeb does not have a specific endpoint for links. Once it will, we should change this logic. + area1_id, area2_id = sorted([area_from.id, area_to.id]) + raw_url = f"{base_url}/raw?path=input/links/{area1_id}/properties/{area2_id}" + + try: + url = f"{base_url}/links" + self._wrapper.post(url, json={"area1": area1_id, "area2": area2_id}) + + response = self._wrapper.get(raw_url) + json_file = response.json() + # TODO update to use check_if_none or similar + if properties or ui: + link_properties = json.loads( + check_if_none(properties, LinkProperties()).model_dump_json(by_alias=True, exclude_none=True) + ) + link_ui = json.loads(check_if_none(ui, LinkUi()).model_dump_json(by_alias=True, exclude_none=True)) + body = {**link_properties, **link_ui} + if body: + json_file = _join_filter_values_for_json(json_file, body) + self._wrapper.post(raw_url, json=json_file) + + properties_keys = LinkProperties().model_dump(by_alias=True).keys() + json_properties = {} + for key in properties_keys: + # TODO: This is ugly but the web structure sucks. 
+ value = json_file[key] + if key in ["filter-synthesis", "filter-year-by-year"]: + json_properties[key] = value.split(", ") if value else value + else: + json_properties[key] = value + del json_file[key] + link_ui = LinkUi.model_validate(json_file) + link_properties = LinkProperties.model_validate(json_properties) + + except APIError as e: + raise LinkCreationError(area_from.id, area_to.id, e.message) from e + + return Link(area_from, area_to, self, link_properties, link_ui) + + def delete_link(self, link: Link) -> None: + area_from_id = link.area_from.id + area_to_id = link.area_to.id + url = f"{self._base_url}/studies/{self.study_id}/links/{area_from_id}/{area_to_id}" + try: + self._wrapper.delete(url) + except APIError as e: + raise LinkDeletionError(link.name, e.message) from e + + def update_link_properties(self, link: Link, properties: LinkProperties) -> LinkProperties: + # todo: change this code when AntaresWeb will have a real endpoint + area1_id, area2_id = sorted([link.area_from.id, link.area_to.id]) + raw_url = f"{self._base_url}/studies/{self.study_id}/raw?path=input/links/{area1_id}/properties/{area2_id}" + try: + new_properties = json.loads(properties.model_dump_json(by_alias=True, exclude_none=True)) + if not new_properties: + return link.properties + + response = self._wrapper.get(raw_url) + json_response = response.json() + for key in new_properties: + if key in ["filter-synthesis", "filter-year-by-year"]: + json_response[key] = ",".join(new_properties[key]) + else: + json_response[key] = new_properties[key] + self._wrapper.post(raw_url, json=json_response) + + keys_to_remove = set(LinkUi().model_dump(by_alias=True).keys()) + for key in keys_to_remove: + del json_response[key] + for key in json_response: + if key in ["filter-synthesis", "filter-year-by-year"]: + json_response[key] = json_response[key].split(", ") + + link_properties = LinkProperties.model_validate(json_response) + + except APIError as e: + raise 
LinkPropertiesUpdateError(link.name, e.message) from e + + return link_properties + + def update_link_ui(self, link: Link, ui: LinkUi) -> LinkUi: + # todo: change this code when AntaresWeb will have a real endpoint + area1_id, area2_id = sorted([link.area_from.id, link.area_to.id]) + raw_url = f"{self._base_url}/studies/{self.study_id}/raw?path=input/links/{area1_id}/properties/{area2_id}" + try: + new_ui = json.loads(ui.model_dump_json(by_alias=True, exclude_none=True)) + if not new_ui: + return link.ui + + response = self._wrapper.get(raw_url) + json_response = response.json() + json_response.update(new_ui) + self._wrapper.post(raw_url, json=json_response) + + keys_to_remove = set(LinkProperties().model_dump(by_alias=True).keys()) + for key in keys_to_remove: + del json_response[key] + + link_ui = LinkUi.model_validate(json_response) + + except APIError as e: + raise LinkUiUpdateError(link.name, e.message) from e + + return link_ui + + +def _join_filter_values_for_json(json_dict: dict, dict_to_extract: dict) -> dict: + for key in dict_to_extract: + if key in ["filter-synthesis", "filter-year-by-year"]: + json_dict[key] = ",".join(dict_to_extract[key]) + else: + json_dict[key] = dict_to_extract[key] + return json_dict diff --git a/src/antares/service/api_services/renewable_api.py b/src/antares/service/api_services/renewable_api.py index 266311f9..9285359b 100644 --- a/src/antares/service/api_services/renewable_api.py +++ b/src/antares/service/api_services/renewable_api.py @@ -1,81 +1,75 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -import json -from pathlib import PurePosixPath - -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.api_conf.request_wrapper import RequestWrapper -from antares.exceptions.exceptions import ( - APIError, - RenewablePropertiesUpdateError, - RenewableMatrixDownloadError, -) -from antares.model.renewable import RenewableCluster, RenewableClusterProperties -from antares.service.api_services.utils import get_matrix -from antares.service.base_services import BaseRenewableService - - -class RenewableApiService(BaseRenewableService): - def __init__(self, config: APIconf, study_id: str): - super().__init__() - self.config = config - self.study_id = study_id - self._base_url = f"{self.config.get_host()}/api/v1" - self._wrapper = RequestWrapper(self.config.set_up_api_conf()) - - def update_renewable_properties( - self, - renewable_cluster: RenewableCluster, - properties: RenewableClusterProperties, - ) -> RenewableClusterProperties: - url = f"{self._base_url}/studies/{self.study_id}/areas/{renewable_cluster.area_id}/clusters/renewable/{renewable_cluster.id}" - try: - body = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - if not body: - return renewable_cluster.properties - - response = self._wrapper.patch(url, json=body) - json_response = response.json() - del json_response["id"] - del json_response["name"] - new_properties = RenewableClusterProperties.model_validate(json_response) - - except APIError as e: - raise RenewablePropertiesUpdateError( - renewable_cluster.id, renewable_cluster.area_id, e.message - ) from e - - return new_properties - - def get_renewable_matrix(self, renewable: RenewableCluster) -> pd.DataFrame: - try: - path = ( - PurePosixPath("input") - / "renewables" - / "series" - / f"{renewable.area_id}" - / f"{renewable.name}" - / "series" - ) - return get_matrix( - f"{self._base_url}/studies/{self.study_id}/raw?path={path}", - self._wrapper, - ) - except APIError as e: - raise 
RenewableMatrixDownloadError( - renewable.area_id, renewable.name, e.message - ) from e +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import json +from pathlib import PurePosixPath + +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.api_conf.request_wrapper import RequestWrapper +from antares.exceptions.exceptions import ( + APIError, + RenewablePropertiesUpdateError, + RenewableMatrixDownloadError, +) +from antares.model.renewable import RenewableCluster, RenewableClusterProperties +from antares.service.api_services.utils import get_matrix +from antares.service.base_services import BaseRenewableService + + +class RenewableApiService(BaseRenewableService): + def __init__(self, config: APIconf, study_id: str): + super().__init__() + self.config = config + self.study_id = study_id + self._base_url = f"{self.config.get_host()}/api/v1" + self._wrapper = RequestWrapper(self.config.set_up_api_conf()) + + def update_renewable_properties( + self, + renewable_cluster: RenewableCluster, + properties: RenewableClusterProperties, + ) -> RenewableClusterProperties: + url = f"{self._base_url}/studies/{self.study_id}/areas/{renewable_cluster.area_id}/clusters/renewable/{renewable_cluster.id}" + try: + body = json.loads(properties.model_dump_json(by_alias=True, exclude_none=True)) + if not body: + return renewable_cluster.properties + + response = self._wrapper.patch(url, json=body) + json_response = response.json() + del json_response["id"] + del json_response["name"] + new_properties = RenewableClusterProperties.model_validate(json_response) + + except APIError as e: + raise 
RenewablePropertiesUpdateError(renewable_cluster.id, renewable_cluster.area_id, e.message) from e + + return new_properties + + def get_renewable_matrix(self, renewable: RenewableCluster) -> pd.DataFrame: + try: + path = ( + PurePosixPath("input") + / "renewables" + / "series" + / f"{renewable.area_id}" + / f"{renewable.name}" + / "series" + ) + return get_matrix( + f"{self._base_url}/studies/{self.study_id}/raw?path={path}", + self._wrapper, + ) + except APIError as e: + raise RenewableMatrixDownloadError(renewable.area_id, renewable.name, e.message) from e diff --git a/src/antares/service/api_services/st_storage_api.py b/src/antares/service/api_services/st_storage_api.py index f67475b7..9e571f2b 100644 --- a/src/antares/service/api_services/st_storage_api.py +++ b/src/antares/service/api_services/st_storage_api.py @@ -1,91 +1,77 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -import json - -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.api_conf.request_wrapper import RequestWrapper -from antares.exceptions.exceptions import ( - APIError, - STStoragePropertiesUpdateError, - STStorageMatrixUploadError, - STStorageMatrixDownloadError, -) -from antares.model.st_storage import STStorage, STStorageProperties, STStorageMatrixName -from antares.service.base_services import BaseShortTermStorageService - - -class ShortTermStorageApiService(BaseShortTermStorageService): - def __init__(self, config: APIconf, study_id: str): - super().__init__() - self.config = config - self.study_id = study_id - self._base_url = f"{self.config.get_host()}/api/v1" - self._wrapper = RequestWrapper(self.config.set_up_api_conf()) - - def update_st_storage_properties( - self, st_storage: STStorage, properties: STStorageProperties - ) -> STStorageProperties: - url = f"{self._base_url}/studies/{self.study_id}/areas/{st_storage.area_id}/storages/{st_storage.id}" - try: - body = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - if not body: - return st_storage.properties - - response = self._wrapper.patch(url, json=body) - json_response = response.json() - del json_response["id"] - del json_response["name"] - new_properties = STStorageProperties.model_validate(json_response) - - except APIError as e: - raise STStoragePropertiesUpdateError( - st_storage.id, st_storage.area_id, e.message - ) from e - - return new_properties - - def upload_storage_matrix( - self, storage: STStorage, ts_name: STStorageMatrixName, matrix: pd.DataFrame - ) -> None: - url = f"{self._base_url}/studies/{self.study_id}/areas/{storage.area_id}/storages/{storage.id}/series/{ts_name.value}" - try: - body = { - "data": matrix.to_numpy().tolist(), - "index": matrix.index.tolist(), - "columns": matrix.columns.tolist(), - } - self._wrapper.put(url, json=body) - except APIError as e: - raise STStorageMatrixUploadError( - storage.area_id, 
storage.id, ts_name.value, e.message - ) from e - - def get_storage_matrix( - self, storage: STStorage, ts_name: STStorageMatrixName - ) -> pd.DataFrame: - url = f"{self._base_url}/studies/{self.study_id}/areas/{storage.area_id}/storages/{storage.id}/series/{ts_name.value}" - try: - response = self._wrapper.get(url) - json_df = response.json() - dataframe = pd.DataFrame( - data=json_df["data"], index=json_df["index"], columns=json_df["columns"] - ) - except APIError as e: - raise STStorageMatrixDownloadError( - storage.area_id, storage.id, ts_name.value, e.message - ) from e - return dataframe +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import json + +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.api_conf.request_wrapper import RequestWrapper +from antares.exceptions.exceptions import ( + APIError, + STStoragePropertiesUpdateError, + STStorageMatrixUploadError, + STStorageMatrixDownloadError, +) +from antares.model.st_storage import STStorage, STStorageProperties, STStorageMatrixName +from antares.service.base_services import BaseShortTermStorageService + + +class ShortTermStorageApiService(BaseShortTermStorageService): + def __init__(self, config: APIconf, study_id: str): + super().__init__() + self.config = config + self.study_id = study_id + self._base_url = f"{self.config.get_host()}/api/v1" + self._wrapper = RequestWrapper(self.config.set_up_api_conf()) + + def update_st_storage_properties( + self, st_storage: STStorage, properties: STStorageProperties + ) -> STStorageProperties: + url = f"{self._base_url}/studies/{self.study_id}/areas/{st_storage.area_id}/storages/{st_storage.id}" + try: + body = 
json.loads(properties.model_dump_json(by_alias=True, exclude_none=True)) + if not body: + return st_storage.properties + + response = self._wrapper.patch(url, json=body) + json_response = response.json() + del json_response["id"] + del json_response["name"] + new_properties = STStorageProperties.model_validate(json_response) + + except APIError as e: + raise STStoragePropertiesUpdateError(st_storage.id, st_storage.area_id, e.message) from e + + return new_properties + + def upload_storage_matrix(self, storage: STStorage, ts_name: STStorageMatrixName, matrix: pd.DataFrame) -> None: + url = f"{self._base_url}/studies/{self.study_id}/areas/{storage.area_id}/storages/{storage.id}/series/{ts_name.value}" + try: + body = { + "data": matrix.to_numpy().tolist(), + "index": matrix.index.tolist(), + "columns": matrix.columns.tolist(), + } + self._wrapper.put(url, json=body) + except APIError as e: + raise STStorageMatrixUploadError(storage.area_id, storage.id, ts_name.value, e.message) from e + + def get_storage_matrix(self, storage: STStorage, ts_name: STStorageMatrixName) -> pd.DataFrame: + url = f"{self._base_url}/studies/{self.study_id}/areas/{storage.area_id}/storages/{storage.id}/series/{ts_name.value}" + try: + response = self._wrapper.get(url) + json_df = response.json() + dataframe = pd.DataFrame(data=json_df["data"], index=json_df["index"], columns=json_df["columns"]) + except APIError as e: + raise STStorageMatrixDownloadError(storage.area_id, storage.id, ts_name.value, e.message) from e + return dataframe diff --git a/src/antares/service/api_services/study_api.py b/src/antares/service/api_services/study_api.py index 2b24efaa..83c653f1 100644 --- a/src/antares/service/api_services/study_api.py +++ b/src/antares/service/api_services/study_api.py @@ -1,116 +1,112 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. 
If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -import json -from typing import Optional - -from antares.api_conf.api_conf import APIconf -from antares.api_conf.request_wrapper import RequestWrapper -from antares.exceptions.exceptions import ( - APIError, - StudySettingsUpdateError, - BindingConstraintDeletionError, - StudyDeletionError, -) -from antares.model.binding_constraint import BindingConstraint -from antares.model.settings import ( - StudySettings, - GeneralProperties, - ThematicTrimming, - TimeSeriesProperties, - AdequacyPatchProperties, - AdvancedProperties, - OptimizationProperties, -) -from antares.service.base_services import BaseStudyService - - -def _returns_study_settings( - base_url: str, - study_id: str, - wrapper: RequestWrapper, - update: bool, - settings: Optional[StudySettings], -) -> Optional[StudySettings]: - settings_base_url = f"{base_url}/studies/{study_id}/config" - mapping = { - "general_properties": ("general", GeneralProperties), - "thematic_trimming": ("thematictrimming", ThematicTrimming), - "time_series_properties": ("timeseries", TimeSeriesProperties), - "adequacy_patch_properties": ("adequacypatch", AdequacyPatchProperties), - "advanced_properties": ("advancedparameters", AdvancedProperties), - "optimization_properties": ("optimization", OptimizationProperties), - "playlist": ("playlist", None), - } - if settings: - json_settings = json.loads( - settings.model_dump_json(by_alias=True, exclude_none=True) - ) - if not json_settings and update: - return None - - for key, value in json_settings.items(): - url = f"{settings_base_url}/{mapping[key][0]}/form" - wrapper.put(url, json=value) - - json_settings = {} - for settings_type, settings_tuple in mapping.items(): - settings_class = settings_tuple[1] - url = f"{settings_base_url}/{settings_tuple[0]}/form" - response = 
wrapper.get(url) - if settings_type == "playlist": - settings_property = response.json() - else: - settings_property = settings_class.model_validate(response.json()) # type: ignore - json_settings[settings_type] = settings_property - - return StudySettings.model_validate(json_settings) - - -class StudyApiService(BaseStudyService): - def __init__(self, config: APIconf, study_id: str): - super().__init__() - self._config = config - self._study_id = study_id - self._base_url = f"{self.config.get_host()}/api/v1" - self._wrapper = RequestWrapper(self.config.set_up_api_conf()) - - @property - def study_id(self) -> str: - return self._study_id - - @property - def config(self) -> APIconf: - return self._config - - def update_study_settings(self, settings: StudySettings) -> Optional[StudySettings]: - try: - new_settings = _returns_study_settings( - self._base_url, self.study_id, self._wrapper, True, settings - ) - except APIError as e: - raise StudySettingsUpdateError(self.study_id, e.message) from e - return new_settings - - def delete_binding_constraint(self, constraint: BindingConstraint) -> None: - url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{constraint.id}" - try: - self._wrapper.delete(url) - except APIError as e: - raise BindingConstraintDeletionError(constraint.id, e.message) from e - - def delete(self, children: bool) -> None: - url = f"{self._base_url}/studies/{self.study_id}?children={children}" - try: - self._wrapper.delete(url) - except APIError as e: - raise StudyDeletionError(self.study_id, e.message) from e +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +import json +from typing import Optional + +from antares.api_conf.api_conf import APIconf +from antares.api_conf.request_wrapper import RequestWrapper +from antares.exceptions.exceptions import ( + APIError, + StudySettingsUpdateError, + BindingConstraintDeletionError, + StudyDeletionError, +) +from antares.model.binding_constraint import BindingConstraint +from antares.model.settings import ( + StudySettings, + GeneralProperties, + ThematicTrimming, + TimeSeriesProperties, + AdequacyPatchProperties, + AdvancedProperties, + OptimizationProperties, +) +from antares.service.base_services import BaseStudyService + + +def _returns_study_settings( + base_url: str, + study_id: str, + wrapper: RequestWrapper, + update: bool, + settings: Optional[StudySettings], +) -> Optional[StudySettings]: + settings_base_url = f"{base_url}/studies/{study_id}/config" + mapping = { + "general_properties": ("general", GeneralProperties), + "thematic_trimming": ("thematictrimming", ThematicTrimming), + "time_series_properties": ("timeseries", TimeSeriesProperties), + "adequacy_patch_properties": ("adequacypatch", AdequacyPatchProperties), + "advanced_properties": ("advancedparameters", AdvancedProperties), + "optimization_properties": ("optimization", OptimizationProperties), + "playlist": ("playlist", None), + } + if settings: + json_settings = json.loads(settings.model_dump_json(by_alias=True, exclude_none=True)) + if not json_settings and update: + return None + + for key, value in json_settings.items(): + url = f"{settings_base_url}/{mapping[key][0]}/form" + wrapper.put(url, json=value) + + json_settings = {} + for settings_type, settings_tuple in mapping.items(): + settings_class = settings_tuple[1] + url = f"{settings_base_url}/{settings_tuple[0]}/form" + response = wrapper.get(url) + if settings_type == "playlist": + settings_property = response.json() + else: + settings_property = settings_class.model_validate(response.json()) # type: ignore + json_settings[settings_type] = 
settings_property + + return StudySettings.model_validate(json_settings) + + +class StudyApiService(BaseStudyService): + def __init__(self, config: APIconf, study_id: str): + super().__init__() + self._config = config + self._study_id = study_id + self._base_url = f"{self.config.get_host()}/api/v1" + self._wrapper = RequestWrapper(self.config.set_up_api_conf()) + + @property + def study_id(self) -> str: + return self._study_id + + @property + def config(self) -> APIconf: + return self._config + + def update_study_settings(self, settings: StudySettings) -> Optional[StudySettings]: + try: + new_settings = _returns_study_settings(self._base_url, self.study_id, self._wrapper, True, settings) + except APIError as e: + raise StudySettingsUpdateError(self.study_id, e.message) from e + return new_settings + + def delete_binding_constraint(self, constraint: BindingConstraint) -> None: + url = f"{self._base_url}/studies/{self.study_id}/bindingconstraints/{constraint.id}" + try: + self._wrapper.delete(url) + except APIError as e: + raise BindingConstraintDeletionError(constraint.id, e.message) from e + + def delete(self, children: bool) -> None: + url = f"{self._base_url}/studies/{self.study_id}?children={children}" + try: + self._wrapper.delete(url) + except APIError as e: + raise StudyDeletionError(self.study_id, e.message) from e diff --git a/src/antares/service/api_services/thermal_api.py b/src/antares/service/api_services/thermal_api.py index 191a9488..06890ca7 100644 --- a/src/antares/service/api_services/thermal_api.py +++ b/src/antares/service/api_services/thermal_api.py @@ -1,86 +1,80 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -import json -from pathlib import PurePosixPath - -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.api_conf.request_wrapper import RequestWrapper -from antares.exceptions.exceptions import ( - APIError, - ThermalPropertiesUpdateError, - ThermalMatrixDownloadError, -) -from antares.model.thermal import ( - ThermalCluster, - ThermalClusterProperties, - ThermalClusterMatrixName, -) -from antares.service.api_services.utils import get_matrix -from antares.service.base_services import BaseThermalService - - -class ThermalApiService(BaseThermalService): - def __init__(self, config: APIconf, study_id: str): - super().__init__() - self.config = config - self.study_id = study_id - self._base_url = f"{self.config.get_host()}/api/v1" - self._wrapper = RequestWrapper(self.config.set_up_api_conf()) - - def update_thermal_properties( - self, thermal_cluster: ThermalCluster, properties: ThermalClusterProperties - ) -> ThermalClusterProperties: - url = f"{self._base_url}/studies/{self.study_id}/areas/{thermal_cluster.area_id}/clusters/thermal/{thermal_cluster.id}" - try: - body = json.loads( - properties.model_dump_json(by_alias=True, exclude_none=True) - ) - if not body: - return thermal_cluster.properties - - response = self._wrapper.patch(url, json=body) - json_response = response.json() - del json_response["id"] - del json_response["name"] - new_properties = ThermalClusterProperties.model_validate(json_response) - - except APIError as e: - raise ThermalPropertiesUpdateError( - thermal_cluster.id, thermal_cluster.area_id, e.message - ) from e - - return new_properties - - def get_thermal_matrix( - self, thermal_cluster: ThermalCluster, ts_name: ThermalClusterMatrixName - ) -> pd.DataFrame: - try: - keyword = "series" if "SERIES" in ts_name.name else "prepro" - path = ( - PurePosixPath("input") - / "thermal" - / keyword - / f"{thermal_cluster.area_id}" - / f"{thermal_cluster.name.lower()}" - / ts_name.value - ) - return get_matrix( - 
f"{self._base_url}/studies/{self.study_id}/raw?path={path}", - self._wrapper, - ) - except APIError as e: - raise ThermalMatrixDownloadError( - thermal_cluster.area_id, thermal_cluster.name, ts_name.value, e.message - ) from e +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import json +from pathlib import PurePosixPath + +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.api_conf.request_wrapper import RequestWrapper +from antares.exceptions.exceptions import ( + APIError, + ThermalPropertiesUpdateError, + ThermalMatrixDownloadError, +) +from antares.model.thermal import ( + ThermalCluster, + ThermalClusterProperties, + ThermalClusterMatrixName, +) +from antares.service.api_services.utils import get_matrix +from antares.service.base_services import BaseThermalService + + +class ThermalApiService(BaseThermalService): + def __init__(self, config: APIconf, study_id: str): + super().__init__() + self.config = config + self.study_id = study_id + self._base_url = f"{self.config.get_host()}/api/v1" + self._wrapper = RequestWrapper(self.config.set_up_api_conf()) + + def update_thermal_properties( + self, thermal_cluster: ThermalCluster, properties: ThermalClusterProperties + ) -> ThermalClusterProperties: + url = f"{self._base_url}/studies/{self.study_id}/areas/{thermal_cluster.area_id}/clusters/thermal/{thermal_cluster.id}" + try: + body = json.loads(properties.model_dump_json(by_alias=True, exclude_none=True)) + if not body: + return thermal_cluster.properties + + response = self._wrapper.patch(url, json=body) + json_response = response.json() + del json_response["id"] + del json_response["name"] + new_properties = 
ThermalClusterProperties.model_validate(json_response) + + except APIError as e: + raise ThermalPropertiesUpdateError(thermal_cluster.id, thermal_cluster.area_id, e.message) from e + + return new_properties + + def get_thermal_matrix(self, thermal_cluster: ThermalCluster, ts_name: ThermalClusterMatrixName) -> pd.DataFrame: + try: + keyword = "series" if "SERIES" in ts_name.name else "prepro" + path = ( + PurePosixPath("input") + / "thermal" + / keyword + / f"{thermal_cluster.area_id}" + / f"{thermal_cluster.name.lower()}" + / ts_name.value + ) + return get_matrix( + f"{self._base_url}/studies/{self.study_id}/raw?path={path}", + self._wrapper, + ) + except APIError as e: + raise ThermalMatrixDownloadError( + thermal_cluster.area_id, thermal_cluster.name, ts_name.value, e.message + ) from e diff --git a/src/antares/service/api_services/utils.py b/src/antares/service/api_services/utils.py index a1cc26b2..32aa2184 100644 --- a/src/antares/service/api_services/utils.py +++ b/src/antares/service/api_services/utils.py @@ -1,24 +1,22 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -import pandas as pd - -from antares.api_conf.request_wrapper import RequestWrapper - - -def get_matrix(url: str, wrapper: RequestWrapper) -> pd.DataFrame: - response = wrapper.get(url) - json_df = response.json() - dataframe = pd.DataFrame( - data=json_df["data"], index=json_df["index"], columns=json_df["columns"] - ) - return dataframe +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import pandas as pd + +from antares.api_conf.request_wrapper import RequestWrapper + + +def get_matrix(url: str, wrapper: RequestWrapper) -> pd.DataFrame: + response = wrapper.get(url) + json_df = response.json() + dataframe = pd.DataFrame(data=json_df["data"], index=json_df["index"], columns=json_df["columns"]) + return dataframe diff --git a/src/antares/service/base_services.py b/src/antares/service/base_services.py index a535fdd5..fac090ac 100644 --- a/src/antares/service/base_services.py +++ b/src/antares/service/base_services.py @@ -1,660 +1,644 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from abc import ABC, abstractmethod -from types import MappingProxyType -from typing import Optional, Dict, List - -import pandas as pd - -from antares.config.base_configuration import BaseConfiguration -from antares.model.area import AreaProperties, Area, AreaUi -from antares.model.binding_constraint import ( - BindingConstraintProperties, - ConstraintTerm, - BindingConstraint, - ConstraintMatrixName, -) -from antares.model.hydro import HydroProperties, HydroMatrixName, Hydro -from antares.model.link import LinkProperties, LinkUi, Link -from antares.model.misc_gen import MiscGen -from antares.model.renewable import RenewableClusterProperties, RenewableCluster -from antares.model.reserves import Reserves -from antares.model.settings import StudySettings -from antares.model.solar import Solar -from antares.model.st_storage import STStorageProperties, STStorage -from antares.model.thermal import ( - ThermalClusterProperties, - ThermalCluster, - ThermalClusterMatrixName, -) -from antares.model.wind import Wind - - -class BaseAreaService(ABC): - @abstractmethod - def set_storage_service( - self, storage_service: "BaseShortTermStorageService" - ) -> None: - pass - - @abstractmethod - def set_thermal_service(self, thermal_service: "BaseThermalService") -> None: - pass - - @abstractmethod - def set_renewable_service(self, renewable_service: "BaseRenewableService") -> None: - pass - - @abstractmethod - def create_area( - self, - area_name: str, - properties: Optional[AreaProperties] = None, - ui: Optional[AreaUi] = None, - ) -> Area: - pass - - @abstractmethod - def create_thermal_cluster( - self, - area_id: str, - thermal_name: str, - properties: Optional[ThermalClusterProperties] = None, - ) -> ThermalCluster: - """ - Args: - area_id: the area id in which to create the thermal cluster - thermal_name: the name of the thermal cluster - properties: the properties of the thermal cluster. If not provided, AntaresWeb will use its own default values. 
- - Returns: - The created thermal cluster - """ - pass - - @abstractmethod - def create_thermal_cluster_with_matrices( - self, - area_id: str, - cluster_name: str, - parameters: ThermalClusterProperties, - prepro: Optional[pd.DataFrame], - modulation: Optional[pd.DataFrame], - series: Optional[pd.DataFrame], - CO2Cost: Optional[pd.DataFrame], - fuelCost: Optional[pd.DataFrame], - ) -> ThermalCluster: - """ - - Args: - - area_id: area id in which to create the thermal cluster - cluster_name: thermal cluster nam - parameters: properties of the thermal cluster. - prepro: matrix corresponding to prepro/data.txt - modulation: matrix corresponding to prepro/modulation.txt - series: matrix for series/series.txt - CO2Cost: matrix for series/CO2Cost.txt - fuelCost: matrix for series/fuelCost.txt - - Returns: - Thermal cluster created - """ - pass - - @abstractmethod - def create_renewable_cluster( - self, - area_id: str, - renewable_name: str, - properties: Optional[RenewableClusterProperties], - series: Optional[pd.DataFrame], - ) -> RenewableCluster: - """ - Args: - area_id: the area id in which to create the renewable cluster - renewable_name: the name of the renewable cluster - properties: the properties of the renewable cluster. If not provided, AntaresWeb will use its own default values. - series: matrix for renewables/area_id/renewable_name/series.txt - - Returns: - The created renewable cluster - """ - pass - - @abstractmethod - def create_st_storage( - self, - area_id: str, - st_storage_name: str, - properties: Optional[STStorageProperties] = None, - ) -> STStorage: - """ - Args: - - area_id: the area id in which to create the short term storage - st_storage_name: the name of the short term storage - properties: the properties of the short term storage. If not provided, AntaresWeb will use its own default values. 
- - Returns: - The created short term storage - """ - pass - - @abstractmethod - def create_wind(self, area: Area, series: Optional[pd.DataFrame]) -> Wind: - """ - Args: - area: area to create wind series matrices - series: wind/series/wind_{area_id}.txt - - """ - pass - - @abstractmethod - def create_reserves(self, area: Area, series: Optional[pd.DataFrame]) -> Reserves: - """ - Args: - area: Area to create reserves series matrices - series: Pandas dataframe stored in reserves/{area_id}.txt - - Returns: - Reserves object with the provided Pandas dataframe - """ - pass - - @abstractmethod - def create_solar(self, area: Area, series: Optional[pd.DataFrame]) -> Solar: - """ - Args: - area: area to create reserves series matrices - series: solar/series/solar_{area_id}.txt - - """ - pass - - @abstractmethod - def create_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: - """ - Args: - area: area to create reserves series matrices - series: misc-gen/miscgen-{area_id}.txt - - """ - pass - - @abstractmethod - def create_hydro( - self, - area_id: str, - properties: Optional[HydroProperties], - matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]], - ) -> Hydro: - """ - Args: - area_id: area in which hydro will be created - properties: hydro properties - matrices: matrices for hydro to be created - - """ - pass - - @abstractmethod - def update_area_properties( - self, area: Area, properties: AreaProperties - ) -> AreaProperties: - """ - Args: - area: concerned area - properties: new properties. Only registered fields will be updated. - """ - pass - - @abstractmethod - def update_area_ui(self, area: Area, ui: AreaUi) -> AreaUi: - """ - Args: - area: concerned area - ui: new ui. Only registered fields will be updated. 
- """ - pass - - @abstractmethod - def delete_area(self, area: Area) -> None: - """ - Args: - area: area object to be deleted - """ - pass - - @abstractmethod - def delete_thermal_clusters( - self, area: Area, thermal_clusters: List[ThermalCluster] - ) -> None: - """ - Args: - area: area containing the cluster - thermal_clusters: List of thermal clusters object to be deleted - """ - pass - - @abstractmethod - def delete_renewable_clusters( - self, area: Area, renewable_clusters: List[RenewableCluster] - ) -> None: - """ - Args: - area: area containing the cluster - renewable_clusters: List of renewable clusters object to be deleted - """ - pass - - @abstractmethod - def delete_st_storages(self, area: Area, storages: List[STStorage]) -> None: - """ - Args: - area: area containing the cluster - storages: List of short term storage objects to be deleted - """ - pass - - @abstractmethod - def upload_load_matrix(self, area: Area, load_matrix: pd.DataFrame) -> None: - """ - Args: - area: concerned area. - load_matrix: matrix in Dataframe format to write as the load matrix. - """ - # todo: What happens when given an empty DataFrame ? AntaresWeb doesn't handle such a case. - pass - - @abstractmethod - def get_load_matrix(self, area: Area) -> pd.DataFrame: - """ - Args: - area: concerned area. - """ - # todo: Currently we do not return index and column names because AntaresWeb doesn't send the full information. - # Once it will, there will be no change to do in the code on our side. 
- pass - - @abstractmethod - def read_thermal_cluster( - self, - area_id: str, - thermal_name: str, - properties: Optional[ThermalClusterProperties] = None, - ) -> ThermalCluster: - pass - - @abstractmethod - def read_renewable_cluster( - self, - area_id: str, - renewable_name: str, - properties: Optional[RenewableClusterProperties] = None, - series: Optional[pd.DataFrame] = None, - ) -> RenewableCluster: - pass - - @abstractmethod - def read_st_storage( - self, - area_id: str, - st_storage_name: str, - properties: Optional[STStorageProperties] = None, - ) -> STStorage: - pass - - @abstractmethod - def read_wind(self, area: Area, series: Optional[pd.DataFrame]) -> Wind: - pass - - @abstractmethod - def read_reserves(self, area: Area, series: Optional[pd.DataFrame]) -> Reserves: - pass - - @abstractmethod - def read_solar(self, area: Area, series: Optional[pd.DataFrame]) -> Solar: - pass - - @abstractmethod - def read_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: - pass - - @abstractmethod - def read_hydro( - self, - area_id: str, - properties: Optional[HydroProperties] = None, - matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]] = None, - ) -> Hydro: - pass - - @abstractmethod - def read_area( - self, - area_name: str, - properties: Optional[AreaProperties] = None, - ui: Optional[AreaUi] = None, - ) -> Area: - """ - Args: - area_name: area to be added to study - properties: area's properties. If not provided, default values will be used. - ui: area's ui characteristics. If not provided, default values will be used. 
- - Returns: area name if success or Error if area can not be - created - """ - - pass - - -class BaseLinkService(ABC): - @abstractmethod - def create_link( - self, - area_from: Area, - area_to: Area, - properties: Optional[LinkProperties] = None, - ui: Optional[LinkUi] = None, - existing_areas: Optional[MappingProxyType[str, Area]] = None, - ) -> Link: - """ - Args: - area_from: area where the link goes from - area_to: area where the link goes to - properties: link's properties - ui: link's ui characteristics - existing_areas: existing areas from study - - Returns: - The created link - """ - pass - - @abstractmethod - def delete_link(self, link: Link) -> None: - """ - Args: - link: link object to be deleted - """ - pass - - @abstractmethod - def update_link_properties( - self, link: Link, properties: LinkProperties - ) -> LinkProperties: - """ - Args: - link: concerned link - properties: new properties. Only registered fields will be updated. - """ - pass - - @abstractmethod - def update_link_ui(self, link: Link, ui: LinkUi) -> LinkUi: - """ - Args: - link: concerned link - ui: new ui. Only registered fields will be updated. - """ - pass - - @abstractmethod - def read_link( - self, - area_from: Area, - area_to: Area, - properties: Optional[LinkProperties] = None, - ui: Optional[LinkUi] = None, - existing_areas: Optional[MappingProxyType[str, Area]] = None, - ) -> Link: - """ - Args: - area_from: area where the link goes from - area_to: area where the link goes to - properties: link's properties - ui: link's ui characteristics - existing_areas: existing areas from study - - Returns: - The created link - """ - pass - - -class BaseThermalService(ABC): - @abstractmethod - def update_thermal_properties( - self, thermal_cluster: ThermalCluster, properties: ThermalClusterProperties - ) -> ThermalClusterProperties: - """ - Args: - thermal_cluster: concerned cluster - properties: new properties. Only registered fields will be updated. 
- """ - pass - - @abstractmethod - def get_thermal_matrix( - self, thermal_cluster: ThermalCluster, ts_name: ThermalClusterMatrixName - ) -> pd.DataFrame: - """ - Args: - thermal_cluster: cluster to retrieve matrix - ts_name: matrix name - - Returns: matrix requested - - """ - pass - - -class BaseBindingConstraintService(ABC): - @abstractmethod - def create_binding_constraint( - self, - name: str, - properties: Optional[BindingConstraintProperties] = None, - terms: Optional[List[ConstraintTerm]] = None, - less_term_matrix: Optional[pd.DataFrame] = None, - equal_term_matrix: Optional[pd.DataFrame] = None, - greater_term_matrix: Optional[pd.DataFrame] = None, - ) -> BindingConstraint: - """ - Args: - name: the binding constraint name - properties: the properties of the constraint. If not provided, AntaresWeb will use its own default values. - terms: the terms of the constraint. If not provided, no term will be created. - less_term_matrix: matrix corresponding to the lower bound of the constraint. If not provided, no matrix will be created. - equal_term_matrix: matrix corresponding to the equality bound of the constraint. If not provided, no matrix will be created. - greater_term_matrix: matrix corresponding to the upper bound of the constraint. If not provided, no matrix will be created. - - Returns: - The created binding constraint - """ - pass - - @abstractmethod - def add_constraint_terms( - self, constraint: BindingConstraint, terms: List[ConstraintTerm] - ) -> List[ConstraintTerm]: - """ - Args: - constraint: the concerned binding constraint - terms: the terms to add to the constraint. 
- - Returns: - The created terms - """ - pass - - @abstractmethod - def delete_binding_constraint_term(self, constraint_id: str, term_id: str) -> None: - """ - Args: - constraint_id: binding constraint's id containing the term - term_id: binding constraint term to be deleted - """ - pass - - @abstractmethod - def update_binding_constraint_properties( - self, - binding_constraint: BindingConstraint, - properties: BindingConstraintProperties, - ) -> BindingConstraintProperties: - """ - Args: - binding_constraint: concerned binding_constraint - properties: new properties. Only registered fields will be updated. - """ - pass - - @abstractmethod - def get_constraint_matrix( - self, constraint: BindingConstraint, matrix_name: ConstraintMatrixName - ) -> pd.DataFrame: - """ - Args: - constraint: the concerned binding constraint - matrix_name: the matrix suffix. - """ - pass - - @abstractmethod - def update_constraint_matrix( - self, - constraint: BindingConstraint, - matrix_name: ConstraintMatrixName, - matrix: pd.DataFrame, - ) -> None: - """ - Args: - constraint: the concerned binding constraint - matrix_name: the matrix suffix. - matrix: matrix to upload (in Dataframe format) - """ - pass - - @abstractmethod - def read_binding_constraint( - self, - name: str, - properties: Optional[BindingConstraintProperties] = None, - terms: Optional[List[ConstraintTerm]] = None, - less_term_matrix: Optional[pd.DataFrame] = None, - equal_term_matrix: Optional[pd.DataFrame] = None, - greater_term_matrix: Optional[pd.DataFrame] = None, - ) -> BindingConstraint: - """ - Args: - name: the binding constraint name - properties: the properties of the constraint. If not provided, AntaresWeb will use its own default values. - terms: the terms of the constraint. If not provided, no term will be created. - less_term_matrix: matrix corresponding to the lower bound of the constraint. If not provided, no matrix will be created. 
- equal_term_matrix: matrix corresponding to the equality bound of the constraint. If not provided, no matrix will be created. - greater_term_matrix: matrix corresponding to the upper bound of the constraint. If not provided, no matrix will be created. - - Returns: - The created binding constraint - """ - pass - - -class BaseStudyService(ABC): - @property - @abstractmethod - def study_id(self) -> str: - """The ID for the study""" - pass - - @property - @abstractmethod - def config(self) -> BaseConfiguration: - """The configuration of the study.""" - pass - - @abstractmethod - def update_study_settings(self, settings: StudySettings) -> Optional[StudySettings]: - """ - Args: - settings: new study settings. Only registered fields will be updated. - """ - pass - - @abstractmethod - def delete_binding_constraint(self, constraint: BindingConstraint) -> None: - """ - Args: - constraint: binding constraint object to be deleted - """ - pass - - @abstractmethod - def delete(self, children: bool) -> None: - """ - Deletes the study and its children if children is True - """ - pass - - @abstractmethod - def read_areas(self) -> None: - """ - Read areas - """ - pass - - -class BaseRenewableService(ABC): - @abstractmethod - def update_renewable_properties( - self, - renewable_cluster: RenewableCluster, - properties: RenewableClusterProperties, - ) -> RenewableClusterProperties: - """ - Args: - renewable_cluster: concerned cluster - properties: new properties. Only registered fields will be updated. 
- """ - pass - - @abstractmethod - def get_renewable_matrix( - self, - renewable: RenewableCluster, - ) -> pd.DataFrame: - """ - Args: - renewable: renewable cluster to retrieve matrix - - Returns: matrix requested - - """ - pass - - -class BaseShortTermStorageService(ABC): - @abstractmethod - def update_st_storage_properties( - self, st_storage: STStorage, properties: STStorageProperties - ) -> STStorageProperties: - """ - Args: - st_storage: concerned storage - properties: new properties. Only registered fields will be updated. - """ - pass +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from abc import ABC, abstractmethod +from types import MappingProxyType +from typing import Optional, Dict, List + +import pandas as pd + +from antares.config.base_configuration import BaseConfiguration +from antares.model.area import AreaProperties, Area, AreaUi +from antares.model.binding_constraint import ( + BindingConstraintProperties, + ConstraintTerm, + BindingConstraint, + ConstraintMatrixName, +) +from antares.model.hydro import HydroProperties, HydroMatrixName, Hydro +from antares.model.link import LinkProperties, LinkUi, Link +from antares.model.misc_gen import MiscGen +from antares.model.renewable import RenewableClusterProperties, RenewableCluster +from antares.model.reserves import Reserves +from antares.model.settings import StudySettings +from antares.model.solar import Solar +from antares.model.st_storage import STStorageProperties, STStorage +from antares.model.thermal import ( + ThermalClusterProperties, + ThermalCluster, + ThermalClusterMatrixName, +) +from antares.model.wind import Wind + + +class BaseAreaService(ABC): + @abstractmethod + 
def set_storage_service(self, storage_service: "BaseShortTermStorageService") -> None: + pass + + @abstractmethod + def set_thermal_service(self, thermal_service: "BaseThermalService") -> None: + pass + + @abstractmethod + def set_renewable_service(self, renewable_service: "BaseRenewableService") -> None: + pass + + @abstractmethod + def create_area( + self, + area_name: str, + properties: Optional[AreaProperties] = None, + ui: Optional[AreaUi] = None, + ) -> Area: + pass + + @abstractmethod + def create_thermal_cluster( + self, + area_id: str, + thermal_name: str, + properties: Optional[ThermalClusterProperties] = None, + ) -> ThermalCluster: + """ + Args: + area_id: the area id in which to create the thermal cluster + thermal_name: the name of the thermal cluster + properties: the properties of the thermal cluster. If not provided, AntaresWeb will use its own default values. + + Returns: + The created thermal cluster + """ + pass + + @abstractmethod + def create_thermal_cluster_with_matrices( + self, + area_id: str, + cluster_name: str, + parameters: ThermalClusterProperties, + prepro: Optional[pd.DataFrame], + modulation: Optional[pd.DataFrame], + series: Optional[pd.DataFrame], + CO2Cost: Optional[pd.DataFrame], + fuelCost: Optional[pd.DataFrame], + ) -> ThermalCluster: + """ + + Args: + + area_id: area id in which to create the thermal cluster + cluster_name: thermal cluster nam + parameters: properties of the thermal cluster. 
+ prepro: matrix corresponding to prepro/data.txt + modulation: matrix corresponding to prepro/modulation.txt + series: matrix for series/series.txt + CO2Cost: matrix for series/CO2Cost.txt + fuelCost: matrix for series/fuelCost.txt + + Returns: + Thermal cluster created + """ + pass + + @abstractmethod + def create_renewable_cluster( + self, + area_id: str, + renewable_name: str, + properties: Optional[RenewableClusterProperties], + series: Optional[pd.DataFrame], + ) -> RenewableCluster: + """ + Args: + area_id: the area id in which to create the renewable cluster + renewable_name: the name of the renewable cluster + properties: the properties of the renewable cluster. If not provided, AntaresWeb will use its own default values. + series: matrix for renewables/area_id/renewable_name/series.txt + + Returns: + The created renewable cluster + """ + pass + + @abstractmethod + def create_st_storage( + self, + area_id: str, + st_storage_name: str, + properties: Optional[STStorageProperties] = None, + ) -> STStorage: + """ + Args: + + area_id: the area id in which to create the short term storage + st_storage_name: the name of the short term storage + properties: the properties of the short term storage. If not provided, AntaresWeb will use its own default values. 
+ + Returns: + The created short term storage + """ + pass + + @abstractmethod + def create_wind(self, area: Area, series: Optional[pd.DataFrame]) -> Wind: + """ + Args: + area: area to create wind series matrices + series: wind/series/wind_{area_id}.txt + + """ + pass + + @abstractmethod + def create_reserves(self, area: Area, series: Optional[pd.DataFrame]) -> Reserves: + """ + Args: + area: Area to create reserves series matrices + series: Pandas dataframe stored in reserves/{area_id}.txt + + Returns: + Reserves object with the provided Pandas dataframe + """ + pass + + @abstractmethod + def create_solar(self, area: Area, series: Optional[pd.DataFrame]) -> Solar: + """ + Args: + area: area to create reserves series matrices + series: solar/series/solar_{area_id}.txt + + """ + pass + + @abstractmethod + def create_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: + """ + Args: + area: area to create reserves series matrices + series: misc-gen/miscgen-{area_id}.txt + + """ + pass + + @abstractmethod + def create_hydro( + self, + area_id: str, + properties: Optional[HydroProperties], + matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]], + ) -> Hydro: + """ + Args: + area_id: area in which hydro will be created + properties: hydro properties + matrices: matrices for hydro to be created + + """ + pass + + @abstractmethod + def update_area_properties(self, area: Area, properties: AreaProperties) -> AreaProperties: + """ + Args: + area: concerned area + properties: new properties. Only registered fields will be updated. + """ + pass + + @abstractmethod + def update_area_ui(self, area: Area, ui: AreaUi) -> AreaUi: + """ + Args: + area: concerned area + ui: new ui. Only registered fields will be updated. 
+ """ + pass + + @abstractmethod + def delete_area(self, area: Area) -> None: + """ + Args: + area: area object to be deleted + """ + pass + + @abstractmethod + def delete_thermal_clusters(self, area: Area, thermal_clusters: List[ThermalCluster]) -> None: + """ + Args: + area: area containing the cluster + thermal_clusters: List of thermal clusters object to be deleted + """ + pass + + @abstractmethod + def delete_renewable_clusters(self, area: Area, renewable_clusters: List[RenewableCluster]) -> None: + """ + Args: + area: area containing the cluster + renewable_clusters: List of renewable clusters object to be deleted + """ + pass + + @abstractmethod + def delete_st_storages(self, area: Area, storages: List[STStorage]) -> None: + """ + Args: + area: area containing the cluster + storages: List of short term storage objects to be deleted + """ + pass + + @abstractmethod + def upload_load_matrix(self, area: Area, load_matrix: pd.DataFrame) -> None: + """ + Args: + area: concerned area. + load_matrix: matrix in Dataframe format to write as the load matrix. + """ + # todo: What happens when given an empty DataFrame ? AntaresWeb doesn't handle such a case. + pass + + @abstractmethod + def get_load_matrix(self, area: Area) -> pd.DataFrame: + """ + Args: + area: concerned area. + """ + # todo: Currently we do not return index and column names because AntaresWeb doesn't send the full information. + # Once it will, there will be no change to do in the code on our side. 
+ pass + + @abstractmethod + def read_thermal_cluster( + self, + area_id: str, + thermal_name: str, + properties: Optional[ThermalClusterProperties] = None, + ) -> ThermalCluster: + pass + + @abstractmethod + def read_renewable_cluster( + self, + area_id: str, + renewable_name: str, + properties: Optional[RenewableClusterProperties] = None, + series: Optional[pd.DataFrame] = None, + ) -> RenewableCluster: + pass + + @abstractmethod + def read_st_storage( + self, + area_id: str, + st_storage_name: str, + properties: Optional[STStorageProperties] = None, + ) -> STStorage: + pass + + @abstractmethod + def read_wind(self, area: Area, series: Optional[pd.DataFrame]) -> Wind: + pass + + @abstractmethod + def read_reserves(self, area: Area, series: Optional[pd.DataFrame]) -> Reserves: + pass + + @abstractmethod + def read_solar(self, area: Area, series: Optional[pd.DataFrame]) -> Solar: + pass + + @abstractmethod + def read_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: + pass + + @abstractmethod + def read_hydro( + self, + area_id: str, + properties: Optional[HydroProperties] = None, + matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]] = None, + ) -> Hydro: + pass + + @abstractmethod + def read_area( + self, + area_name: str, + properties: Optional[AreaProperties] = None, + ui: Optional[AreaUi] = None, + ) -> Area: + """ + Args: + area_name: area to be added to study + properties: area's properties. If not provided, default values will be used. + ui: area's ui characteristics. If not provided, default values will be used. 
+ + Returns: area name if success or Error if area can not be + created + """ + + pass + + +class BaseLinkService(ABC): + @abstractmethod + def create_link( + self, + area_from: Area, + area_to: Area, + properties: Optional[LinkProperties] = None, + ui: Optional[LinkUi] = None, + existing_areas: Optional[MappingProxyType[str, Area]] = None, + ) -> Link: + """ + Args: + area_from: area where the link goes from + area_to: area where the link goes to + properties: link's properties + ui: link's ui characteristics + existing_areas: existing areas from study + + Returns: + The created link + """ + pass + + @abstractmethod + def delete_link(self, link: Link) -> None: + """ + Args: + link: link object to be deleted + """ + pass + + @abstractmethod + def update_link_properties(self, link: Link, properties: LinkProperties) -> LinkProperties: + """ + Args: + link: concerned link + properties: new properties. Only registered fields will be updated. + """ + pass + + @abstractmethod + def update_link_ui(self, link: Link, ui: LinkUi) -> LinkUi: + """ + Args: + link: concerned link + ui: new ui. Only registered fields will be updated. + """ + pass + + @abstractmethod + def read_link( + self, + area_from: Area, + area_to: Area, + properties: Optional[LinkProperties] = None, + ui: Optional[LinkUi] = None, + existing_areas: Optional[MappingProxyType[str, Area]] = None, + ) -> Link: + """ + Args: + area_from: area where the link goes from + area_to: area where the link goes to + properties: link's properties + ui: link's ui characteristics + existing_areas: existing areas from study + + Returns: + The created link + """ + pass + + +class BaseThermalService(ABC): + @abstractmethod + def update_thermal_properties( + self, thermal_cluster: ThermalCluster, properties: ThermalClusterProperties + ) -> ThermalClusterProperties: + """ + Args: + thermal_cluster: concerned cluster + properties: new properties. Only registered fields will be updated. 
+ """ + pass + + @abstractmethod + def get_thermal_matrix(self, thermal_cluster: ThermalCluster, ts_name: ThermalClusterMatrixName) -> pd.DataFrame: + """ + Args: + thermal_cluster: cluster to retrieve matrix + ts_name: matrix name + + Returns: matrix requested + + """ + pass + + +class BaseBindingConstraintService(ABC): + @abstractmethod + def create_binding_constraint( + self, + name: str, + properties: Optional[BindingConstraintProperties] = None, + terms: Optional[List[ConstraintTerm]] = None, + less_term_matrix: Optional[pd.DataFrame] = None, + equal_term_matrix: Optional[pd.DataFrame] = None, + greater_term_matrix: Optional[pd.DataFrame] = None, + ) -> BindingConstraint: + """ + Args: + name: the binding constraint name + properties: the properties of the constraint. If not provided, AntaresWeb will use its own default values. + terms: the terms of the constraint. If not provided, no term will be created. + less_term_matrix: matrix corresponding to the lower bound of the constraint. If not provided, no matrix will be created. + equal_term_matrix: matrix corresponding to the equality bound of the constraint. If not provided, no matrix will be created. + greater_term_matrix: matrix corresponding to the upper bound of the constraint. If not provided, no matrix will be created. + + Returns: + The created binding constraint + """ + pass + + @abstractmethod + def add_constraint_terms(self, constraint: BindingConstraint, terms: List[ConstraintTerm]) -> List[ConstraintTerm]: + """ + Args: + constraint: the concerned binding constraint + terms: the terms to add to the constraint. 
+ + Returns: + The created terms + """ + pass + + @abstractmethod + def delete_binding_constraint_term(self, constraint_id: str, term_id: str) -> None: + """ + Args: + constraint_id: binding constraint's id containing the term + term_id: binding constraint term to be deleted + """ + pass + + @abstractmethod + def update_binding_constraint_properties( + self, + binding_constraint: BindingConstraint, + properties: BindingConstraintProperties, + ) -> BindingConstraintProperties: + """ + Args: + binding_constraint: concerned binding_constraint + properties: new properties. Only registered fields will be updated. + """ + pass + + @abstractmethod + def get_constraint_matrix(self, constraint: BindingConstraint, matrix_name: ConstraintMatrixName) -> pd.DataFrame: + """ + Args: + constraint: the concerned binding constraint + matrix_name: the matrix suffix. + """ + pass + + @abstractmethod + def update_constraint_matrix( + self, + constraint: BindingConstraint, + matrix_name: ConstraintMatrixName, + matrix: pd.DataFrame, + ) -> None: + """ + Args: + constraint: the concerned binding constraint + matrix_name: the matrix suffix. + matrix: matrix to upload (in Dataframe format) + """ + pass + + @abstractmethod + def read_binding_constraint( + self, + name: str, + properties: Optional[BindingConstraintProperties] = None, + terms: Optional[List[ConstraintTerm]] = None, + less_term_matrix: Optional[pd.DataFrame] = None, + equal_term_matrix: Optional[pd.DataFrame] = None, + greater_term_matrix: Optional[pd.DataFrame] = None, + ) -> BindingConstraint: + """ + Args: + name: the binding constraint name + properties: the properties of the constraint. If not provided, AntaresWeb will use its own default values. + terms: the terms of the constraint. If not provided, no term will be created. + less_term_matrix: matrix corresponding to the lower bound of the constraint. If not provided, no matrix will be created. 
+ equal_term_matrix: matrix corresponding to the equality bound of the constraint. If not provided, no matrix will be created. + greater_term_matrix: matrix corresponding to the upper bound of the constraint. If not provided, no matrix will be created. + + Returns: + The created binding constraint + """ + pass + + +class BaseStudyService(ABC): + @property + @abstractmethod + def study_id(self) -> str: + """The ID for the study""" + pass + + @property + @abstractmethod + def config(self) -> BaseConfiguration: + """The configuration of the study.""" + pass + + @abstractmethod + def update_study_settings(self, settings: StudySettings) -> Optional[StudySettings]: + """ + Args: + settings: new study settings. Only registered fields will be updated. + """ + pass + + @abstractmethod + def delete_binding_constraint(self, constraint: BindingConstraint) -> None: + """ + Args: + constraint: binding constraint object to be deleted + """ + pass + + @abstractmethod + def delete(self, children: bool) -> None: + """ + Deletes the study and its children if children is True + """ + pass + + @abstractmethod + def read_areas(self) -> None: + """ + Read areas + """ + pass + + +class BaseRenewableService(ABC): + @abstractmethod + def update_renewable_properties( + self, + renewable_cluster: RenewableCluster, + properties: RenewableClusterProperties, + ) -> RenewableClusterProperties: + """ + Args: + renewable_cluster: concerned cluster + properties: new properties. Only registered fields will be updated. 
+ """ + pass + + @abstractmethod + def get_renewable_matrix( + self, + renewable: RenewableCluster, + ) -> pd.DataFrame: + """ + Args: + renewable: renewable cluster to retrieve matrix + + Returns: matrix requested + + """ + pass + + +class BaseShortTermStorageService(ABC): + @abstractmethod + def update_st_storage_properties( + self, st_storage: STStorage, properties: STStorageProperties + ) -> STStorageProperties: + """ + Args: + st_storage: concerned storage + properties: new properties. Only registered fields will be updated. + """ + pass diff --git a/src/antares/service/local_services/area_local.py b/src/antares/service/local_services/area_local.py index 1fefde37..09bdece8 100644 --- a/src/antares/service/local_services/area_local.py +++ b/src/antares/service/local_services/area_local.py @@ -1,502 +1,444 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -import logging -import os -from configparser import ConfigParser -from typing import Optional, Dict, List, Any - -import pandas as pd - -from antares.config.local_configuration import LocalConfiguration -from antares.exceptions.exceptions import CustomError -from antares.model.area import ( - AreaProperties, - AreaUi, - Area, - AreaPropertiesLocal, - AreaUiLocal, -) -from antares.model.hydro import ( - HydroProperties, - HydroMatrixName, - Hydro, - HydroPropertiesLocal, -) -from antares.model.misc_gen import MiscGen -from antares.model.renewable import ( - RenewableClusterProperties, - RenewableCluster, - RenewableClusterPropertiesLocal, -) -from antares.model.reserves import Reserves -from antares.model.solar import Solar -from antares.model.st_storage import ( - STStorageProperties, - STStorage, - STStoragePropertiesLocal, -) -from antares.model.thermal import ( - ThermalClusterProperties, - ThermalCluster, - ThermalClusterPropertiesLocal, -) -from antares.model.wind import Wind -from antares.service.base_services import ( - BaseAreaService, - BaseShortTermStorageService, - BaseThermalService, - BaseRenewableService, -) -from antares.tools.ini_tool import IniFileTypes, IniFile -from antares.tools.time_series_tool import TimeSeriesFile, TimeSeriesFileType - - -def _sets_ini_content() -> ConfigParser: - """ - Returns: sets.ini contents with default values - """ - sets_ini = ConfigParser() - sets_ini_dict = { - "all areas": { - "caption": "All areas", - "comments": "Spatial aggregates on all areas", - "output": "false", - "apply-filter": "add-all", - } - } - sets_ini.read_dict(sets_ini_dict) - return sets_ini - - -class AreaLocalService(BaseAreaService): - def __init__( - self, config: LocalConfiguration, study_name: str, **kwargs: Any - ) -> None: - super().__init__(**kwargs) - self.config = config - self.study_name = study_name - - def set_storage_service(self, storage_service: BaseShortTermStorageService) -> None: - self.storage_service = storage_service - - 
def set_thermal_service(self, thermal_service: BaseThermalService) -> None: - self.thermal_service = thermal_service - - def set_renewable_service(self, renewable_service: BaseRenewableService) -> None: - self.renewable_service = renewable_service - - def create_thermal_cluster( - self, - area_id: str, - thermal_name: str, - properties: Optional[ThermalClusterProperties] = None, - ) -> ThermalCluster: - local_thermal_properties = ThermalClusterPropertiesLocal( - thermal_name, properties - ) - - list_ini = IniFile( - self.config.study_path, IniFileTypes.THERMAL_LIST_INI, area_name=area_id - ) - list_ini.add_section(local_thermal_properties.list_ini_fields) - list_ini.write_ini_file(sort_sections=True) - - return ThermalCluster( - self.thermal_service, - area_id, - thermal_name, - local_thermal_properties.yield_thermal_cluster_properties(), - ) - - def create_thermal_cluster_with_matrices( - self, - area_id: str, - cluster_name: str, - parameters: ThermalClusterProperties, - prepro: Optional[pd.DataFrame], - modulation: Optional[pd.DataFrame], - series: Optional[pd.DataFrame], - CO2Cost: Optional[pd.DataFrame], - fuelCost: Optional[pd.DataFrame], - ) -> ThermalCluster: - raise NotImplementedError - - def create_renewable_cluster( - self, - area_id: str, - renewable_name: str, - properties: Optional[RenewableClusterProperties] = None, - series: Optional[pd.DataFrame] = None, - ) -> RenewableCluster: - local_properties = RenewableClusterPropertiesLocal(renewable_name, properties) - - list_ini = IniFile( - self.config.study_path, IniFileTypes.RENEWABLES_LIST_INI, area_name=area_id - ) - list_ini.add_section(local_properties.ini_fields) - list_ini.write_ini_file() - - return RenewableCluster( - self.renewable_service, - area_id, - renewable_name, - local_properties.yield_renewable_cluster_properties(), - ) - - def create_st_storage( - self, - area_id: str, - st_storage_name: str, - properties: Optional[STStorageProperties] = None, - ) -> STStorage: - 
local_st_storage_properties = STStoragePropertiesLocal( - st_storage_name, properties - ) - - list_ini = IniFile( - self.config.study_path, IniFileTypes.ST_STORAGE_LIST_INI, area_name=area_id - ) - list_ini.add_section(local_st_storage_properties.list_ini_fields) - list_ini.write_ini_file(sort_sections=True) - - return STStorage( - self.storage_service, - area_id, - st_storage_name, - local_st_storage_properties.yield_st_storage_properties(), - ) - - def create_wind(self, area: Area, series: Optional[pd.DataFrame]) -> Wind: - series = series if series is not None else pd.DataFrame([]) - local_file = TimeSeriesFile( - TimeSeriesFileType.WIND, self.config.study_path, area.id, series - ) - return Wind(series, local_file) - - def create_reserves(self, area: Area, series: Optional[pd.DataFrame]) -> Reserves: - series = series if series is not None else pd.DataFrame([]) - local_file = TimeSeriesFile( - TimeSeriesFileType.RESERVES, self.config.study_path, area.id, series - ) - return Reserves(series, local_file) - - def create_solar(self, area: Area, series: Optional[pd.DataFrame]) -> Solar: - series = series if series is not None else pd.DataFrame([]) - local_file = TimeSeriesFile( - TimeSeriesFileType.SOLAR, self.config.study_path, area.id, series - ) - return Solar(series, local_file) - - def create_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: - series = series if series is not None else pd.DataFrame([]) - local_file = TimeSeriesFile( - TimeSeriesFileType.MISC_GEN, self.config.study_path, area.id, series - ) - return MiscGen(series, local_file) - - def create_hydro( - self, - area_id: str, - properties: Optional[HydroProperties] = None, - matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]] = None, - ) -> Hydro: - local_hydro_properties = HydroPropertiesLocal(area_id, properties) - - list_ini = IniFile(self.config.study_path, IniFileTypes.HYDRO_INI) - list_ini.add_section(local_hydro_properties.hydro_ini_fields) - 
list_ini.write_ini_file(sort_section_content=True) - - return Hydro(self, area_id, local_hydro_properties.yield_hydro_properties()) - - def create_area( - self, - area_name: str, - properties: Optional[AreaProperties] = None, - ui: Optional[AreaUi] = None, - ) -> Area: - """ - Args: - area_name: area to be added to study - properties: area's properties. If not provided, default values will be used. - ui: area's ui characteristics. If not provided, default values will be used. - - Returns: area name if success or Error if area can not be - created - """ - - def _line_exists_in_file(file_content: str, line_to_add: str) -> bool: - """ - Args: - file_content: file content to check - line_to_add: line to add - - Returns: True if line is already present in file. - - """ - return line_to_add.strip() in file_content.split("\n") - - study_directory = self.config.local_path / self.study_name / "input" - areas_directory = study_directory / "areas" - new_area_directory = areas_directory / area_name - - # Create "areas" directory if it doesn't exist - os.makedirs(new_area_directory, exist_ok=True) - - list_path = areas_directory / "list.txt" - - area_to_add = f"{area_name}\n" - try: - if os.path.isfile(list_path): - with open(list_path, "r") as list_file: - list_file_content = list_file.read() - if _line_exists_in_file(list_file_content, area_to_add): - raise ValueError( - f"The Area '{area_name}' already exists in the study {self.study_name}." 
- ) - updated_list = sorted( - list_file_content.splitlines(keepends=True) + [area_to_add] - ) - else: - updated_list = [area_to_add] - - # Write area(s) to file list.txt - with open(list_path, "w") as list_txt: - list_txt.write("".join(map(str, updated_list))) - - # TODO: Handle districts in sets.ini later - sets_ini_content = _sets_ini_content() - - with (self.config.study_path / IniFileTypes.AREAS_SETS_INI.value).open( - "w" - ) as sets_ini: - sets_ini_content.write(sets_ini) - - local_properties = ( - AreaPropertiesLocal(properties) if properties else AreaPropertiesLocal() - ) - - adequacy_patch_ini = IniFile( - self.config.study_path, IniFileTypes.AREA_ADEQUACY_PATCH_INI, area_name - ) - adequacy_patch_ini.add_section(local_properties.adequacy_patch_mode()) - adequacy_patch_ini.write_ini_file() - - optimization_ini = ConfigParser() - optimization_ini.read_dict( - local_properties.model_dump(by_alias=True, exclude_none=True) - ) - - with open( - new_area_directory / "optimization.ini", "w" - ) as optimization_ini_file: - optimization_ini.write(optimization_ini_file) - - areas_ini = IniFile(self.config.study_path, IniFileTypes.THERMAL_AREAS_INI) - if not areas_ini.ini_dict: - areas_ini.add_section({"unserverdenergycost": {}}) - areas_ini.add_section({"spilledenergycost": {}}) - areas_ini.write_ini_file() - areas_ini.parsed_ini["unserverdenergycost"][area_name] = ( - local_properties.nodal_optimization["average-unsupplied-energy-cost"] - ) - areas_ini.parsed_ini["spilledenergycost"][area_name] = ( - local_properties.nodal_optimization["average-spilled-energy-cost"] - ) - areas_ini.write_ini_file() - - local_ui = AreaUiLocal(ui) if ui else AreaUiLocal() - ui_ini = ConfigParser() - ui_ini.read_dict(local_ui.model_dump(exclude_none=True)) - with open(new_area_directory / "ui.ini", "w") as ui_ini_file: - ui_ini.write(ui_ini_file) - - except Exception as e: - raise CustomError(f"Error during area creation: {e}") from e - - logging.info(f"Area {area_name} created 
successfully!") - created_area = Area( - name=area_name, - area_service=self, - storage_service=self.storage_service, - thermal_service=self.thermal_service, - renewable_service=self.renewable_service, - properties=local_properties.yield_area_properties(), - ui=local_ui.yield_area_ui(), - ) - created_area.create_hydro() - return created_area - - def delete_area(self, area: Area) -> None: - raise NotImplementedError - - def update_area_properties( - self, area: Area, properties: AreaProperties - ) -> AreaProperties: - raise NotImplementedError - - def update_area_ui(self, area: Area, ui: AreaUi) -> AreaUi: - raise NotImplementedError - - def delete_thermal_clusters( - self, area: Area, thermal_clusters: List[ThermalCluster] - ) -> None: - raise NotImplementedError - - def delete_renewable_clusters( - self, area: Area, renewable_clusters: List[RenewableCluster] - ) -> None: - raise NotImplementedError - - def delete_st_storages(self, area: Area, storages: List[STStorage]) -> None: - raise NotImplementedError - - def upload_load_matrix(self, area: Area, load_matrix: pd.DataFrame) -> None: - raise NotImplementedError - - def get_load_matrix(self, area: Area) -> pd.DataFrame: - raise NotImplementedError - - def read_thermal_cluster( - self, - area_id: str, - thermal_name: str, - properties: Optional[ThermalClusterProperties] = None, - ) -> ThermalCluster: - local_thermal_properties = ThermalClusterPropertiesLocal( - thermal_name, properties - ) - - list_ini = IniFile( - self.config.study_path, IniFileTypes.THERMAL_LIST_INI, area_name=area_id - ) - list_ini.add_section(local_thermal_properties.list_ini_fields) - list_ini.write_ini_file(sort_sections=True) - - return ThermalCluster( - self.thermal_service, - area_id, - thermal_name, - local_thermal_properties.yield_thermal_cluster_properties(), - ) - - def read_renewable_cluster( - self, - area_id: str, - renewable_name: str, - properties: Optional[RenewableClusterProperties] = None, - ) -> RenewableCluster: - 
local_properties = RenewableClusterPropertiesLocal(renewable_name, properties) - - list_ini = IniFile( - self.config.study_path, IniFileTypes.RENEWABLES_LIST_INI, area_name=area_id - ) - list_ini.add_section(local_properties.ini_fields) - list_ini.write_ini_file() - - return RenewableCluster( - self.renewable_service, - area_id, - renewable_name, - local_properties.yield_renewable_cluster_properties(), - ) - - def read_st_storage( - self, - area_id: str, - st_storage_name: str, - properties: Optional[STStorageProperties] = None, - ) -> STStorage: - local_st_storage_properties = STStoragePropertiesLocal( - st_storage_name, properties - ) - - list_ini = IniFile( - self.config.study_path, IniFileTypes.ST_STORAGE_LIST_INI, area_name=area_id - ) - list_ini.add_section(local_st_storage_properties.list_ini_fields) - list_ini.write_ini_file(sort_sections=True) - - return STStorage( - self.storage_service, - area_id, - st_storage_name, - local_st_storage_properties.yield_st_storage_properties(), - ) - - def read_wind(self, area: Area) -> Wind: - series = pd.DataFrame([]) - local_file = TimeSeriesFile( - TimeSeriesFileType.WIND, self.config.study_path, area.id, series - ) - return Wind(series, local_file) - - def read_reserves(self, area: Area) -> Reserves: - series = pd.DataFrame([]) - local_file = TimeSeriesFile( - TimeSeriesFileType.RESERVES, self.config.study_path, area.id, series - ) - return Reserves(series, local_file) - - def read_solar(self, area: Area) -> Solar: - series = pd.DataFrame([]) - local_file = TimeSeriesFile( - TimeSeriesFileType.SOLAR, self.config.study_path, area.id, series - ) - return Solar(series, local_file) - - def read_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: - series = series if series is not None else pd.DataFrame([]) - local_file = TimeSeriesFile( - TimeSeriesFileType.MISC_GEN, self.config.study_path, area.id, series - ) - return MiscGen(series, local_file) - - def read_hydro( - self, - area_id: str, - properties: 
Optional[HydroProperties] = None, - ) -> Hydro: - local_hydro_properties = HydroPropertiesLocal(area_id, properties) - - list_ini = IniFile(self.config.study_path, IniFileTypes.HYDRO_INI) - list_ini.add_section(local_hydro_properties.hydro_ini_fields) - list_ini.write_ini_file(sort_section_content=True) - - return Hydro(self, area_id, local_hydro_properties.yield_hydro_properties()) - - def read_area(self, area_name: str, area_id: str) -> Area: - """ - Args: - area_name: area to be added to study - area_id: area id. If not provided, default values will be used. - - Returns: area object if success or Error if area can not be - read - """ - - def _line_exists_in_file(file_content: str, line_to_add: str) -> bool: - """ - Args: - file_content: file content to check - line_to_add: line to add - - Returns: True if line is already present in file. - - """ - return line_to_add.strip() in file_content.split("\n") - - existing_path = self._config.local_path - study_path = existing_path / self.study_name - list_ini = IniFile(self.config.study_path, IniFileTypes.AREAS_SETS_INI) - - return {} +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +import logging +import os +from configparser import ConfigParser +from typing import Optional, Dict, List, Any + +import pandas as pd + +from antares.config.local_configuration import LocalConfiguration +from antares.exceptions.exceptions import CustomError +from antares.model.area import ( + AreaProperties, + AreaUi, + Area, + AreaPropertiesLocal, + AreaUiLocal, +) +from antares.model.hydro import ( + HydroProperties, + HydroMatrixName, + Hydro, + HydroPropertiesLocal, +) +from antares.model.misc_gen import MiscGen +from antares.model.renewable import ( + RenewableClusterProperties, + RenewableCluster, + RenewableClusterPropertiesLocal, +) +from antares.model.reserves import Reserves +from antares.model.solar import Solar +from antares.model.st_storage import ( + STStorageProperties, + STStorage, + STStoragePropertiesLocal, +) +from antares.model.thermal import ( + ThermalClusterProperties, + ThermalCluster, + ThermalClusterPropertiesLocal, +) +from antares.model.wind import Wind +from antares.service.base_services import ( + BaseAreaService, + BaseShortTermStorageService, + BaseThermalService, + BaseRenewableService, +) +from antares.tools.ini_tool import IniFileTypes, IniFile +from antares.tools.time_series_tool import TimeSeriesFile, TimeSeriesFileType + + +def _sets_ini_content() -> ConfigParser: + """ + Returns: sets.ini contents with default values + """ + sets_ini = ConfigParser() + sets_ini_dict = { + "all areas": { + "caption": "All areas", + "comments": "Spatial aggregates on all areas", + "output": "false", + "apply-filter": "add-all", + } + } + sets_ini.read_dict(sets_ini_dict) + return sets_ini + + +class AreaLocalService(BaseAreaService): + def __init__(self, config: LocalConfiguration, study_name: str, **kwargs: Any) -> None: + super().__init__(**kwargs) + self.config = config + self.study_name = study_name + + def set_storage_service(self, storage_service: BaseShortTermStorageService) -> None: + self.storage_service = storage_service + + def 
set_thermal_service(self, thermal_service: BaseThermalService) -> None: + self.thermal_service = thermal_service + + def set_renewable_service(self, renewable_service: BaseRenewableService) -> None: + self.renewable_service = renewable_service + + def create_thermal_cluster( + self, + area_id: str, + thermal_name: str, + properties: Optional[ThermalClusterProperties] = None, + ) -> ThermalCluster: + local_thermal_properties = ThermalClusterPropertiesLocal(thermal_name, properties) + + list_ini = IniFile(self.config.study_path, IniFileTypes.THERMAL_LIST_INI, area_name=area_id) + list_ini.add_section(local_thermal_properties.list_ini_fields) + list_ini.write_ini_file(sort_sections=True) + + return ThermalCluster( + self.thermal_service, + area_id, + thermal_name, + local_thermal_properties.yield_thermal_cluster_properties(), + ) + + def create_thermal_cluster_with_matrices( + self, + area_id: str, + cluster_name: str, + parameters: ThermalClusterProperties, + prepro: Optional[pd.DataFrame], + modulation: Optional[pd.DataFrame], + series: Optional[pd.DataFrame], + CO2Cost: Optional[pd.DataFrame], + fuelCost: Optional[pd.DataFrame], + ) -> ThermalCluster: + raise NotImplementedError + + def create_renewable_cluster( + self, + area_id: str, + renewable_name: str, + properties: Optional[RenewableClusterProperties] = None, + series: Optional[pd.DataFrame] = None, + ) -> RenewableCluster: + local_properties = RenewableClusterPropertiesLocal(renewable_name, properties) + + list_ini = IniFile(self.config.study_path, IniFileTypes.RENEWABLES_LIST_INI, area_name=area_id) + list_ini.add_section(local_properties.ini_fields) + list_ini.write_ini_file() + + return RenewableCluster( + self.renewable_service, + area_id, + renewable_name, + local_properties.yield_renewable_cluster_properties(), + ) + + def create_st_storage( + self, + area_id: str, + st_storage_name: str, + properties: Optional[STStorageProperties] = None, + ) -> STStorage: + local_st_storage_properties = 
STStoragePropertiesLocal(st_storage_name, properties) + + list_ini = IniFile(self.config.study_path, IniFileTypes.ST_STORAGE_LIST_INI, area_name=area_id) + list_ini.add_section(local_st_storage_properties.list_ini_fields) + list_ini.write_ini_file(sort_sections=True) + + return STStorage( + self.storage_service, + area_id, + st_storage_name, + local_st_storage_properties.yield_st_storage_properties(), + ) + + def create_wind(self, area: Area, series: Optional[pd.DataFrame]) -> Wind: + series = series if series is not None else pd.DataFrame([]) + local_file = TimeSeriesFile(TimeSeriesFileType.WIND, self.config.study_path, area.id, series) + return Wind(series, local_file) + + def create_reserves(self, area: Area, series: Optional[pd.DataFrame]) -> Reserves: + series = series if series is not None else pd.DataFrame([]) + local_file = TimeSeriesFile(TimeSeriesFileType.RESERVES, self.config.study_path, area.id, series) + return Reserves(series, local_file) + + def create_solar(self, area: Area, series: Optional[pd.DataFrame]) -> Solar: + series = series if series is not None else pd.DataFrame([]) + local_file = TimeSeriesFile(TimeSeriesFileType.SOLAR, self.config.study_path, area.id, series) + return Solar(series, local_file) + + def create_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: + series = series if series is not None else pd.DataFrame([]) + local_file = TimeSeriesFile(TimeSeriesFileType.MISC_GEN, self.config.study_path, area.id, series) + return MiscGen(series, local_file) + + def create_hydro( + self, + area_id: str, + properties: Optional[HydroProperties] = None, + matrices: Optional[Dict[HydroMatrixName, pd.DataFrame]] = None, + ) -> Hydro: + local_hydro_properties = HydroPropertiesLocal(area_id, properties) + + list_ini = IniFile(self.config.study_path, IniFileTypes.HYDRO_INI) + list_ini.add_section(local_hydro_properties.hydro_ini_fields) + list_ini.write_ini_file(sort_section_content=True) + + return Hydro(self, area_id, 
local_hydro_properties.yield_hydro_properties()) + + def create_area( + self, + area_name: str, + properties: Optional[AreaProperties] = None, + ui: Optional[AreaUi] = None, + ) -> Area: + """ + Args: + area_name: area to be added to study + properties: area's properties. If not provided, default values will be used. + ui: area's ui characteristics. If not provided, default values will be used. + + Returns: area name if success or Error if area can not be + created + """ + + def _line_exists_in_file(file_content: str, line_to_add: str) -> bool: + """ + Args: + file_content: file content to check + line_to_add: line to add + + Returns: True if line is already present in file. + + """ + return line_to_add.strip() in file_content.split("\n") + + study_directory = self.config.local_path / self.study_name / "input" + areas_directory = study_directory / "areas" + new_area_directory = areas_directory / area_name + + # Create "areas" directory if it doesn't exist + os.makedirs(new_area_directory, exist_ok=True) + + list_path = areas_directory / "list.txt" + + area_to_add = f"{area_name}\n" + try: + if os.path.isfile(list_path): + with open(list_path, "r") as list_file: + list_file_content = list_file.read() + if _line_exists_in_file(list_file_content, area_to_add): + raise ValueError(f"The Area '{area_name}' already exists in the study {self.study_name}.") + updated_list = sorted(list_file_content.splitlines(keepends=True) + [area_to_add]) + else: + updated_list = [area_to_add] + + # Write area(s) to file list.txt + with open(list_path, "w") as list_txt: + list_txt.write("".join(map(str, updated_list))) + + # TODO: Handle districts in sets.ini later + sets_ini_content = _sets_ini_content() + + with (self.config.study_path / IniFileTypes.AREAS_SETS_INI.value).open("w") as sets_ini: + sets_ini_content.write(sets_ini) + + local_properties = AreaPropertiesLocal(properties) if properties else AreaPropertiesLocal() + + adequacy_patch_ini = IniFile(self.config.study_path, 
IniFileTypes.AREA_ADEQUACY_PATCH_INI, area_name) + adequacy_patch_ini.add_section(local_properties.adequacy_patch_mode()) + adequacy_patch_ini.write_ini_file() + + optimization_ini = ConfigParser() + optimization_ini.read_dict(local_properties.model_dump(by_alias=True, exclude_none=True)) + + with open(new_area_directory / "optimization.ini", "w") as optimization_ini_file: + optimization_ini.write(optimization_ini_file) + + areas_ini = IniFile(self.config.study_path, IniFileTypes.THERMAL_AREAS_INI) + if not areas_ini.ini_dict: + areas_ini.add_section({"unserverdenergycost": {}}) + areas_ini.add_section({"spilledenergycost": {}}) + areas_ini.write_ini_file() + areas_ini.parsed_ini["unserverdenergycost"][area_name] = local_properties.nodal_optimization[ + "average-unsupplied-energy-cost" + ] + areas_ini.parsed_ini["spilledenergycost"][area_name] = local_properties.nodal_optimization[ + "average-spilled-energy-cost" + ] + areas_ini.write_ini_file() + + local_ui = AreaUiLocal(ui) if ui else AreaUiLocal() + ui_ini = ConfigParser() + ui_ini.read_dict(local_ui.model_dump(exclude_none=True)) + with open(new_area_directory / "ui.ini", "w") as ui_ini_file: + ui_ini.write(ui_ini_file) + + except Exception as e: + raise CustomError(f"Error during area creation: {e}") from e + + logging.info(f"Area {area_name} created successfully!") + created_area = Area( + name=area_name, + area_service=self, + storage_service=self.storage_service, + thermal_service=self.thermal_service, + renewable_service=self.renewable_service, + properties=local_properties.yield_area_properties(), + ui=local_ui.yield_area_ui(), + ) + created_area.create_hydro() + return created_area + + def delete_area(self, area: Area) -> None: + raise NotImplementedError + + def update_area_properties(self, area: Area, properties: AreaProperties) -> AreaProperties: + raise NotImplementedError + + def update_area_ui(self, area: Area, ui: AreaUi) -> AreaUi: + raise NotImplementedError + + def delete_thermal_clusters(self, 
area: Area, thermal_clusters: List[ThermalCluster]) -> None: + raise NotImplementedError + + def delete_renewable_clusters(self, area: Area, renewable_clusters: List[RenewableCluster]) -> None: + raise NotImplementedError + + def delete_st_storages(self, area: Area, storages: List[STStorage]) -> None: + raise NotImplementedError + + def upload_load_matrix(self, area: Area, load_matrix: pd.DataFrame) -> None: + raise NotImplementedError + + def get_load_matrix(self, area: Area) -> pd.DataFrame: + raise NotImplementedError + + def read_thermal_cluster( + self, + area_id: str, + thermal_name: str, + properties: Optional[ThermalClusterProperties] = None, + ) -> ThermalCluster: + local_thermal_properties = ThermalClusterPropertiesLocal(thermal_name, properties) + + list_ini = IniFile(self.config.study_path, IniFileTypes.THERMAL_LIST_INI, area_name=area_id) + list_ini.add_section(local_thermal_properties.list_ini_fields) + list_ini.write_ini_file(sort_sections=True) + + return ThermalCluster( + self.thermal_service, + area_id, + thermal_name, + local_thermal_properties.yield_thermal_cluster_properties(), + ) + + def read_renewable_cluster( + self, + area_id: str, + renewable_name: str, + properties: Optional[RenewableClusterProperties] = None, + ) -> RenewableCluster: + local_properties = RenewableClusterPropertiesLocal(renewable_name, properties) + + list_ini = IniFile(self.config.study_path, IniFileTypes.RENEWABLES_LIST_INI, area_name=area_id) + list_ini.add_section(local_properties.ini_fields) + list_ini.write_ini_file() + + return RenewableCluster( + self.renewable_service, + area_id, + renewable_name, + local_properties.yield_renewable_cluster_properties(), + ) + + def read_st_storage( + self, + area_id: str, + st_storage_name: str, + properties: Optional[STStorageProperties] = None, + ) -> STStorage: + local_st_storage_properties = STStoragePropertiesLocal(st_storage_name, properties) + + list_ini = IniFile(self.config.study_path, IniFileTypes.ST_STORAGE_LIST_INI, 
area_name=area_id) + list_ini.add_section(local_st_storage_properties.list_ini_fields) + list_ini.write_ini_file(sort_sections=True) + + return STStorage( + self.storage_service, + area_id, + st_storage_name, + local_st_storage_properties.yield_st_storage_properties(), + ) + + def read_wind(self, area: Area) -> Wind: + series = pd.DataFrame([]) + local_file = TimeSeriesFile(TimeSeriesFileType.WIND, self.config.study_path, area.id, series) + return Wind(series, local_file) + + def read_reserves(self, area: Area) -> Reserves: + series = pd.DataFrame([]) + local_file = TimeSeriesFile(TimeSeriesFileType.RESERVES, self.config.study_path, area.id, series) + return Reserves(series, local_file) + + def read_solar(self, area: Area) -> Solar: + series = pd.DataFrame([]) + local_file = TimeSeriesFile(TimeSeriesFileType.SOLAR, self.config.study_path, area.id, series) + return Solar(series, local_file) + + def read_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen: + series = series if series is not None else pd.DataFrame([]) + local_file = TimeSeriesFile(TimeSeriesFileType.MISC_GEN, self.config.study_path, area.id, series) + return MiscGen(series, local_file) + + def read_hydro( + self, + area_id: str, + properties: Optional[HydroProperties] = None, + ) -> Hydro: + local_hydro_properties = HydroPropertiesLocal(area_id, properties) + + list_ini = IniFile(self.config.study_path, IniFileTypes.HYDRO_INI) + list_ini.add_section(local_hydro_properties.hydro_ini_fields) + list_ini.write_ini_file(sort_section_content=True) + + return Hydro(self, area_id, local_hydro_properties.yield_hydro_properties()) + + def read_area(self, area_name: str, area_id: str) -> Area: + """ + Args: + area_name: area to be added to study + area_id: area id. If not provided, default values will be used. 
+ + Returns: area object if success or Error if area can not be + read + """ + + def _line_exists_in_file(file_content: str, line_to_add: str) -> bool: + """ + Args: + file_content: file content to check + line_to_add: line to add + + Returns: True if line is already present in file. + + """ + return line_to_add.strip() in file_content.split("\n") + + existing_path = self._config.local_path + study_path = existing_path / self.study_name + list_ini = IniFile(self.config.study_path, IniFileTypes.AREAS_SETS_INI) + + return {} diff --git a/src/antares/service/local_services/binding_constraint_local.py b/src/antares/service/local_services/binding_constraint_local.py index dfa6a7ee..93a9e9f4 100644 --- a/src/antares/service/local_services/binding_constraint_local.py +++ b/src/antares/service/local_services/binding_constraint_local.py @@ -1,68 +1,62 @@ -from typing import Optional, List, Any - -import pandas as pd - -from antares.config.local_configuration import LocalConfiguration -from antares.model.binding_constraint import ( - BindingConstraintProperties, - ConstraintTerm, - BindingConstraint, - ConstraintMatrixName, -) -from antares.service.base_services import BaseBindingConstraintService - - -class BindingConstraintLocalService(BaseBindingConstraintService): - def __init__( - self, config: LocalConfiguration, study_name: str, **kwargs: Any - ) -> None: - super().__init__(**kwargs) - self.config = config - self.study_name = study_name - - def create_binding_constraint( - self, - name: str, - properties: Optional[BindingConstraintProperties] = None, - terms: Optional[List[ConstraintTerm]] = None, - less_term_matrix: Optional[pd.DataFrame] = None, - equal_term_matrix: Optional[pd.DataFrame] = None, - greater_term_matrix: Optional[pd.DataFrame] = None, - ) -> BindingConstraint: - raise NotImplementedError - - def add_constraint_terms( - self, constraint: BindingConstraint, terms: List[ConstraintTerm] - ) -> List[ConstraintTerm]: - raise NotImplementedError - - def 
delete_binding_constraint_term(self, constraint_id: str, term_id: str) -> None: - raise NotImplementedError - - def update_binding_constraint_properties( - self, - binding_constraint: BindingConstraint, - properties: BindingConstraintProperties, - ) -> BindingConstraintProperties: - raise NotImplementedError - - def get_constraint_matrix( - self, constraint: BindingConstraint, matrix_name: ConstraintMatrixName - ) -> pd.DataFrame: - raise NotImplementedError - - def update_constraint_matrix( - self, - constraint: BindingConstraint, - matrix_name: ConstraintMatrixName, - matrix: pd.DataFrame, - ) -> None: - raise NotImplementedError - - def read_binding_constraint( - self, - constraint: BindingConstraint, - matrix_name: ConstraintMatrixName, - matrix: pd.DataFrame, - ) -> None: - raise NotImplementedError +from typing import Optional, List, Any + +import pandas as pd + +from antares.config.local_configuration import LocalConfiguration +from antares.model.binding_constraint import ( + BindingConstraintProperties, + ConstraintTerm, + BindingConstraint, + ConstraintMatrixName, +) +from antares.service.base_services import BaseBindingConstraintService + + +class BindingConstraintLocalService(BaseBindingConstraintService): + def __init__(self, config: LocalConfiguration, study_name: str, **kwargs: Any) -> None: + super().__init__(**kwargs) + self.config = config + self.study_name = study_name + + def create_binding_constraint( + self, + name: str, + properties: Optional[BindingConstraintProperties] = None, + terms: Optional[List[ConstraintTerm]] = None, + less_term_matrix: Optional[pd.DataFrame] = None, + equal_term_matrix: Optional[pd.DataFrame] = None, + greater_term_matrix: Optional[pd.DataFrame] = None, + ) -> BindingConstraint: + raise NotImplementedError + + def add_constraint_terms(self, constraint: BindingConstraint, terms: List[ConstraintTerm]) -> List[ConstraintTerm]: + raise NotImplementedError + + def delete_binding_constraint_term(self, constraint_id: str, 
term_id: str) -> None: + raise NotImplementedError + + def update_binding_constraint_properties( + self, + binding_constraint: BindingConstraint, + properties: BindingConstraintProperties, + ) -> BindingConstraintProperties: + raise NotImplementedError + + def get_constraint_matrix(self, constraint: BindingConstraint, matrix_name: ConstraintMatrixName) -> pd.DataFrame: + raise NotImplementedError + + def update_constraint_matrix( + self, + constraint: BindingConstraint, + matrix_name: ConstraintMatrixName, + matrix: pd.DataFrame, + ) -> None: + raise NotImplementedError + + def read_binding_constraint( + self, + constraint: BindingConstraint, + matrix_name: ConstraintMatrixName, + matrix: pd.DataFrame, + ) -> None: + raise NotImplementedError diff --git a/src/antares/service/local_services/link_local.py b/src/antares/service/local_services/link_local.py index e5136fa8..d0a881a3 100644 --- a/src/antares/service/local_services/link_local.py +++ b/src/antares/service/local_services/link_local.py @@ -1,150 +1,136 @@ -import configparser -import os -from types import MappingProxyType -from typing import Optional, Any, Dict - -from antares.config.local_configuration import LocalConfiguration -from antares.exceptions.exceptions import LinkCreationError, CustomError -from antares.model.area import Area -from antares.model.link import ( - LinkProperties, - LinkUi, - Link, - LinkPropertiesLocal, - LinkUiLocal, -) -from antares.service.base_services import BaseLinkService -from antares.tools.contents_tool import sort_ini_sections - - -class LinkLocalService(BaseLinkService): - def __init__( - self, config: LocalConfiguration, study_name: str, **kwargs: Any - ) -> None: - super().__init__(**kwargs) - self.config = config - self.study_name = study_name - - def create_link( - self, - area_from: Area, - area_to: Area, - properties: Optional[LinkProperties] = None, - ui: Optional[LinkUi] = None, - existing_areas: Optional[MappingProxyType[str, Area]] = None, - ) -> Link: - """ - 
import configparser
import os
from types import MappingProxyType
from typing import Optional, Any, Dict

from antares.config.local_configuration import LocalConfiguration
from antares.exceptions.exceptions import LinkCreationError, CustomError
from antares.model.area import Area
from antares.model.link import (
    LinkProperties,
    LinkUi,
    Link,
    LinkPropertiesLocal,
    LinkUiLocal,
)
from antares.service.base_services import BaseLinkService
from antares.tools.contents_tool import sort_ini_sections


class LinkLocalService(BaseLinkService):
    """Link service backed by a local study on disk."""

    def __init__(self, config: LocalConfiguration, study_name: str, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.config = config
        self.study_name = study_name

    def create_link(
        self,
        area_from: Area,
        area_to: Area,
        properties: Optional[LinkProperties] = None,
        ui: Optional[LinkUi] = None,
        existing_areas: Optional[MappingProxyType[str, Area]] = None,
    ) -> Link:
        """
        Args:
            area_from: area where the link goes from
            area_to: area where the link goes to
            properties: link's properties
            ui: link's ui characteristics
            existing_areas: existing areas from study

        Returns:
            The created link

        Raises:
            LinkCreationError: if an area doesn't exist or existing areas have not been provided
            CustomError: if the link already has a section in properties.ini
        """
        # Sort the two areas alphabetically by name: the link's on-disk location
        # is determined by the alphabetically first area.
        areas = dict(sorted({area_from.name: area_from, area_to.name: area_to}.items()))

        if existing_areas is not None:
            for area in areas.keys():
                if area not in existing_areas:
                    raise LinkCreationError(area_from.name, area_to.name, f"{area} does not exist.")
        else:
            raise LinkCreationError(area_from.name, area_to.name, "Cannot verify existing areas.")

        # Reassign so area_from is always the alphabetically first area, matching
        # the input/links/<first_area>/properties.ini layout used below.
        area_from, area_to = areas.values()

        link_dir = self.config.study_path / "input/links" / area_from.name
        os.makedirs(link_dir, exist_ok=True)

        local_properties = LinkPropertiesLocal(properties) if properties else LinkPropertiesLocal()
        local_ui = LinkUiLocal(ui) if ui else LinkUiLocal()

        properties_ini_file = link_dir / "properties.ini"
        properties_ini = configparser.ConfigParser()

        # Merge into the existing ini when other links already start from this area.
        if properties_ini_file.is_file():
            with open(properties_ini_file, "r") as ini_file:
                properties_ini.read_file(ini_file)
        try:
            properties_ini.add_section(area_to.name)
        except configparser.DuplicateSectionError as e:
            raise CustomError(f"Link exists already, section already exists in properties.ini:\n\n{e.message}")
        ini_dict = dict(local_properties.ini_fields)
        ini_dict.update(local_ui.ini_fields)
        properties_ini[area_to.name] = self.sort_link_properties_dict(ini_dict)

        properties_ini = sort_ini_sections(properties_ini)

        with open(properties_ini_file, "w") as ini_file:
            properties_ini.write(ini_file)

        return Link(
            area_from=area_from,
            area_to=area_to,
            link_service=self,
            properties=local_properties.yield_link_properties(),
            ui=local_ui.yield_link_ui(),
        )

    def delete_link(self, link: Link) -> None:
        """Not implemented for local studies yet."""
        raise NotImplementedError

    def update_link_properties(self, link: Link, properties: LinkProperties) -> LinkProperties:
        """Not implemented for local studies yet."""
        raise NotImplementedError

    def update_link_ui(self, link: Link, ui: LinkUi) -> LinkUi:
        """Not implemented for local studies yet."""
        raise NotImplementedError

    # TODO maybe put sorting functions together
    @staticmethod
    def sort_link_properties_dict(ini_dict: Dict[str, str]) -> Dict[str, str]:
        """Order link ini fields in the canonical Antares field order.

        Raises:
            ValueError: if ini_dict contains a key not in the canonical list
                (list.index raises on unknown keys).
        """
        dict_order = [
            "hurdles-cost",
            "loop-flow",
            "use-phase-shifter",
            "transmission-capacities",
            "asset-type",
            "link-style",
            "link-width",
            "colorr",
            "colorg",
            "colorb",
            "display-comments",
            "filter-synthesis",
            "filter-year-by-year",
        ]
        return dict(sorted(ini_dict.items(), key=lambda item: dict_order.index(item[0])))

    def read_link(self, area_from: Area, area_to: Area) -> Link:
        """Read the link between two areas from the local study.

        Args:
            area_from: area where the link goes from
            area_to: area where the link goes to

        Returns:
            The link read from the study.

        Raises:
            NotImplementedError: reading links is not implemented for local studies yet.
        """
        # Fixed: previously ended in `pass` (silently returning None despite the
        # declared `-> Link` return type) with a docstring copied from create_link
        # that described parameters this method does not have. Raise like every
        # other unimplemented method so callers fail loudly.
        raise NotImplementedError
from typing import Any

import pandas as pd

from antares.config.local_configuration import LocalConfiguration
from antares.model.renewable import RenewableCluster, RenewableClusterProperties
from antares.service.base_services import BaseRenewableService


class RenewableLocalService(BaseRenewableService):
    """Renewable-cluster service backed by a local study on disk.

    All operations are stubs for now and raise NotImplementedError.
    """

    def __init__(self, config: LocalConfiguration, study_name: str, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        # Identity and location of the local study this service operates on.
        self.study_name = study_name
        self.config = config

    def update_renewable_properties(
        self,
        renewable_cluster: RenewableCluster,
        properties: RenewableClusterProperties,
    ) -> RenewableClusterProperties:
        """Not implemented for local studies yet."""
        raise NotImplementedError

    def get_renewable_matrix(self, renewable: RenewableCluster) -> pd.DataFrame:
        """Not implemented for local studies yet."""
        raise NotImplementedError
from typing import Any

from antares.config.local_configuration import LocalConfiguration
from antares.model.st_storage import STStorage, STStorageProperties
from antares.service.base_services import BaseShortTermStorageService


class ShortTermStorageLocalService(BaseShortTermStorageService):
    """Short-term-storage service backed by a local study on disk.

    All operations are stubs for now and raise NotImplementedError.
    """

    def __init__(self, config: LocalConfiguration, study_name: str, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        # Identity and location of the local study this service operates on.
        self.study_name = study_name
        self.config = config

    def update_st_storage_properties(
        self,
        st_storage: STStorage,
        properties: STStorageProperties,
    ) -> STStorageProperties:
        """Not implemented for local studies yet."""
        raise NotImplementedError
"patch.json" - if not os.path.exists(patch_path): - return json.loads( - f"Le fichier {patch_path} n'existe pas dans le dossier {local_path / self._study_name}" - ) - try: - with open(patch_path, "r") as file: - content = file.read() - try: - data = json.loads(content) - except json.JSONDecodeError: - return json.loads( - f"Le fichier {patch_path} ne contient pas du JSON valide" - ) - if "areas" in data: - areas = data["areas"] - if isinstance(areas, dict): - return list(areas.keys()) - else: - return json.loads( - f"The key 'areas' n'existe pas dans le fichier JSON" - ) - except IOError: - return f"Impossible de lire le fichier {patch_path}" +from typing import Optional, Any +import os +import json + +from antares.config.local_configuration import LocalConfiguration +from antares.model.binding_constraint import BindingConstraint +from antares.model.settings import StudySettings +from antares.service.base_services import BaseStudyService + + +class StudyLocalService(BaseStudyService): + def __init__(self, config: LocalConfiguration, study_name: str, **kwargs: Any) -> None: + super().__init__(**kwargs) + self._config = config + self._study_name = study_name + + @property + def study_id(self) -> str: + return self._study_name + + @property + def config(self) -> LocalConfiguration: + return self._config + + def update_study_settings(self, settings: StudySettings) -> Optional[StudySettings]: + raise NotImplementedError + + def delete_binding_constraint(self, constraint: BindingConstraint) -> None: + raise NotImplementedError + + def delete(self, children: bool) -> None: + raise NotImplementedError + + def read_areas(self) -> json: + local_path = self._config.local_path + patch_path = local_path / self._study_name / "patch.json" + if not os.path.exists(patch_path): + return json.loads(f"Le fichier {patch_path} n'existe pas dans le dossier {local_path / self._study_name}") + try: + with open(patch_path, "r") as file: + content = file.read() + try: + data = 
json.loads(content) + except json.JSONDecodeError: + return json.loads(f"Le fichier {patch_path} ne contient pas du JSON valide") + if "areas" in data: + areas = data["areas"] + if isinstance(areas, dict): + return list(areas.keys()) + else: + return json.loads(f"The key 'areas' n'existe pas dans le fichier JSON") + except IOError: + return f"Impossible de lire le fichier {patch_path}" diff --git a/src/antares/service/local_services/thermal_local.py b/src/antares/service/local_services/thermal_local.py index 25282354..34336527 100644 --- a/src/antares/service/local_services/thermal_local.py +++ b/src/antares/service/local_services/thermal_local.py @@ -1,30 +1,26 @@ -from typing import Any - -import pandas as pd - -from antares.config.local_configuration import LocalConfiguration -from antares.model.thermal import ( - ThermalCluster, - ThermalClusterMatrixName, - ThermalClusterProperties, -) -from antares.service.base_services import BaseThermalService - - -class ThermalLocalService(BaseThermalService): - def __init__( - self, config: LocalConfiguration, study_name: str, **kwargs: Any - ) -> None: - super().__init__(**kwargs) - self.config = config - self.study_name = study_name - - def update_thermal_properties( - self, thermal_cluster: ThermalCluster, properties: ThermalClusterProperties - ) -> ThermalClusterProperties: - raise NotImplementedError - - def get_thermal_matrix( - self, thermal_cluster: ThermalCluster, ts_name: ThermalClusterMatrixName - ) -> pd.DataFrame: - raise NotImplementedError +from typing import Any + +import pandas as pd + +from antares.config.local_configuration import LocalConfiguration +from antares.model.thermal import ( + ThermalCluster, + ThermalClusterMatrixName, + ThermalClusterProperties, +) +from antares.service.base_services import BaseThermalService + + +class ThermalLocalService(BaseThermalService): + def __init__(self, config: LocalConfiguration, study_name: str, **kwargs: Any) -> None: + super().__init__(**kwargs) + self.config 
= config + self.study_name = study_name + + def update_thermal_properties( + self, thermal_cluster: ThermalCluster, properties: ThermalClusterProperties + ) -> ThermalClusterProperties: + raise NotImplementedError + + def get_thermal_matrix(self, thermal_cluster: ThermalCluster, ts_name: ThermalClusterMatrixName) -> pd.DataFrame: + raise NotImplementedError diff --git a/src/antares/service/service_factory.py b/src/antares/service/service_factory.py index 2e50bd1c..f15a612d 100644 --- a/src/antares/service/service_factory.py +++ b/src/antares/service/service_factory.py @@ -1,185 +1,162 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from antares.api_conf.api_conf import APIconf -from antares.config.base_configuration import BaseConfiguration -from antares.config.local_configuration import LocalConfiguration -from antares.service.api_services.area_api import AreaApiService -from antares.service.api_services.binding_constraint_api import ( - BindingConstraintApiService, -) -from antares.service.api_services.link_api import LinkApiService -from antares.service.api_services.renewable_api import RenewableApiService -from antares.service.api_services.st_storage_api import ShortTermStorageApiService -from antares.service.api_services.study_api import StudyApiService -from antares.service.api_services.thermal_api import ThermalApiService -from antares.service.base_services import ( - BaseAreaService, - BaseLinkService, - BaseThermalService, - BaseBindingConstraintService, - BaseStudyService, - BaseRenewableService, - BaseShortTermStorageService, -) -from antares.service.local_services.area_local import AreaLocalService -from 
# Copyright (c) 2024, RTE (https://www.rte-france.com)
#
# See AUTHORS.txt
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# SPDX-License-Identifier: MPL-2.0
#
# This file is part of the Antares project.

from typing import Any, Dict, List

from antares.api_conf.api_conf import APIconf
from antares.config.base_configuration import BaseConfiguration
from antares.config.local_configuration import LocalConfiguration
from antares.service.api_services.area_api import AreaApiService
from antares.service.api_services.binding_constraint_api import (
    BindingConstraintApiService,
)
from antares.service.api_services.link_api import LinkApiService
from antares.service.api_services.renewable_api import RenewableApiService
from antares.service.api_services.st_storage_api import ShortTermStorageApiService
from antares.service.api_services.study_api import StudyApiService
from antares.service.api_services.thermal_api import ThermalApiService
from antares.service.base_services import (
    BaseAreaService,
    BaseLinkService,
    BaseThermalService,
    BaseBindingConstraintService,
    BaseStudyService,
    BaseRenewableService,
    BaseShortTermStorageService,
)
from antares.service.local_services.area_local import AreaLocalService
from antares.service.local_services.binding_constraint_local import (
    BindingConstraintLocalService,
)
from antares.service.local_services.link_local import LinkLocalService
from antares.service.local_services.renewable_local import RenewableLocalService
from antares.service.local_services.st_storage_local import ShortTermStorageLocalService
from antares.service.local_services.study_local import StudyLocalService
from antares.service.local_services.thermal_local import ThermalLocalService

ERROR_MESSAGE = "Unsupported configuration type: "


class ServiceFactory:
    """Builds the concrete service objects (API-backed or local-disk) that match
    the given configuration type."""

    def __init__(self, config: BaseConfiguration, study_id: str = "", study_name: str = ""):
        self.config = config
        self.study_id = study_id
        self.study_name = study_name

    def create_area_service(self) -> BaseAreaService:
        """Return an area service wired with its storage/thermal/renewable sub-services."""
        if isinstance(self.config, APIconf):
            area_service: BaseAreaService = AreaApiService(self.config, self.study_id)
            storage_service: BaseShortTermStorageService = ShortTermStorageApiService(self.config, self.study_id)
            thermal_service: BaseThermalService = ThermalApiService(self.config, self.study_id)
            renewable_service: BaseRenewableService = RenewableApiService(self.config, self.study_id)
            area_service.set_storage_service(storage_service)
            area_service.set_thermal_service(thermal_service)
            area_service.set_renewable_service(renewable_service)
        elif isinstance(self.config, LocalConfiguration):
            area_service = AreaLocalService(self.config, self.study_name)
            storage_service = ShortTermStorageLocalService(self.config, self.study_name)
            thermal_service = ThermalLocalService(self.config, self.study_name)
            renewable_service = RenewableLocalService(self.config, self.study_name)
            area_service.set_storage_service(storage_service)
            area_service.set_thermal_service(thermal_service)
            area_service.set_renewable_service(renewable_service)
        else:
            raise TypeError(f"{ERROR_MESSAGE}{repr(self.config)}")
        return area_service

    def create_link_service(self) -> BaseLinkService:
        if isinstance(self.config, APIconf):
            link_service: BaseLinkService = LinkApiService(self.config, self.study_id)
        elif isinstance(self.config, LocalConfiguration):
            link_service = LinkLocalService(self.config, self.study_name)
        else:
            raise TypeError(f"{ERROR_MESSAGE}{repr(self.config)}")
        return link_service

    def create_thermal_service(self) -> BaseThermalService:
        if isinstance(self.config, APIconf):
            thermal_service: BaseThermalService = ThermalApiService(self.config, self.study_id)
        elif isinstance(self.config, LocalConfiguration):
            thermal_service = ThermalLocalService(self.config, self.study_name)
        else:
            raise TypeError(f"{ERROR_MESSAGE}{repr(self.config)}")
        return thermal_service

    def create_binding_constraints_service(self) -> BaseBindingConstraintService:
        if isinstance(self.config, APIconf):
            binding_constraint_service: BaseBindingConstraintService = BindingConstraintApiService(
                self.config, self.study_id
            )
        elif isinstance(self.config, LocalConfiguration):
            binding_constraint_service = BindingConstraintLocalService(self.config, self.study_name)
        else:
            raise TypeError(f"{ERROR_MESSAGE}{repr(self.config)}")
        return binding_constraint_service

    def create_study_service(self) -> BaseStudyService:
        if isinstance(self.config, APIconf):
            study_service: BaseStudyService = StudyApiService(self.config, self.study_id)
        elif isinstance(self.config, LocalConfiguration):
            study_service = StudyLocalService(self.config, self.study_name)
        else:
            raise TypeError(f"{ERROR_MESSAGE}{repr(self.config)}")
        return study_service

    def create_renewable_service(self) -> BaseRenewableService:
        if isinstance(self.config, APIconf):
            renewable_service: BaseRenewableService = RenewableApiService(self.config, self.study_id)
        elif isinstance(self.config, LocalConfiguration):
            renewable_service = RenewableLocalService(self.config, self.study_name)
        else:
            raise TypeError(f"{ERROR_MESSAGE}{repr(self.config)}")
        return renewable_service

    def create_st_storage_service(self) -> BaseShortTermStorageService:
        if isinstance(self.config, APIconf):
            short_term_storage_service: BaseShortTermStorageService = ShortTermStorageApiService(
                self.config, self.study_id
            )
        elif isinstance(self.config, LocalConfiguration):
            short_term_storage_service = ShortTermStorageLocalService(self.config, self.study_name)
        else:
            raise TypeError(f"{ERROR_MESSAGE}{repr(self.config)}")
        return short_term_storage_service


class ServiceReader:
    """Reads an existing study through the appropriate services (local only for now)."""

    def __init__(self, config: BaseConfiguration, study_name: str = "", study_id: str = ""):
        self.config = config
        self.study_id = study_id
        self.study_name = study_name
        # Fixed: these were lists, but read_study_service assigns by object id
        # (`self._areas[area.id] = area`), which raises TypeError on a list.
        self._areas: Dict[str, Any] = {}
        self._binding_constraints: Dict[str, Any] = {}
        self._links: Dict[str, Any] = {}

    # we can have read area service here, for just one area
    def read_study_service(self) -> List[str]:
        """Read the study's areas and cache them; return the list of area names.

        Raises:
            TypeError: if the configuration is not a LocalConfiguration.
        """
        if isinstance(self.config, LocalConfiguration):
            study_service: BaseStudyService = StudyLocalService(self.config, self.study_name)
            areas = study_service.read_areas()
            # Fixed: `for area_name in range(areas)` raised TypeError because
            # `areas` is a list of names; iterate the names directly. The area
            # service is also constructed once instead of once per area.
            area_service = AreaLocalService(self.config, self.study_name)
            for area_name in areas:
                area = area_service.read_area(area_name)
                self._areas[area.id] = area
            # TODO: also read binding constraints and links once their local
            # read_* implementations exist.
        else:
            raise TypeError(f"{ERROR_MESSAGE}{repr(self.config)}")

        return areas
- """ - return _sub_invalid_chars(" ", name).strip().lower() - - -def retrieve_file_content(file_to_retrieve: str) -> Dict[str, Any]: - module_path = Path(__file__).resolve().parent - - path_resources = module_path.parent.parent / "resources" - path_to_file = path_resources / file_to_retrieve - - with open(path_to_file, "r") as read_content: - return json.load(read_content) - - -def transform_ui_data_to_text(data_from_json: Dict[str, Any]) -> str: - """ - Args: - data_from_json: ini data to be inserted - - Returns: - str to be written in .ini file - """ - ini_content = "" - for key, value in data_from_json.items(): - if isinstance(value, dict): - section_header = f"[{key}]" - ini_content += f"{section_header}\n" - for inner_key, inner_value in value.items(): - if isinstance(inner_value, list): - inner_value_str = " , ".join(map(str, inner_value)) - ini_content += f"{inner_key} = {inner_value_str}\n" - else: - ini_content += f"{inner_key} = {inner_value}\n" - else: - ini_content += f"{key} = {value}\n" - - return ini_content - - -def extract_content(key: str, file_to_retrieve: str) -> str: - ini_data = retrieve_file_content(file_to_retrieve) - data_for_file = ini_data.get(key) - if data_for_file is not None: - return transform_ui_data_to_text(data_for_file) - else: - raise KeyError(f"Key '{key}' not defined in {file_to_retrieve}") - - -class EnumIgnoreCase(Enum): - @classmethod - def _missing_(cls, value: object) -> Optional["EnumIgnoreCase"]: - if isinstance(value, str): - for member in cls: - if member.value.upper() == value.upper(): - return member - return None - - -class AreaUiResponse(BaseModel): - """ - Utility class to convert the AntaresWebResponse to Antares-Craft object. 
- """ - - class MapResponse(BaseModel): - color_r: int - color_g: int - color_b: int - layers: int - x: int - y: int - - layerColor: Dict[str, str] - layerX: Dict[str, float] - layerY: Dict[str, float] - ui: MapResponse - - def to_craft(self) -> Dict[str, Any]: - json_ui = { - "layer": self.ui.layers, - "x": self.ui.x, - "y": self.ui.y, - "layer_x": self.layerX, - "layer_y": self.layerY, - "layer_color": self.layerColor, - "color_rgb": [self.ui.color_r, self.ui.color_g, self.ui.color_b], - } - return json_ui - - -# TODO maybe put sorting functions together -def sort_ini_sections( - ini_to_sort: configparser.ConfigParser, -) -> configparser.ConfigParser: - sorted_ini = configparser.ConfigParser() - for section in sorted(ini_to_sort.sections()): - sorted_ini[section] = ini_to_sort[section] - return sorted_ini +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import configparser +import json +import re +from enum import Enum +from pathlib import Path +from typing import Any, Dict, Optional + +from pydantic import BaseModel + +# Invalid chars was taken from Antares Simulator (C++). +_sub_invalid_chars = re.compile(r"[^a-zA-Z0-9_(),& -]+").sub + + +def transform_name_to_id(name: str) -> str: + """ + Transform a name into an identifier. + This method is used in AntaresWeb to construct their ids. 
+ """ + return _sub_invalid_chars(" ", name).strip().lower() + + +def retrieve_file_content(file_to_retrieve: str) -> Dict[str, Any]: + module_path = Path(__file__).resolve().parent + + path_resources = module_path.parent.parent / "resources" + path_to_file = path_resources / file_to_retrieve + + with open(path_to_file, "r") as read_content: + return json.load(read_content) + + +def transform_ui_data_to_text(data_from_json: Dict[str, Any]) -> str: + """ + Args: + data_from_json: ini data to be inserted + + Returns: + str to be written in .ini file + """ + ini_content = "" + for key, value in data_from_json.items(): + if isinstance(value, dict): + section_header = f"[{key}]" + ini_content += f"{section_header}\n" + for inner_key, inner_value in value.items(): + if isinstance(inner_value, list): + inner_value_str = " , ".join(map(str, inner_value)) + ini_content += f"{inner_key} = {inner_value_str}\n" + else: + ini_content += f"{inner_key} = {inner_value}\n" + else: + ini_content += f"{key} = {value}\n" + + return ini_content + + +def extract_content(key: str, file_to_retrieve: str) -> str: + ini_data = retrieve_file_content(file_to_retrieve) + data_for_file = ini_data.get(key) + if data_for_file is not None: + return transform_ui_data_to_text(data_for_file) + else: + raise KeyError(f"Key '{key}' not defined in {file_to_retrieve}") + + +class EnumIgnoreCase(Enum): + @classmethod + def _missing_(cls, value: object) -> Optional["EnumIgnoreCase"]: + if isinstance(value, str): + for member in cls: + if member.value.upper() == value.upper(): + return member + return None + + +class AreaUiResponse(BaseModel): + """ + Utility class to convert the AntaresWebResponse to Antares-Craft object. 
+ """ + + class MapResponse(BaseModel): + color_r: int + color_g: int + color_b: int + layers: int + x: int + y: int + + layerColor: Dict[str, str] + layerX: Dict[str, float] + layerY: Dict[str, float] + ui: MapResponse + + def to_craft(self) -> Dict[str, Any]: + json_ui = { + "layer": self.ui.layers, + "x": self.ui.x, + "y": self.ui.y, + "layer_x": self.layerX, + "layer_y": self.layerY, + "layer_color": self.layerColor, + "color_rgb": [self.ui.color_r, self.ui.color_g, self.ui.color_b], + } + return json_ui + + +# TODO maybe put sorting functions together +def sort_ini_sections( + ini_to_sort: configparser.ConfigParser, +) -> configparser.ConfigParser: + sorted_ini = configparser.ConfigParser() + for section in sorted(ini_to_sort.sections()): + sorted_ini[section] = ini_to_sort[section] + return sorted_ini diff --git a/src/antares/tools/ini_tool.py b/src/antares/tools/ini_tool.py index 1e313671..374dc8e2 100644 --- a/src/antares/tools/ini_tool.py +++ b/src/antares/tools/ini_tool.py @@ -1,160 +1,144 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -from configparser import ConfigParser -from enum import Enum -from pathlib import Path -from typing import Optional, Any, overload - - -class IniFileTypes(Enum): - """ - The different ini files in an Antares project, files that are created for each area require using - format(area_name=) to get the complete path - """ - - # TODO: Commented inis are not yet handled - # ANTARES = "study.antares" - # DESKTOP = "Desktop.ini" - # GENERAL = "settings/generaldata.ini" - # SCENARIO = "settings/scenariobuilder.dat" - AREAS_SETS_INI = "input/areas/sets.ini" - AREAS_LIST_TXT = "input/areas/list.txt" - AREA_OPTIMIZATION_INI = "input/areas/{area_name}/optimization.ini" - AREA_UI_INI = "input/areas/{area_name}/ui.ini" - AREA_ADEQUACY_PATCH_INI = "input/areas/{area_name}/adequacy_patch.ini" - HYDRO_INI = "input/hydro/hydro.ini" - LINK_PROPERTIES_INI = "input/links/{area_name}/properties.ini" - RENEWABLES_LIST_INI = "input/renewables/clusters/{area_name}/list.ini" - ST_STORAGE_LIST_INI = "input/st-storage/clusters/{area_name}/list.ini" - THERMAL_AREAS_INI = "input/thermal/areas.ini" - THERMAL_LIST_INI = "input/thermal/clusters/{area_name}/list.ini" - - -class IniFile: - def __init__( - self, - study_path: Path, - ini_file_type: IniFileTypes, - area_name: Optional[str] = None, - ini_contents: Optional[ConfigParser] = None, - ) -> None: - if "{area_name}" in ini_file_type.value and not area_name: - raise ValueError( - f"Area name not provided, ini type {ini_file_type.name} requires 'area_name'" - ) - self._full_path = study_path / ( - ini_file_type.value.format(area_name=area_name) - if ("{area_name}" in ini_file_type.value and area_name) - else ini_file_type.value - ) - self._file_name = self._full_path.name - self._file_path = self._full_path.parent - self._ini_contents = ini_contents or ConfigParser() - if self._full_path.is_file(): - self.update_from_ini_file() - else: - self.write_ini_file() - - @property - def ini_dict(self) -> dict: - """Ini contents as a python 
dictionary""" - return { - section: dict(self._ini_contents[section]) - for section in self._ini_contents.sections() - } - - @ini_dict.setter - def ini_dict(self, new_ini_dict: dict[str, dict[str, str]]) -> None: - self._ini_contents = ConfigParser() - self._ini_contents.read_dict(new_ini_dict) - - @property - def parsed_ini(self) -> ConfigParser: - """Ini contents as a ConfigParser""" - return self._ini_contents - - @parsed_ini.setter - def parsed_ini(self, new_ini_contents: ConfigParser) -> None: - self._ini_contents = new_ini_contents - - @property - def ini_path(self) -> Path: - """Ini path""" - return self._full_path - - @overload - def add_section(self, section: Path) -> None: ... - - @overload - def add_section(self, section: dict[str, dict[str, str]]) -> None: ... - - def add_section(self, section: Any) -> None: - if isinstance(section, dict): - self._ini_contents.read_dict(section) - elif isinstance(section, Path): - with section.open() as ini_file: - self._ini_contents.read_file(ini_file) - else: - raise TypeError("Only dict or Path are allowed") - - def update_from_ini_file(self) -> None: - if not self._full_path.is_file(): - raise FileNotFoundError(f"No such file: {self._full_path}") - - parsed_ini = ConfigParser() - with self._full_path.open() as file: - parsed_ini.read_file(file) - - self._ini_contents = parsed_ini - - def write_ini_file( - self, - sort_sections: bool = False, - sort_section_content: bool = False, - ) -> None: - if not self._file_path.is_dir(): - self._file_path.mkdir(parents=True) - ini_to_write = ( - self._ini_contents - if not sort_sections - else self._sort_ini_sections(self._ini_contents) - ) - ini_to_write = ( - ini_to_write - if not sort_section_content - else self._sort_ini_section_content(ini_to_write) - ) - - with self._full_path.open("w") as file: - ini_to_write.write(file) - - @staticmethod - def _sort_ini_sections(ini_to_sort: ConfigParser) -> ConfigParser: - sorted_ini = ConfigParser() - for section in 
sorted(ini_to_sort.sections()): - sorted_ini[section] = ini_to_sort[section] - return sorted_ini - - @staticmethod - def _sort_ini_section_content(ini_to_sort: ConfigParser) -> ConfigParser: - sorted_ini = ConfigParser() - for section in ini_to_sort.sections(): - sorted_ini[section] = { - key: value - for (key, value) in sorted(list(ini_to_sort[section].items())) - } - return sorted_ini - - -def check_if_none(value_to_check: Any, default_value: Any) -> Any: - return value_to_check if value_to_check is not None else default_value +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from configparser import ConfigParser +from enum import Enum +from pathlib import Path +from typing import Optional, Any, overload + + +class IniFileTypes(Enum): + """ + The different ini files in an Antares project, files that are created for each area require using + format(area_name=) to get the complete path + """ + + # TODO: Commented inis are not yet handled + # ANTARES = "study.antares" + # DESKTOP = "Desktop.ini" + # GENERAL = "settings/generaldata.ini" + # SCENARIO = "settings/scenariobuilder.dat" + AREAS_SETS_INI = "input/areas/sets.ini" + AREAS_LIST_TXT = "input/areas/list.txt" + AREA_OPTIMIZATION_INI = "input/areas/{area_name}/optimization.ini" + AREA_UI_INI = "input/areas/{area_name}/ui.ini" + AREA_ADEQUACY_PATCH_INI = "input/areas/{area_name}/adequacy_patch.ini" + HYDRO_INI = "input/hydro/hydro.ini" + LINK_PROPERTIES_INI = "input/links/{area_name}/properties.ini" + RENEWABLES_LIST_INI = "input/renewables/clusters/{area_name}/list.ini" + ST_STORAGE_LIST_INI = "input/st-storage/clusters/{area_name}/list.ini" + THERMAL_AREAS_INI = "input/thermal/areas.ini" + 
THERMAL_LIST_INI = "input/thermal/clusters/{area_name}/list.ini" + + +class IniFile: + def __init__( + self, + study_path: Path, + ini_file_type: IniFileTypes, + area_name: Optional[str] = None, + ini_contents: Optional[ConfigParser] = None, + ) -> None: + if "{area_name}" in ini_file_type.value and not area_name: + raise ValueError(f"Area name not provided, ini type {ini_file_type.name} requires 'area_name'") + self._full_path = study_path / ( + ini_file_type.value.format(area_name=area_name) + if ("{area_name}" in ini_file_type.value and area_name) + else ini_file_type.value + ) + self._file_name = self._full_path.name + self._file_path = self._full_path.parent + self._ini_contents = ini_contents or ConfigParser() + if self._full_path.is_file(): + self.update_from_ini_file() + else: + self.write_ini_file() + + @property + def ini_dict(self) -> dict: + """Ini contents as a python dictionary""" + return {section: dict(self._ini_contents[section]) for section in self._ini_contents.sections()} + + @ini_dict.setter + def ini_dict(self, new_ini_dict: dict[str, dict[str, str]]) -> None: + self._ini_contents = ConfigParser() + self._ini_contents.read_dict(new_ini_dict) + + @property + def parsed_ini(self) -> ConfigParser: + """Ini contents as a ConfigParser""" + return self._ini_contents + + @parsed_ini.setter + def parsed_ini(self, new_ini_contents: ConfigParser) -> None: + self._ini_contents = new_ini_contents + + @property + def ini_path(self) -> Path: + """Ini path""" + return self._full_path + + @overload + def add_section(self, section: Path) -> None: ... + + @overload + def add_section(self, section: dict[str, dict[str, str]]) -> None: ... 
+ + def add_section(self, section: Any) -> None: + if isinstance(section, dict): + self._ini_contents.read_dict(section) + elif isinstance(section, Path): + with section.open() as ini_file: + self._ini_contents.read_file(ini_file) + else: + raise TypeError("Only dict or Path are allowed") + + def update_from_ini_file(self) -> None: + if not self._full_path.is_file(): + raise FileNotFoundError(f"No such file: {self._full_path}") + + parsed_ini = ConfigParser() + with self._full_path.open() as file: + parsed_ini.read_file(file) + + self._ini_contents = parsed_ini + + def write_ini_file( + self, + sort_sections: bool = False, + sort_section_content: bool = False, + ) -> None: + if not self._file_path.is_dir(): + self._file_path.mkdir(parents=True) + ini_to_write = self._ini_contents if not sort_sections else self._sort_ini_sections(self._ini_contents) + ini_to_write = ini_to_write if not sort_section_content else self._sort_ini_section_content(ini_to_write) + + with self._full_path.open("w") as file: + ini_to_write.write(file) + + @staticmethod + def _sort_ini_sections(ini_to_sort: ConfigParser) -> ConfigParser: + sorted_ini = ConfigParser() + for section in sorted(ini_to_sort.sections()): + sorted_ini[section] = ini_to_sort[section] + return sorted_ini + + @staticmethod + def _sort_ini_section_content(ini_to_sort: ConfigParser) -> ConfigParser: + sorted_ini = ConfigParser() + for section in ini_to_sort.sections(): + sorted_ini[section] = {key: value for (key, value) in sorted(list(ini_to_sort[section].items()))} + return sorted_ini + + +def check_if_none(value_to_check: Any, default_value: Any) -> Any: + return value_to_check if value_to_check is not None else default_value diff --git a/src/antares/tools/matrix_tool.py b/src/antares/tools/matrix_tool.py index 897764fe..f6debbcb 100644 --- a/src/antares/tools/matrix_tool.py +++ b/src/antares/tools/matrix_tool.py @@ -1,30 +1,30 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This 
Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from typing import Dict - -import pandas as pd - - -def prepare_args_replace_matrix(series: pd.DataFrame, series_path: str) -> Dict: - """ - - Args: - series: matrix to be created in AntaresWeb with command "replace_matrix" - series_path: Antares study path for matrix - - Returns: - Dictionary containing command action and its arguments. - """ - matrix = series.to_numpy().tolist() - body = {"target": series_path, "matrix": matrix} - return {"action": "replace_matrix", "args": body} +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from typing import Dict + +import pandas as pd + + +def prepare_args_replace_matrix(series: pd.DataFrame, series_path: str) -> Dict: + """ + + Args: + series: matrix to be created in AntaresWeb with command "replace_matrix" + series_path: Antares study path for matrix + + Returns: + Dictionary containing command action and its arguments. 
+ """ + matrix = series.to_numpy().tolist() + body = {"target": series_path, "matrix": matrix} + return {"action": "replace_matrix", "args": body} diff --git a/src/antares/tools/time_series_tool.py b/src/antares/tools/time_series_tool.py index 605ffa2c..321a9972 100644 --- a/src/antares/tools/time_series_tool.py +++ b/src/antares/tools/time_series_tool.py @@ -1,127 +1,117 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from enum import Enum -from pathlib import Path -from typing import Optional - -import pandas as pd - - -class TimeSeriesFileType(Enum): - """ - The relative paths to different timeseries files used in the generation of an Antares study, starting from the - base folder of the study. - - Files where the path contains {area_id} have to be used with .format(area_id=) where is the id - in question to access the correct path. - """ - - MISC_GEN = "input/misc-gen/miscgen-{area_id}.txt" - RESERVES = "input/reserves/{area_id}.txt" - SOLAR = "input/solar/series/solar_{area_id}.txt" - WIND = "input/wind/series/wind_{area_id}.txt" - - -class TimeSeriesFile: - """ - Handling time series files reading and writing locally. - - Time series are stored without headers in tab separated files, encoded with UTF-8. - - Args: - ts_file_type: Type of time series file using the class TimeSeriesFileType. - study_path: `Path` to the study directory. - area_id: Area ID for file paths that use the area's id in their path - time_series: The actual timeseries as a pandas DataFrame. - - Raises: - ValueError if the TimeSeriesFileType needs an area_id and none is provided. 
- """ - - def __init__( - self, - ts_file_type: TimeSeriesFileType, - study_path: Path, - area_id: Optional[str] = None, - time_series: Optional[pd.DataFrame] = None, - ) -> None: - if "{area_id}" in ts_file_type.value and area_id is None: - raise ValueError("area_id is required for this file type.") - - self.file_path = study_path / ( - ts_file_type.value - if not area_id - else ts_file_type.value.format(area_id=area_id) - ) - - if self.file_path.is_file() and time_series is not None: - raise ValueError( - f"File {self.file_path} already exists and a time series was provided." - ) - elif self.file_path.is_file() and time_series is None: - self._time_series = pd.read_csv( - self.file_path, sep="\t", header=None, index_col=None, encoding="utf-8" - ) - else: - self._time_series = ( - time_series if time_series is not None else pd.DataFrame([]) - ) - self._write_file() - - @property - def time_series(self) -> pd.DataFrame: - return self._time_series - - @time_series.setter - def time_series(self, time_series: pd.DataFrame) -> None: - self._time_series = time_series - self._write_file() - - def _write_file(self) -> None: - self.file_path.parent.mkdir(parents=True, exist_ok=True) - self._time_series.to_csv( - self.file_path, sep="\t", header=False, index=False, encoding="utf-8" - ) - - -class TimeSeries: - """ - A time series for use in Antares - """ - - def __init__( - self, - time_series: pd.DataFrame = pd.DataFrame([]), - local_file: Optional[TimeSeriesFile] = None, - ) -> None: - self._time_series = time_series - self._local_file = local_file - - @property - def time_series(self) -> pd.DataFrame: - return self._time_series - - @time_series.setter - def time_series(self, time_series: pd.DataFrame) -> None: - self._time_series = time_series - if self._local_file is not None: - self._local_file.time_series = time_series - - @property - def local_file(self) -> Optional[TimeSeriesFile]: - return self._local_file - - @local_file.setter - def local_file(self, local_file: 
TimeSeriesFile) -> None: - self._local_file = local_file - self._time_series = local_file.time_series +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from enum import Enum +from pathlib import Path +from typing import Optional + +import pandas as pd + + +class TimeSeriesFileType(Enum): + """ + The relative paths to different timeseries files used in the generation of an Antares study, starting from the + base folder of the study. + + Files where the path contains {area_id} have to be used with .format(area_id=) where is the id + in question to access the correct path. + """ + + MISC_GEN = "input/misc-gen/miscgen-{area_id}.txt" + RESERVES = "input/reserves/{area_id}.txt" + SOLAR = "input/solar/series/solar_{area_id}.txt" + WIND = "input/wind/series/wind_{area_id}.txt" + + +class TimeSeriesFile: + """ + Handling time series files reading and writing locally. + + Time series are stored without headers in tab separated files, encoded with UTF-8. + + Args: + ts_file_type: Type of time series file using the class TimeSeriesFileType. + study_path: `Path` to the study directory. + area_id: Area ID for file paths that use the area's id in their path + time_series: The actual timeseries as a pandas DataFrame. + + Raises: + ValueError if the TimeSeriesFileType needs an area_id and none is provided. 
+ """ + + def __init__( + self, + ts_file_type: TimeSeriesFileType, + study_path: Path, + area_id: Optional[str] = None, + time_series: Optional[pd.DataFrame] = None, + ) -> None: + if "{area_id}" in ts_file_type.value and area_id is None: + raise ValueError("area_id is required for this file type.") + + self.file_path = study_path / ( + ts_file_type.value if not area_id else ts_file_type.value.format(area_id=area_id) + ) + + if self.file_path.is_file() and time_series is not None: + raise ValueError(f"File {self.file_path} already exists and a time series was provided.") + elif self.file_path.is_file() and time_series is None: + self._time_series = pd.read_csv(self.file_path, sep="\t", header=None, index_col=None, encoding="utf-8") + else: + self._time_series = time_series if time_series is not None else pd.DataFrame([]) + self._write_file() + + @property + def time_series(self) -> pd.DataFrame: + return self._time_series + + @time_series.setter + def time_series(self, time_series: pd.DataFrame) -> None: + self._time_series = time_series + self._write_file() + + def _write_file(self) -> None: + self.file_path.parent.mkdir(parents=True, exist_ok=True) + self._time_series.to_csv(self.file_path, sep="\t", header=False, index=False, encoding="utf-8") + + +class TimeSeries: + """ + A time series for use in Antares + """ + + def __init__( + self, + time_series: pd.DataFrame = pd.DataFrame([]), + local_file: Optional[TimeSeriesFile] = None, + ) -> None: + self._time_series = time_series + self._local_file = local_file + + @property + def time_series(self) -> pd.DataFrame: + return self._time_series + + @time_series.setter + def time_series(self, time_series: pd.DataFrame) -> None: + self._time_series = time_series + if self._local_file is not None: + self._local_file.time_series = time_series + + @property + def local_file(self) -> Optional[TimeSeriesFile]: + return self._local_file + + @local_file.setter + def local_file(self, local_file: TimeSeriesFile) -> None: + 
self._local_file = local_file + self._time_series = local_file.time_series diff --git a/tests/antares/delete/test_delete_api.py b/tests/antares/delete/test_delete_api.py index 954372a4..a5503a72 100644 --- a/tests/antares/delete/test_delete_api.py +++ b/tests/antares/delete/test_delete_api.py @@ -1,186 +1,216 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -import pytest -import requests_mock - -from antares.api_conf.api_conf import APIconf -from antares.exceptions.exceptions import ( - AreaDeletionError, - LinkDeletionError, - ThermalDeletionError, - RenewableDeletionError, - STStorageDeletionError, - BindingConstraintDeletionError, - ConstraintTermDeletionError, -) -from antares.model.area import Area -from antares.model.binding_constraint import BindingConstraint -from antares.model.link import Link -from antares.model.renewable import RenewableCluster -from antares.model.st_storage import STStorage -from antares.model.thermal import ThermalCluster -from antares.service.api_services.area_api import AreaApiService -from antares.service.api_services.binding_constraint_api import BindingConstraintApiService -from antares.service.api_services.link_api import LinkApiService -from antares.service.api_services.renewable_api import RenewableApiService -from antares.service.api_services.st_storage_api import ShortTermStorageApiService -from antares.service.api_services.study_api import StudyApiService -from antares.service.api_services.thermal_api import ThermalApiService - - -class TestDeleteAPI: - api = APIconf("https://antares.com", "token", verify=False) - study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" - study_service = StudyApiService(api, 
study_id) - area_service = AreaApiService(api, study_id) - thermal_service = ThermalApiService(api, study_id) - renewable_service = RenewableApiService(api, study_id) - st_storage_service = ShortTermStorageApiService(api, study_id) - area_fr = Area("fr", area_service, st_storage_service, thermal_service, renewable_service) - area_be = Area("be", area_service, st_storage_service, thermal_service, renewable_service) - link_service = LinkApiService(api, study_id) - constraint_service = BindingConstraintApiService(api, study_id) - antares_web_description_msg = "Mocked Server KO" - - def test_delete_area_success(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}" - mocker.delete(url, status_code=200) - self.area_service.delete_area(area=self.area_fr) - - def test_delete_area_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}" - mocker.delete(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - AreaDeletionError, - match=f"Could not delete the area {self.area_fr.id}: {self.antares_web_description_msg}", - ): - self.area_service.delete_area(area=self.area_fr) - - def test_delete_link_success(self): - with requests_mock.Mocker() as mocker: - link = Link(self.area_be, self.area_fr, self.link_service) - url = f"https://antares.com/api/v1/studies/{self.study_id}/links/{self.area_be.id}/{self.area_fr.id}" - mocker.delete(url, status_code=200) - self.link_service.delete_link(link) - - def test_delete_link_fails(self): - with requests_mock.Mocker() as mocker: - link = Link(self.area_fr, self.area_be, self.link_service) - url = f"https://antares.com/api/v1/studies/{self.study_id}/links/{self.area_fr.id}/{self.area_be.id}" - mocker.delete(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - LinkDeletionError, - 
match=f"Could not delete the link {self.area_fr.id} / {self.area_be.id}: {self.antares_web_description_msg}", - ): - self.link_service.delete_link(link) - - def test_delete_thermal_success(self): - with requests_mock.Mocker() as mocker: - cluster = ThermalCluster(self.thermal_service, self.area_fr.id, "gaz_cluster") - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/clusters/thermal" - mocker.delete(url, status_code=200) - self.area_service.delete_thermal_clusters(self.area_fr, [cluster]) - - def test_delete_thermal_fails(self): - with requests_mock.Mocker() as mocker: - cluster1 = ThermalCluster(self.thermal_service, self.area_fr.id, "gaz_cluster") - cluster2 = ThermalCluster(self.thermal_service, self.area_fr.id, "gaz_cluster_2") - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/clusters/thermal" - mocker.delete(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - ThermalDeletionError, - match=f"Could not delete the following thermal clusters: gaz_cluster, gaz_cluster_2 inside area fr: {self.antares_web_description_msg}", - ): - self.area_service.delete_thermal_clusters(self.area_fr, [cluster1, cluster2]) - - def test_delete_renewable_success(self): - with requests_mock.Mocker() as mocker: - cluster = RenewableCluster(self.renewable_service, self.area_fr.id, "gaz_cluster") - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/clusters/renewable" - mocker.delete(url, status_code=200) - self.area_service.delete_renewable_clusters(self.area_fr, [cluster]) - - def test_delete_renewable_fails(self): - with requests_mock.Mocker() as mocker: - cluster = RenewableCluster(self.renewable_service, self.area_fr.id, "gaz_cluster") - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/clusters/renewable" - mocker.delete(url, json={"description": self.antares_web_description_msg}, status_code=404) - 
with pytest.raises( - RenewableDeletionError, - match=f"Could not delete the following renewable clusters: gaz_cluster inside area fr: {self.antares_web_description_msg}", - ): - self.area_service.delete_renewable_clusters(self.area_fr, [cluster]) - - def test_delete_st_storage_success(self): - with requests_mock.Mocker() as mocker: - storage = STStorage(self.st_storage_service, self.area_fr.id, "battery_fr") - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/storages" - mocker.delete(url, status_code=200) - self.area_service.delete_st_storages(self.area_fr, [storage]) - - def test_delete_st_storage_fails(self): - with requests_mock.Mocker() as mocker: - storage = STStorage(self.st_storage_service, self.area_fr.id, "battery_fr") - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/storages" - mocker.delete(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - STStorageDeletionError, - match=f"Could not delete the following short term storages: battery_fr inside area fr: {self.antares_web_description_msg}", - ): - self.area_service.delete_st_storages(self.area_fr, [storage]) - - def test_delete_binding_constraint_success(self): - with requests_mock.Mocker() as mocker: - constraint_id = "bc_1" - constraint = BindingConstraint(constraint_id, self.constraint_service) - url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint_id}" - mocker.delete(url, status_code=200) - self.study_service.delete_binding_constraint(constraint) - - def test_delete_binding_constraint_fails(self): - with requests_mock.Mocker() as mocker: - constraint_id = "bc_1" - constraint = BindingConstraint(constraint_id, self.constraint_service) - url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint_id}" - mocker.delete(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - 
BindingConstraintDeletionError, - match=f"Could not delete the binding constraint {constraint_id}: {self.antares_web_description_msg}", - ): - self.study_service.delete_binding_constraint(constraint) - - def test_delete_constraint_terms_success(self): - with requests_mock.Mocker() as mocker: - constraint_id = "bc_1" - term_id = "term_1" - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint_id}/term/{term_id}" - ) - mocker.delete(url, status_code=200) - self.constraint_service.delete_binding_constraint_term(constraint_id, term_id) - - def test_delete_constraint_terms_fails(self): - with requests_mock.Mocker() as mocker: - constraint_id = "bc_1" - term_id = "term_1" - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint_id}/term/{term_id}" - ) - mocker.delete(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - ConstraintTermDeletionError, - match=f"Could not delete the term {term_id} of the binding constraint {constraint_id}: {self.antares_web_description_msg}", - ): - self.constraint_service.delete_binding_constraint_term(constraint_id, term_id) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +import pytest +import requests_mock + +from antares.api_conf.api_conf import APIconf +from antares.exceptions.exceptions import ( + AreaDeletionError, + LinkDeletionError, + ThermalDeletionError, + RenewableDeletionError, + STStorageDeletionError, + BindingConstraintDeletionError, + ConstraintTermDeletionError, +) +from antares.model.area import Area +from antares.model.binding_constraint import BindingConstraint +from antares.model.link import Link +from antares.model.renewable import RenewableCluster +from antares.model.st_storage import STStorage +from antares.model.thermal import ThermalCluster +from antares.service.api_services.area_api import AreaApiService +from antares.service.api_services.binding_constraint_api import ( + BindingConstraintApiService, +) +from antares.service.api_services.link_api import LinkApiService +from antares.service.api_services.renewable_api import RenewableApiService +from antares.service.api_services.st_storage_api import ShortTermStorageApiService +from antares.service.api_services.study_api import StudyApiService +from antares.service.api_services.thermal_api import ThermalApiService + + +class TestDeleteAPI: + api = APIconf("https://antares.com", "token", verify=False) + study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" + study_service = StudyApiService(api, study_id) + area_service = AreaApiService(api, study_id) + thermal_service = ThermalApiService(api, study_id) + renewable_service = RenewableApiService(api, study_id) + st_storage_service = ShortTermStorageApiService(api, study_id) + area_fr = Area("fr", area_service, st_storage_service, thermal_service, renewable_service) + area_be = Area("be", area_service, st_storage_service, thermal_service, renewable_service) + link_service = LinkApiService(api, study_id) + constraint_service = BindingConstraintApiService(api, study_id) + antares_web_description_msg = "Mocked Server KO" + + def test_delete_area_success(self): + with requests_mock.Mocker() as mocker: + url = 
f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}" + mocker.delete(url, status_code=200) + self.area_service.delete_area(area=self.area_fr) + + def test_delete_area_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}" + mocker.delete( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + AreaDeletionError, + match=f"Could not delete the area {self.area_fr.id}: {self.antares_web_description_msg}", + ): + self.area_service.delete_area(area=self.area_fr) + + def test_delete_link_success(self): + with requests_mock.Mocker() as mocker: + link = Link(self.area_be, self.area_fr, self.link_service) + url = f"https://antares.com/api/v1/studies/{self.study_id}/links/{self.area_be.id}/{self.area_fr.id}" + mocker.delete(url, status_code=200) + self.link_service.delete_link(link) + + def test_delete_link_fails(self): + with requests_mock.Mocker() as mocker: + link = Link(self.area_fr, self.area_be, self.link_service) + url = f"https://antares.com/api/v1/studies/{self.study_id}/links/{self.area_fr.id}/{self.area_be.id}" + mocker.delete( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + LinkDeletionError, + match=f"Could not delete the link {self.area_fr.id} / {self.area_be.id}: {self.antares_web_description_msg}", + ): + self.link_service.delete_link(link) + + def test_delete_thermal_success(self): + with requests_mock.Mocker() as mocker: + cluster = ThermalCluster(self.thermal_service, self.area_fr.id, "gaz_cluster") + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/clusters/thermal" + mocker.delete(url, status_code=200) + self.area_service.delete_thermal_clusters(self.area_fr, [cluster]) + + def test_delete_thermal_fails(self): + with requests_mock.Mocker() as mocker: + cluster1 = 
ThermalCluster(self.thermal_service, self.area_fr.id, "gaz_cluster") + cluster2 = ThermalCluster(self.thermal_service, self.area_fr.id, "gaz_cluster_2") + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/clusters/thermal" + mocker.delete( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + ThermalDeletionError, + match=f"Could not delete the following thermal clusters: gaz_cluster, gaz_cluster_2 inside area fr: {self.antares_web_description_msg}", + ): + self.area_service.delete_thermal_clusters(self.area_fr, [cluster1, cluster2]) + + def test_delete_renewable_success(self): + with requests_mock.Mocker() as mocker: + cluster = RenewableCluster(self.renewable_service, self.area_fr.id, "gaz_cluster") + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/clusters/renewable" + mocker.delete(url, status_code=200) + self.area_service.delete_renewable_clusters(self.area_fr, [cluster]) + + def test_delete_renewable_fails(self): + with requests_mock.Mocker() as mocker: + cluster = RenewableCluster(self.renewable_service, self.area_fr.id, "gaz_cluster") + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/clusters/renewable" + mocker.delete( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + RenewableDeletionError, + match=f"Could not delete the following renewable clusters: gaz_cluster inside area fr: {self.antares_web_description_msg}", + ): + self.area_service.delete_renewable_clusters(self.area_fr, [cluster]) + + def test_delete_st_storage_success(self): + with requests_mock.Mocker() as mocker: + storage = STStorage(self.st_storage_service, self.area_fr.id, "battery_fr") + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/storages" + mocker.delete(url, status_code=200) + self.area_service.delete_st_storages(self.area_fr, 
[storage]) + + def test_delete_st_storage_fails(self): + with requests_mock.Mocker() as mocker: + storage = STStorage(self.st_storage_service, self.area_fr.id, "battery_fr") + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area_fr.id}/storages" + mocker.delete( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + STStorageDeletionError, + match=f"Could not delete the following short term storages: battery_fr inside area fr: {self.antares_web_description_msg}", + ): + self.area_service.delete_st_storages(self.area_fr, [storage]) + + def test_delete_binding_constraint_success(self): + with requests_mock.Mocker() as mocker: + constraint_id = "bc_1" + constraint = BindingConstraint(constraint_id, self.constraint_service) + url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint_id}" + mocker.delete(url, status_code=200) + self.study_service.delete_binding_constraint(constraint) + + def test_delete_binding_constraint_fails(self): + with requests_mock.Mocker() as mocker: + constraint_id = "bc_1" + constraint = BindingConstraint(constraint_id, self.constraint_service) + url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint_id}" + mocker.delete( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + BindingConstraintDeletionError, + match=f"Could not delete the binding constraint {constraint_id}: {self.antares_web_description_msg}", + ): + self.study_service.delete_binding_constraint(constraint) + + def test_delete_constraint_terms_success(self): + with requests_mock.Mocker() as mocker: + constraint_id = "bc_1" + term_id = "term_1" + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint_id}/term/{term_id}" + ) + mocker.delete(url, status_code=200) + self.constraint_service.delete_binding_constraint_term(constraint_id, term_id) 
+ + def test_delete_constraint_terms_fails(self): + with requests_mock.Mocker() as mocker: + constraint_id = "bc_1" + term_id = "term_1" + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint_id}/term/{term_id}" + ) + mocker.delete( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + ConstraintTermDeletionError, + match=f"Could not delete the term {term_id} of the binding constraint {constraint_id}: {self.antares_web_description_msg}", + ): + self.constraint_service.delete_binding_constraint_term(constraint_id, term_id) diff --git a/tests/antares/services/api_services/test_area_api.py b/tests/antares/services/api_services/test_area_api.py index 01583cc8..ce481424 100644 --- a/tests/antares/services/api_services/test_area_api.py +++ b/tests/antares/services/api_services/test_area_api.py @@ -1,317 +1,380 @@ -import json - -from antares.api_conf.api_conf import APIconf -from antares.exceptions.exceptions import ( - ThermalCreationError, - RenewableCreationError, - STStorageCreationError, - AreaPropertiesUpdateError, - AreaUiUpdateError, - LoadMatrixDownloadError, - LoadMatrixUploadError, - MatrixUploadError, -) -from antares.model.area import Area, AreaUi, AreaProperties -from antares.model.hydro import HydroMatrixName, HydroProperties, Hydro -from antares.model.renewable import RenewableClusterProperties, RenewableCluster -from antares.model.st_storage import STStorageProperties, STStorage -from antares.model.thermal import ThermalClusterProperties, ThermalCluster -from antares.service.service_factory import ServiceFactory -import requests_mock - -import pytest -import pandas as pd -import numpy as np - - -class TestCreateAPI: - api = APIconf("https://antares.com", "token", verify=False) - study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" - area = Area( - "area_test", - ServiceFactory(api, study_id).create_area_service(), - ServiceFactory(api, 
study_id).create_st_storage_service(), - ServiceFactory(api, study_id).create_thermal_service(), - ServiceFactory(api, study_id).create_renewable_service(), - ) - antares_web_description_msg = "Mocked Server KO" - matrix = pd.DataFrame(data=[[0]]) - - def test_update_area_properties_success(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/properties/form" - properties = AreaProperties() - mocker.put(url, status_code=200) - mocker.get(url, json=properties, status_code=200) - self.area.update_properties(properties=properties) - - def test_update_area_properties_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/properties/form" - properties = AreaProperties() - properties.energy_cost_unsupplied = 100 - antares_web_description_msg = "Server KO" - mocker.put(url, json={"description": antares_web_description_msg}, status_code=404) - with pytest.raises( - AreaPropertiesUpdateError, - match=f"Could not update properties for area {self.area.id}: {antares_web_description_msg}", - ): - self.area.update_properties(properties=properties) - - def test_update_area_ui_success(self): - with requests_mock.Mocker() as mocker: - ui = AreaUi(layerX={"1": 0}, layerY={"1": 0}, layerColor={"1": "0"}) - url1 = f"https://antares.com/api/v1/studies/{self.study_id}/areas?type=AREA&ui=true" - ui_info = {"ui": {"x": 0, "y": 0, "layers": 0, "color_r": 0, "color_g": 0, "color_b": 0}} - area_ui = {**ui.model_dump(by_alias=True), **ui_info} - mocker.get(url1, json={self.area.id: area_ui}, status_code=201) - url2 = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/ui" - mocker.put(url2, status_code=200) - - self.area.update_ui(ui) - - def test_update_area_ui_fails(self): - with requests_mock.Mocker() as mocker: - ui = AreaUi(layerX={"1": 0}, layerY={"1": 0}, layerColor={"1": "0"}) - url1 = 
f"https://antares.com/api/v1/studies/{self.study_id}/areas?type=AREA&ui=true" - ui_info = {"ui": {"x": 0, "y": 0, "layers": 0, "color_r": 0, "color_g": 0, "color_b": 0}} - area_ui = {**ui.model_dump(by_alias=True), **ui_info} - mocker.get(url1, json={self.area.id: area_ui}, status_code=201) - url2 = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/ui" - antares_web_description_msg = "Server KO" - mocker.put(url2, json={"description": antares_web_description_msg}, status_code=404) - with pytest.raises( - AreaUiUpdateError, - match=f"Could not update ui for area {self.area.id}: {antares_web_description_msg}", - ): - self.area.update_ui(ui) - - def test_create_thermal_success(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/clusters/thermal" - json_response = json.loads(ThermalClusterProperties().model_dump_json(by_alias=True)) - thermal_name = "thermal_cluster" - mocker.post(url, json={"name": thermal_name, "id": thermal_name, **json_response}, status_code=201) - thermal = self.area.create_thermal_cluster(thermal_name=thermal_name) - assert isinstance(thermal, ThermalCluster) - - def test_create_thermal_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/clusters/thermal" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - thermal_name = "thermal_cluster" - - with pytest.raises( - ThermalCreationError, - match=f"Could not create the thermal cluster {thermal_name} inside area {self.area.id}: {self.antares_web_description_msg}", - ): - self.area.create_thermal_cluster(thermal_name=thermal_name) - - def test_create_renewable_success(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/clusters/renewable" - json_response = 
json.loads(RenewableClusterProperties().model_dump_json(by_alias=True)) - renewable_name = "renewable_cluster" - mocker.post(url, json={"name": renewable_name, "id": renewable_name, **json_response}, status_code=201) - - renewable = self.area.create_renewable_cluster( - renewable_name=renewable_name, properties=RenewableClusterProperties(), series=None - ) - assert isinstance(renewable, RenewableCluster) - - def test_create_renewable_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/clusters/renewable" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - renewable_name = "renewable_cluster" - - with pytest.raises( - RenewableCreationError, - match=f"Could not create the renewable cluster {renewable_name} inside area {self.area.id}: {self.antares_web_description_msg}", - ): - self.area.create_renewable_cluster( - renewable_name=renewable_name, properties=RenewableClusterProperties(), series=None - ) - - def test_create_st_storage_success(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/storages" - json_response = json.loads(STStorageProperties().model_dump_json(by_alias=True)) - st_storage_name = "short_term_storage" - mocker.post(url, json={"name": st_storage_name, "id": st_storage_name, **json_response}, status_code=201) - - st_storage = self.area.create_st_storage(st_storage_name=st_storage_name) - assert isinstance(st_storage, STStorage) - - def test_create_st_storage_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/storages" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - st_storage_name = "short_term_storage" - - with pytest.raises( - STStorageCreationError, - match=f"Could not create the short term storage {st_storage_name} 
inside area {self.area.id}: {self.antares_web_description_msg}", - ): - self.area.create_st_storage(st_storage_name=st_storage_name) - - def test_create_thermal_cluster_with_matrices(self): - expected_url = f"https://antares.com/api/v1/studies/{self.study_id}/commands" - matrix_test = pd.DataFrame(data=np.ones((8760, 1))) - json_for_post = ( - [ - { - "action": "create_cluster", - "args": { - "area_id": "fr", - "cluster_name": "cluster 1", - "parameters": {}, - "prepro": matrix_test.to_dict(orient="split"), - "modulation": matrix_test.to_dict(orient="split"), - }, - } - ], - ) - with requests_mock.Mocker() as mocker: - mocker.post(expected_url, json=json_for_post, status_code=200) - - thermal_cluster = self.area.create_thermal_cluster_with_matrices( - cluster_name="cluster_test", - parameters=ThermalClusterProperties(), - prepro=matrix_test, - modulation=matrix_test, - series=matrix_test, - CO2Cost=matrix_test, - fuelCost=matrix_test, - ) - # to assert two http requests to "commands" - assert len(mocker.request_history) == 2 - assert isinstance(thermal_cluster, ThermalCluster) - - def test_create_hydro_success(self): - url_hydro_form = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/hydro/form" - json_for_post = json.loads(HydroProperties().model_dump_json(by_alias=True)) - series = pd.DataFrame(data=np.ones((150, 1))) - - url_for_command = f"https://antares.com/api/v1/studies/{self.study_id}/commands" - - matrices_hydro = { - HydroMatrixName.SERIES_ROR: series, - HydroMatrixName.SERIES_MOD: series, - HydroMatrixName.SERIES_MIN_GEN: series, - HydroMatrixName.PREPRO_ENERGY: series, - HydroMatrixName.COMMON_WATER_VALUES: series, - HydroMatrixName.COMMON_RESERVOIR: series, - HydroMatrixName.COMMON_MAX_POWER: series, - HydroMatrixName.COMMON_INFLOW_PATTERN: series, - HydroMatrixName.COMMON_CREDIT_MODULATIONS: series, - } - with requests_mock.Mocker() as mocker: - mocker.put(url_hydro_form, json=json_for_post, status_code=200) - 
mocker.post(url_for_command) - hydro = self.area.create_hydro(properties=HydroProperties(), matrices=matrices_hydro) - # to assert two http requests to "commands" and "hydro/form" - assert len(mocker.request_history) == 2 - assert isinstance(hydro, Hydro) - - def test_get_load_matrix_success(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" - mocker.get(url, json={"data": [[0]], "index": [0], "columns": [0]}, status_code=200) - load_matrix = self.area.get_load_matrix() - assert load_matrix.equals(self.matrix) - - def test_get_load_matrix_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" - mocker.get(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - LoadMatrixDownloadError, - match=f"Could not download load matrix for area {self.area.id}: {self.antares_web_description_msg}", - ): - self.area.get_load_matrix() - - def test_upload_load_matrix_success(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" - mocker.post(url, status_code=200) - self.area.upload_load_matrix(pd.DataFrame(data=np.ones((8760, 1)))) - - def test_upload_load_matrix_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - LoadMatrixUploadError, - match=f"Could not upload load matrix for area {self.area.id}: {self.antares_web_description_msg}", - ): - self.area.upload_load_matrix(pd.DataFrame(data=np.ones((8760, 1)))) - - def test_upload_wrong_load_matrix_fails(self): - with requests_mock.Mocker() as mocker: - 
url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - LoadMatrixUploadError, - match=f"Could not upload load matrix for area {self.area.id}: Expected 8760 rows and received 1.", - ): - self.area.upload_load_matrix(self.matrix) - - def test_create_wind_success(self): - with requests_mock.Mocker() as mocker: - expected_url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/" f"raw?path=input/wind/series/wind_area_test" - ) - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" f"input/wind/series/wind_{self.area.id}" - ) - mocker.post(url, status_code=200) - self.area.create_wind(series=self.matrix) - - assert mocker.request_history[0].url == expected_url - - def test_create_reserves_success(self): - with requests_mock.Mocker() as mocker: - expected_url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/reserves/area_test" - url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" f"input/reserves/{self.area.id}" - mocker.post(url, status_code=200) - self.area.create_reserves(series=self.matrix) - - assert mocker.request_history[0].url == expected_url - - def test_create_reserves_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" f"input/reserves/{self.area.id}" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - - with pytest.raises( - MatrixUploadError, match=f"Error uploading matrix for area {self.area.id}: Mocked Server KO" - ): - self.area.create_reserves(series=self.matrix) - - def test_create_solar_success(self): - with requests_mock.Mocker() as mocker: - expected_url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/solar/series/solar_area_test" - ) - url = ( - 
f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" - f"input/solar/series/solar_{self.area.id}" - ) - mocker.post(url, status_code=200) - self.area.create_solar(series=self.matrix) - - assert mocker.request_history[0].url == expected_url - - def test_create_misc_gen_success(self): - with requests_mock.Mocker() as mocker: - expected_url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/misc-gen/miscgen-area_test" - ) - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" f"input/misc-gen/miscgen-{self.area.id}" - ) - mocker.post(url, status_code=200) - self.area.create_misc_gen(series=self.matrix) - - assert mocker.request_history[0].url == expected_url +import json + +from antares.api_conf.api_conf import APIconf +from antares.exceptions.exceptions import ( + ThermalCreationError, + RenewableCreationError, + STStorageCreationError, + AreaPropertiesUpdateError, + AreaUiUpdateError, + LoadMatrixDownloadError, + LoadMatrixUploadError, + MatrixUploadError, +) +from antares.model.area import Area, AreaUi, AreaProperties +from antares.model.hydro import HydroMatrixName, HydroProperties, Hydro +from antares.model.renewable import RenewableClusterProperties, RenewableCluster +from antares.model.st_storage import STStorageProperties, STStorage +from antares.model.thermal import ThermalClusterProperties, ThermalCluster +from antares.service.service_factory import ServiceFactory +import requests_mock + +import pytest +import pandas as pd +import numpy as np + + +class TestCreateAPI: + api = APIconf("https://antares.com", "token", verify=False) + study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" + area = Area( + "area_test", + ServiceFactory(api, study_id).create_area_service(), + ServiceFactory(api, study_id).create_st_storage_service(), + ServiceFactory(api, study_id).create_thermal_service(), + ServiceFactory(api, study_id).create_renewable_service(), + ) + antares_web_description_msg = "Mocked Server KO" + matrix = 
pd.DataFrame(data=[[0]]) + + def test_update_area_properties_success(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/properties/form" + properties = AreaProperties() + mocker.put(url, status_code=200) + mocker.get(url, json=properties, status_code=200) + self.area.update_properties(properties=properties) + + def test_update_area_properties_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/properties/form" + properties = AreaProperties() + properties.energy_cost_unsupplied = 100 + antares_web_description_msg = "Server KO" + mocker.put(url, json={"description": antares_web_description_msg}, status_code=404) + with pytest.raises( + AreaPropertiesUpdateError, + match=f"Could not update properties for area {self.area.id}: {antares_web_description_msg}", + ): + self.area.update_properties(properties=properties) + + def test_update_area_ui_success(self): + with requests_mock.Mocker() as mocker: + ui = AreaUi(layerX={"1": 0}, layerY={"1": 0}, layerColor={"1": "0"}) + url1 = f"https://antares.com/api/v1/studies/{self.study_id}/areas?type=AREA&ui=true" + ui_info = { + "ui": { + "x": 0, + "y": 0, + "layers": 0, + "color_r": 0, + "color_g": 0, + "color_b": 0, + } + } + area_ui = {**ui.model_dump(by_alias=True), **ui_info} + mocker.get(url1, json={self.area.id: area_ui}, status_code=201) + url2 = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/ui" + mocker.put(url2, status_code=200) + + self.area.update_ui(ui) + + def test_update_area_ui_fails(self): + with requests_mock.Mocker() as mocker: + ui = AreaUi(layerX={"1": 0}, layerY={"1": 0}, layerColor={"1": "0"}) + url1 = f"https://antares.com/api/v1/studies/{self.study_id}/areas?type=AREA&ui=true" + ui_info = { + "ui": { + "x": 0, + "y": 0, + "layers": 0, + "color_r": 0, + "color_g": 0, + "color_b": 0, + } + } + area_ui = 
{**ui.model_dump(by_alias=True), **ui_info} + mocker.get(url1, json={self.area.id: area_ui}, status_code=201) + url2 = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/ui" + antares_web_description_msg = "Server KO" + mocker.put(url2, json={"description": antares_web_description_msg}, status_code=404) + with pytest.raises( + AreaUiUpdateError, + match=f"Could not update ui for area {self.area.id}: {antares_web_description_msg}", + ): + self.area.update_ui(ui) + + def test_create_thermal_success(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/clusters/thermal" + json_response = json.loads(ThermalClusterProperties().model_dump_json(by_alias=True)) + thermal_name = "thermal_cluster" + mocker.post( + url, + json={"name": thermal_name, "id": thermal_name, **json_response}, + status_code=201, + ) + thermal = self.area.create_thermal_cluster(thermal_name=thermal_name) + assert isinstance(thermal, ThermalCluster) + + def test_create_thermal_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/clusters/thermal" + mocker.post( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + thermal_name = "thermal_cluster" + + with pytest.raises( + ThermalCreationError, + match=f"Could not create the thermal cluster {thermal_name} inside area {self.area.id}: {self.antares_web_description_msg}", + ): + self.area.create_thermal_cluster(thermal_name=thermal_name) + + def test_create_renewable_success(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/clusters/renewable" + json_response = json.loads(RenewableClusterProperties().model_dump_json(by_alias=True)) + renewable_name = "renewable_cluster" + mocker.post( + url, + json={"name": renewable_name, "id": renewable_name, **json_response}, 
+ status_code=201, + ) + + renewable = self.area.create_renewable_cluster( + renewable_name=renewable_name, + properties=RenewableClusterProperties(), + series=None, + ) + assert isinstance(renewable, RenewableCluster) + + def test_create_renewable_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/clusters/renewable" + mocker.post( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + renewable_name = "renewable_cluster" + + with pytest.raises( + RenewableCreationError, + match=f"Could not create the renewable cluster {renewable_name} inside area {self.area.id}: {self.antares_web_description_msg}", + ): + self.area.create_renewable_cluster( + renewable_name=renewable_name, + properties=RenewableClusterProperties(), + series=None, + ) + + def test_create_st_storage_success(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/storages" + json_response = json.loads(STStorageProperties().model_dump_json(by_alias=True)) + st_storage_name = "short_term_storage" + mocker.post( + url, + json={"name": st_storage_name, "id": st_storage_name, **json_response}, + status_code=201, + ) + + st_storage = self.area.create_st_storage(st_storage_name=st_storage_name) + assert isinstance(st_storage, STStorage) + + def test_create_st_storage_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/storages" + mocker.post( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + st_storage_name = "short_term_storage" + + with pytest.raises( + STStorageCreationError, + match=f"Could not create the short term storage {st_storage_name} inside area {self.area.id}: {self.antares_web_description_msg}", + ): + self.area.create_st_storage(st_storage_name=st_storage_name) + + def 
test_create_thermal_cluster_with_matrices(self): + expected_url = f"https://antares.com/api/v1/studies/{self.study_id}/commands" + matrix_test = pd.DataFrame(data=np.ones((8760, 1))) + json_for_post = ( + [ + { + "action": "create_cluster", + "args": { + "area_id": "fr", + "cluster_name": "cluster 1", + "parameters": {}, + "prepro": matrix_test.to_dict(orient="split"), + "modulation": matrix_test.to_dict(orient="split"), + }, + } + ], + ) + with requests_mock.Mocker() as mocker: + mocker.post(expected_url, json=json_for_post, status_code=200) + + thermal_cluster = self.area.create_thermal_cluster_with_matrices( + cluster_name="cluster_test", + parameters=ThermalClusterProperties(), + prepro=matrix_test, + modulation=matrix_test, + series=matrix_test, + CO2Cost=matrix_test, + fuelCost=matrix_test, + ) + # to assert two http requests to "commands" + assert len(mocker.request_history) == 2 + assert isinstance(thermal_cluster, ThermalCluster) + + def test_create_hydro_success(self): + url_hydro_form = f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.area.id}/hydro/form" + json_for_post = json.loads(HydroProperties().model_dump_json(by_alias=True)) + series = pd.DataFrame(data=np.ones((150, 1))) + + url_for_command = f"https://antares.com/api/v1/studies/{self.study_id}/commands" + + matrices_hydro = { + HydroMatrixName.SERIES_ROR: series, + HydroMatrixName.SERIES_MOD: series, + HydroMatrixName.SERIES_MIN_GEN: series, + HydroMatrixName.PREPRO_ENERGY: series, + HydroMatrixName.COMMON_WATER_VALUES: series, + HydroMatrixName.COMMON_RESERVOIR: series, + HydroMatrixName.COMMON_MAX_POWER: series, + HydroMatrixName.COMMON_INFLOW_PATTERN: series, + HydroMatrixName.COMMON_CREDIT_MODULATIONS: series, + } + with requests_mock.Mocker() as mocker: + mocker.put(url_hydro_form, json=json_for_post, status_code=200) + mocker.post(url_for_command) + hydro = self.area.create_hydro(properties=HydroProperties(), matrices=matrices_hydro) + # to assert two http requests to 
"commands" and "hydro/form" + assert len(mocker.request_history) == 2 + assert isinstance(hydro, Hydro) + + def test_get_load_matrix_success(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" + mocker.get(url, json={"data": [[0]], "index": [0], "columns": [0]}, status_code=200) + load_matrix = self.area.get_load_matrix() + assert load_matrix.equals(self.matrix) + + def test_get_load_matrix_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" + mocker.get( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + LoadMatrixDownloadError, + match=f"Could not download load matrix for area {self.area.id}: {self.antares_web_description_msg}", + ): + self.area.get_load_matrix() + + def test_upload_load_matrix_success(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" + mocker.post(url, status_code=200) + self.area.upload_load_matrix(pd.DataFrame(data=np.ones((8760, 1)))) + + def test_upload_load_matrix_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" + mocker.post( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + LoadMatrixUploadError, + match=f"Could not upload load matrix for area {self.area.id}: {self.antares_web_description_msg}", + ): + self.area.upload_load_matrix(pd.DataFrame(data=np.ones((8760, 1)))) + + def test_upload_wrong_load_matrix_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/load/series/load_{self.area.id}" + mocker.post( + url, + 
json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + LoadMatrixUploadError, + match=f"Could not upload load matrix for area {self.area.id}: Expected 8760 rows and received 1.", + ): + self.area.upload_load_matrix(self.matrix) + + def test_create_wind_success(self): + with requests_mock.Mocker() as mocker: + expected_url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/" f"raw?path=input/wind/series/wind_area_test" + ) + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" f"input/wind/series/wind_{self.area.id}" + ) + mocker.post(url, status_code=200) + self.area.create_wind(series=self.matrix) + + assert mocker.request_history[0].url == expected_url + + def test_create_reserves_success(self): + with requests_mock.Mocker() as mocker: + expected_url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/reserves/area_test" + url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" f"input/reserves/{self.area.id}" + mocker.post(url, status_code=200) + self.area.create_reserves(series=self.matrix) + + assert mocker.request_history[0].url == expected_url + + def test_create_reserves_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" f"input/reserves/{self.area.id}" + mocker.post( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + + with pytest.raises( + MatrixUploadError, + match=f"Error uploading matrix for area {self.area.id}: Mocked Server KO", + ): + self.area.create_reserves(series=self.matrix) + + def test_create_solar_success(self): + with requests_mock.Mocker() as mocker: + expected_url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/solar/series/solar_area_test" + ) + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" + f"input/solar/series/solar_{self.area.id}" + ) + mocker.post(url, 
status_code=200) + self.area.create_solar(series=self.matrix) + + assert mocker.request_history[0].url == expected_url + + def test_create_misc_gen_success(self): + with requests_mock.Mocker() as mocker: + expected_url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/misc-gen/miscgen-area_test" + ) + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=" f"input/misc-gen/miscgen-{self.area.id}" + ) + mocker.post(url, status_code=200) + self.area.create_misc_gen(series=self.matrix) + + assert mocker.request_history[0].url == expected_url diff --git a/tests/antares/services/api_services/test_binding_constraint_api.py b/tests/antares/services/api_services/test_binding_constraint_api.py index d3fe60c6..ec427e07 100644 --- a/tests/antares/services/api_services/test_binding_constraint_api.py +++ b/tests/antares/services/api_services/test_binding_constraint_api.py @@ -1,87 +1,130 @@ -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.exceptions.exceptions import ConstraintPropertiesUpdateError, ConstraintMatrixDownloadError -from antares.model.area import Area -from antares.model.binding_constraint import BindingConstraintProperties, BindingConstraint, ConstraintMatrixName -from antares.model.study import Study -from antares.service.service_factory import ServiceFactory -import requests_mock - -import pytest - - -@pytest.fixture -def constraint_set(): - params = [ - ("get_less_term_matrix", ConstraintMatrixName.LESS_TERM, "input/bindingconstraints/bc_test_lt", [[0]]), - ("get_greater_term_matrix", ConstraintMatrixName.GREATER_TERM, "input/bindingconstraints/bc_test_gt", [[0]]), - ("get_equal_term_matrix", ConstraintMatrixName.EQUAL_TERM, "input/bindingconstraints/bc_test_eq", [[0]]), - ] - return params - - -class TestCreateAPI: - api = APIconf("https://antares.com", "token", verify=False) - study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" - study = Study("study_test", "870", ServiceFactory(api, 
study_id)) - area = Area( - "study_test", - ServiceFactory(api, study_id).create_area_service(), - ServiceFactory(api, study_id).create_st_storage_service(), - ServiceFactory(api, study_id).create_thermal_service(), - ServiceFactory(api, study_id).create_renewable_service(), - ) - antares_web_description_msg = "Mocked Server KO" - matrix = pd.DataFrame(data=[[0]]) - - def test_update_binding_constraint_properties_success(self): - with requests_mock.Mocker() as mocker: - properties = BindingConstraintProperties(enabled=False) - constraint = BindingConstraint( - "bc_1", ServiceFactory(self.api, self.study_id).create_binding_constraints_service() - ) - url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint.id}" - mocker.put(url, json={"id": "id", "name": "name", "terms": [], **properties.model_dump()}, status_code=200) - constraint.update_properties(properties=properties) - - def test_update_binding_constraint_properties_fails(self): - with requests_mock.Mocker() as mocker: - properties = BindingConstraintProperties(enabled=False) - constraint = BindingConstraint( - "bc_1", ServiceFactory(self.api, self.study_id).create_binding_constraints_service() - ) - url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint.id}" - antares_web_description_msg = "Server KO" - mocker.put(url, json={"description": antares_web_description_msg}, status_code=404) - - with pytest.raises( - ConstraintPropertiesUpdateError, - match=f"Could not update properties for binding constraint {constraint.id}: {antares_web_description_msg}", - ): - constraint.update_properties(properties=properties) - - def test_get_constraint_matrix_success(self, constraint_set): - constraint = BindingConstraint( - "bc_test", ServiceFactory(self.api, self.study_id).create_binding_constraints_service() - ) - for matrix_method, enum_value, path, expected_matrix in constraint_set: - with requests_mock.Mocker() as mocker: - url = 
f"https://antares.com/api/v1/studies/{self.study_id}/raw?path={path}" - mocker.get(url, json={"data": expected_matrix, "index": [0], "columns": [0]}, status_code=200) - constraint_matrix = getattr(constraint, matrix_method)() - assert constraint_matrix.equals(self.matrix) - - def test_get_constraint_matrix_fails(self, constraint_set): - constraint = BindingConstraint( - "bc_test", ServiceFactory(self.api, self.study_id).create_binding_constraints_service() - ) - for matrix_method, enum_value, path, _ in constraint_set: - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path={path}" - mocker.get(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - ConstraintMatrixDownloadError, - match=f"Could not download matrix {enum_value.value} for binding constraint {constraint.name}:", - ): - getattr(constraint, matrix_method)() +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.exceptions.exceptions import ( + ConstraintPropertiesUpdateError, + ConstraintMatrixDownloadError, +) +from antares.model.area import Area +from antares.model.binding_constraint import ( + BindingConstraintProperties, + BindingConstraint, + ConstraintMatrixName, +) +from antares.model.study import Study +from antares.service.service_factory import ServiceFactory +import requests_mock + +import pytest + + +@pytest.fixture +def constraint_set(): + params = [ + ( + "get_less_term_matrix", + ConstraintMatrixName.LESS_TERM, + "input/bindingconstraints/bc_test_lt", + [[0]], + ), + ( + "get_greater_term_matrix", + ConstraintMatrixName.GREATER_TERM, + "input/bindingconstraints/bc_test_gt", + [[0]], + ), + ( + "get_equal_term_matrix", + ConstraintMatrixName.EQUAL_TERM, + "input/bindingconstraints/bc_test_eq", + [[0]], + ), + ] + return params + + +class TestCreateAPI: + api = APIconf("https://antares.com", "token", verify=False) + study_id = 
"22c52f44-4c2a-407b-862b-490887f93dd8" + study = Study("study_test", "870", ServiceFactory(api, study_id)) + area = Area( + "study_test", + ServiceFactory(api, study_id).create_area_service(), + ServiceFactory(api, study_id).create_st_storage_service(), + ServiceFactory(api, study_id).create_thermal_service(), + ServiceFactory(api, study_id).create_renewable_service(), + ) + antares_web_description_msg = "Mocked Server KO" + matrix = pd.DataFrame(data=[[0]]) + + def test_update_binding_constraint_properties_success(self): + with requests_mock.Mocker() as mocker: + properties = BindingConstraintProperties(enabled=False) + constraint = BindingConstraint( + "bc_1", + ServiceFactory(self.api, self.study_id).create_binding_constraints_service(), + ) + url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint.id}" + mocker.put( + url, + json={ + "id": "id", + "name": "name", + "terms": [], + **properties.model_dump(), + }, + status_code=200, + ) + constraint.update_properties(properties=properties) + + def test_update_binding_constraint_properties_fails(self): + with requests_mock.Mocker() as mocker: + properties = BindingConstraintProperties(enabled=False) + constraint = BindingConstraint( + "bc_1", + ServiceFactory(self.api, self.study_id).create_binding_constraints_service(), + ) + url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints/{constraint.id}" + antares_web_description_msg = "Server KO" + mocker.put(url, json={"description": antares_web_description_msg}, status_code=404) + + with pytest.raises( + ConstraintPropertiesUpdateError, + match=f"Could not update properties for binding constraint {constraint.id}: {antares_web_description_msg}", + ): + constraint.update_properties(properties=properties) + + def test_get_constraint_matrix_success(self, constraint_set): + constraint = BindingConstraint( + "bc_test", + ServiceFactory(self.api, self.study_id).create_binding_constraints_service(), + ) + for 
matrix_method, enum_value, path, expected_matrix in constraint_set: + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path={path}" + mocker.get( + url, + json={"data": expected_matrix, "index": [0], "columns": [0]}, + status_code=200, + ) + constraint_matrix = getattr(constraint, matrix_method)() + assert constraint_matrix.equals(self.matrix) + + def test_get_constraint_matrix_fails(self, constraint_set): + constraint = BindingConstraint( + "bc_test", + ServiceFactory(self.api, self.study_id).create_binding_constraints_service(), + ) + for matrix_method, enum_value, path, _ in constraint_set: + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/raw?path={path}" + mocker.get( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + ConstraintMatrixDownloadError, + match=f"Could not download matrix {enum_value.value} for binding constraint {constraint.name}:", + ): + getattr(constraint, matrix_method)() diff --git a/tests/antares/services/api_services/test_link_api.py b/tests/antares/services/api_services/test_link_api.py index dd6762d9..0e8bfecd 100644 --- a/tests/antares/services/api_services/test_link_api.py +++ b/tests/antares/services/api_services/test_link_api.py @@ -1,92 +1,118 @@ -import json - -from antares.api_conf.api_conf import APIconf -from antares.exceptions.exceptions import LinkUiUpdateError, LinkPropertiesUpdateError -from antares.model.area import Area -from antares.model.commons import FilterOption -from antares.model.link import LinkProperties, LinkUi, Link -from antares.model.study import Study -from antares.service.service_factory import ServiceFactory -import requests_mock - -import pytest - - -class TestCreateAPI: - api = APIconf("https://antares.com", "token", verify=False) - study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" - study = Study("study_test", "870", ServiceFactory(api, 
study_id)) - area_from = Area( - name="area_from", area_service=api, storage_service=api, thermal_service=api, renewable_service=api - ) - area_to = Area(name="area_to", area_service=api, storage_service=api, thermal_service=api, renewable_service=api) - antares_web_description_msg = "Mocked Server KO" - link = Link(area_from, area_to, ServiceFactory(api, study_id).create_link_service()) - - def test_update_links_properties_success(self): - with requests_mock.Mocker() as mocker: - properties = LinkProperties() - properties.filter_synthesis = [FilterOption.DAILY] - properties.filter_year_by_year = [FilterOption.DAILY] - ui = LinkUi() - raw_url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/links/" - f"{self.area_from.id}/properties/{self.area_to.id}" - ) - mocker.post(raw_url, status_code=200) - mocker.get( - raw_url, - json={**ui.model_dump(by_alias=True), **json.loads(properties.model_dump_json(by_alias=True))}, - status_code=200, - ) - - self.link.update_properties(properties) - - def test_update_links_properties_fails(self): - with requests_mock.Mocker() as mocker: - properties = LinkProperties() - properties.filter_synthesis = [FilterOption.DAILY] - properties.filter_year_by_year = [FilterOption.DAILY] - raw_url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/links/" - f"{self.area_from.id}/properties/{self.area_to.id}" - ) - antares_web_description_msg = "Server KO" - mocker.get(raw_url, json={"description": antares_web_description_msg}, status_code=404) - with pytest.raises( - LinkPropertiesUpdateError, - match=f"Could not update properties for link {self.link.name}: {antares_web_description_msg}", - ): - self.link.update_properties(properties) - - def test_update_links_ui_success(self): - with requests_mock.Mocker() as mocker: - properties = LinkProperties() - ui = LinkUi() - ui.link_width = 12 - raw_url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/links/{self.area_from.id}" - 
f"/properties/{self.area_to.id}" - ) - mocker.post(raw_url, status_code=200) - mocker.get( - raw_url, json={**ui.model_dump(by_alias=True), **properties.model_dump(by_alias=True)}, status_code=200 - ) - - self.link.update_ui(ui) - - def test_update_links_ui_fails(self): - with requests_mock.Mocker() as mocker: - ui = LinkUi() - ui.link_width = 12 - raw_url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/links/{self.area_from.id}" - f"/properties/{self.area_to.id}" - ) - antares_web_description_msg = "Server KO" - mocker.get(raw_url, json={"description": antares_web_description_msg}, status_code=404) - with pytest.raises( - LinkUiUpdateError, - match=f"Could not update ui for link {self.link.name}: {antares_web_description_msg}", - ): - self.link.update_ui(ui) +import json + +from antares.api_conf.api_conf import APIconf +from antares.exceptions.exceptions import LinkUiUpdateError, LinkPropertiesUpdateError +from antares.model.area import Area +from antares.model.commons import FilterOption +from antares.model.link import LinkProperties, LinkUi, Link +from antares.model.study import Study +from antares.service.service_factory import ServiceFactory +import requests_mock + +import pytest + + +class TestCreateAPI: + api = APIconf("https://antares.com", "token", verify=False) + study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" + study = Study("study_test", "870", ServiceFactory(api, study_id)) + area_from = Area( + name="area_from", + area_service=api, + storage_service=api, + thermal_service=api, + renewable_service=api, + ) + area_to = Area( + name="area_to", + area_service=api, + storage_service=api, + thermal_service=api, + renewable_service=api, + ) + antares_web_description_msg = "Mocked Server KO" + link = Link(area_from, area_to, ServiceFactory(api, study_id).create_link_service()) + + def test_update_links_properties_success(self): + with requests_mock.Mocker() as mocker: + properties = LinkProperties() + properties.filter_synthesis = 
[FilterOption.DAILY] + properties.filter_year_by_year = [FilterOption.DAILY] + ui = LinkUi() + raw_url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/links/" + f"{self.area_from.id}/properties/{self.area_to.id}" + ) + mocker.post(raw_url, status_code=200) + mocker.get( + raw_url, + json={ + **ui.model_dump(by_alias=True), + **json.loads(properties.model_dump_json(by_alias=True)), + }, + status_code=200, + ) + + self.link.update_properties(properties) + + def test_update_links_properties_fails(self): + with requests_mock.Mocker() as mocker: + properties = LinkProperties() + properties.filter_synthesis = [FilterOption.DAILY] + properties.filter_year_by_year = [FilterOption.DAILY] + raw_url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/links/" + f"{self.area_from.id}/properties/{self.area_to.id}" + ) + antares_web_description_msg = "Server KO" + mocker.get( + raw_url, + json={"description": antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + LinkPropertiesUpdateError, + match=f"Could not update properties for link {self.link.name}: {antares_web_description_msg}", + ): + self.link.update_properties(properties) + + def test_update_links_ui_success(self): + with requests_mock.Mocker() as mocker: + properties = LinkProperties() + ui = LinkUi() + ui.link_width = 12 + raw_url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/links/{self.area_from.id}" + f"/properties/{self.area_to.id}" + ) + mocker.post(raw_url, status_code=200) + mocker.get( + raw_url, + json={ + **ui.model_dump(by_alias=True), + **properties.model_dump(by_alias=True), + }, + status_code=200, + ) + + self.link.update_ui(ui) + + def test_update_links_ui_fails(self): + with requests_mock.Mocker() as mocker: + ui = LinkUi() + ui.link_width = 12 + raw_url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/links/{self.area_from.id}" + f"/properties/{self.area_to.id}" + ) + 
antares_web_description_msg = "Server KO" + mocker.get( + raw_url, + json={"description": antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + LinkUiUpdateError, + match=f"Could not update ui for link {self.link.name}: {antares_web_description_msg}", + ): + self.link.update_ui(ui) diff --git a/tests/antares/services/api_services/test_renewable_api.py b/tests/antares/services/api_services/test_renewable_api.py index 8b5c7958..deecdcb4 100644 --- a/tests/antares/services/api_services/test_renewable_api.py +++ b/tests/antares/services/api_services/test_renewable_api.py @@ -1,76 +1,87 @@ -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.exceptions.exceptions import RenewablePropertiesUpdateError, RenewableMatrixDownloadError -from antares.model.area import Area -from antares.model.renewable import RenewableClusterProperties, RenewableCluster -from antares.service.service_factory import ServiceFactory -import requests_mock - -import pytest - - -class TestCreateAPI: - api = APIconf("https://antares.com", "token", verify=False) - study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" - area = Area( - "study_test", - ServiceFactory(api, study_id).create_area_service(), - ServiceFactory(api, study_id).create_st_storage_service(), - ServiceFactory(api, study_id).create_thermal_service(), - ServiceFactory(api, study_id).create_renewable_service(), - ) - renewable = RenewableCluster(ServiceFactory(api, study_id).create_renewable_service(), area.id, "onshore_fr") - antares_web_description_msg = "Mocked Server KO" - matrix = pd.DataFrame(data=[[0]]) - - def test_update_renewable_properties_success(self): - with requests_mock.Mocker() as mocker: - properties = RenewableClusterProperties(enabled=False) - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.renewable.area_id}/" - f"clusters/renewable/{self.renewable.id}" - ) - mocker.patch(url, json={"id": "id", "name": "name", **properties.model_dump()}, 
status_code=200) - self.renewable.update_properties(properties=properties) - - def test_update_renewable_properties_fails(self): - with requests_mock.Mocker() as mocker: - properties = RenewableClusterProperties(enabled=False) - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.renewable.area_id}" - f"/clusters/renewable/{self.renewable.id}" - ) - antares_web_description_msg = "Server KO" - mocker.patch(url, json={"description": antares_web_description_msg}, status_code=404) - - with pytest.raises( - RenewablePropertiesUpdateError, - match=f"Could not update properties for renewable cluster {self.renewable.id} " - f"inside area {self.area.id}: {antares_web_description_msg}", - ): - self.renewable.update_properties(properties=properties) - - def test_get_renewable_matrices_success(self): - with requests_mock.Mocker() as mocker: - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/renewables/series/" - f"{self.area.id}/{self.renewable.name}/series" - ) - mocker.get(url, json={"data": [[0]], "index": [0], "columns": [0]}, status_code=200) - renewable_matrix = self.renewable.get_renewable_matrix() - assert renewable_matrix.equals(self.matrix) - - def test_get_renewable_matrices_fails(self): - with requests_mock.Mocker() as mocker: - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/renewables/series/" - f"{self.area.id}/{self.renewable.name}/series" - ) - mocker.get(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - RenewableMatrixDownloadError, - match=f"Could not download matrix for cluster {self.renewable.name} inside area {self.area.id}" - f": {self.antares_web_description_msg}", - ): - self.renewable.get_renewable_matrix() +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.exceptions.exceptions import ( + RenewablePropertiesUpdateError, + RenewableMatrixDownloadError, +) +from antares.model.area import 
Area +from antares.model.renewable import RenewableClusterProperties, RenewableCluster +from antares.service.service_factory import ServiceFactory +import requests_mock + +import pytest + + +class TestCreateAPI: + api = APIconf("https://antares.com", "token", verify=False) + study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" + area = Area( + "study_test", + ServiceFactory(api, study_id).create_area_service(), + ServiceFactory(api, study_id).create_st_storage_service(), + ServiceFactory(api, study_id).create_thermal_service(), + ServiceFactory(api, study_id).create_renewable_service(), + ) + renewable = RenewableCluster(ServiceFactory(api, study_id).create_renewable_service(), area.id, "onshore_fr") + antares_web_description_msg = "Mocked Server KO" + matrix = pd.DataFrame(data=[[0]]) + + def test_update_renewable_properties_success(self): + with requests_mock.Mocker() as mocker: + properties = RenewableClusterProperties(enabled=False) + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.renewable.area_id}/" + f"clusters/renewable/{self.renewable.id}" + ) + mocker.patch( + url, + json={"id": "id", "name": "name", **properties.model_dump()}, + status_code=200, + ) + self.renewable.update_properties(properties=properties) + + def test_update_renewable_properties_fails(self): + with requests_mock.Mocker() as mocker: + properties = RenewableClusterProperties(enabled=False) + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.renewable.area_id}" + f"/clusters/renewable/{self.renewable.id}" + ) + antares_web_description_msg = "Server KO" + mocker.patch(url, json={"description": antares_web_description_msg}, status_code=404) + + with pytest.raises( + RenewablePropertiesUpdateError, + match=f"Could not update properties for renewable cluster {self.renewable.id} " + f"inside area {self.area.id}: {antares_web_description_msg}", + ): + self.renewable.update_properties(properties=properties) + + def 
test_get_renewable_matrices_success(self): + with requests_mock.Mocker() as mocker: + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/renewables/series/" + f"{self.area.id}/{self.renewable.name}/series" + ) + mocker.get(url, json={"data": [[0]], "index": [0], "columns": [0]}, status_code=200) + renewable_matrix = self.renewable.get_renewable_matrix() + assert renewable_matrix.equals(self.matrix) + + def test_get_renewable_matrices_fails(self): + with requests_mock.Mocker() as mocker: + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/raw?path=input/renewables/series/" + f"{self.area.id}/{self.renewable.name}/series" + ) + mocker.get( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + RenewableMatrixDownloadError, + match=f"Could not download matrix for cluster {self.renewable.name} inside area {self.area.id}" + f": {self.antares_web_description_msg}", + ): + self.renewable.get_renewable_matrix() diff --git a/tests/antares/services/api_services/test_st_storage_api.py b/tests/antares/services/api_services/test_st_storage_api.py index 6d3bb1d1..f0084bb0 100644 --- a/tests/antares/services/api_services/test_st_storage_api.py +++ b/tests/antares/services/api_services/test_st_storage_api.py @@ -1,103 +1,115 @@ -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.exceptions.exceptions import ( - STStoragePropertiesUpdateError, - STStorageMatrixDownloadError, - STStorageMatrixUploadError, -) -from antares.model.area import Area -from antares.model.st_storage import STStorage, STStorageProperties -from antares.service.service_factory import ServiceFactory -import requests_mock - -import pytest - - -class TestCreateAPI: - api = APIconf("https://antares.com", "token", verify=False) - study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" - area = Area( - "study_test", - ServiceFactory(api, study_id).create_area_service(), - ServiceFactory(api, 
study_id).create_st_storage_service(), - ServiceFactory(api, study_id).create_thermal_service(), - ServiceFactory(api, study_id).create_renewable_service(), - ) - storage = STStorage(ServiceFactory(api, study_id).create_st_storage_service(), area.id, "battery_fr") - antares_web_description_msg = "Mocked Server KO" - matrix = pd.DataFrame(data=[[0]]) - - def test_update_st_storage_properties_success(self): - with requests_mock.Mocker() as mocker: - properties = STStorageProperties(enabled=False) - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/" - f"areas/{self.storage.area_id}/storages/{self.storage.id}" - ) - mocker.patch(url, json={"id": "id", "name": "name", **properties.model_dump()}, status_code=200) - self.storage.update_properties(properties=properties) - - def test_update_st_storage_properties_fails(self): - with requests_mock.Mocker() as mocker: - properties = STStorageProperties(enabled=False) - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}/" - f"storages/{self.storage.id}" - ) - antares_web_description_msg = "Server KO" - mocker.patch(url, json={"description": antares_web_description_msg}, status_code=404) - - with pytest.raises( - STStoragePropertiesUpdateError, - match=f"Could not update properties for short term storage {self.storage.id} " - f"inside area {self.area.id}: {antares_web_description_msg}", - ): - self.storage.update_properties(properties=properties) - - def test_get_storage_matrix_success(self): - with requests_mock.Mocker() as mocker: - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}" - f"/storages/{self.storage.id}/series/inflows" - ) - mocker.get(url, json={"data": [[0]], "index": [0], "columns": [0]}, status_code=200) - inflow_matrix = self.storage.get_storage_inflows() - assert inflow_matrix.equals(self.matrix) - - def test_get_storage_matrix_fails(self): - with requests_mock.Mocker() as mocker: - url = ( - 
f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}" - f"/storages/{self.storage.id}/series/inflows" - ) - mocker.get(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - STStorageMatrixDownloadError, - match=f"Could not download inflows matrix for storage {self.storage.id} " - f"inside area {self.area.id}: {self.antares_web_description_msg}", - ): - self.storage.get_storage_inflows() - - def test_upload_storage_matrix_success(self): - with requests_mock.Mocker() as mocker: - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}" - f"/storages/{self.storage.id}/series/inflows" - ) - mocker.put(url, status_code=200) - self.storage.upload_storage_inflows(self.matrix) - - def test_upload_storage_matrix_fails(self): - with requests_mock.Mocker() as mocker: - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}" - f"/storages/{self.storage.id}/series/inflows" - ) - mocker.put(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - STStorageMatrixUploadError, - match=f"Could not upload inflows matrix for storage {self.storage.id} inside area {self.area.id}:" - f" {self.antares_web_description_msg}", - ): - self.storage.upload_storage_inflows(self.matrix) +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.exceptions.exceptions import ( + STStoragePropertiesUpdateError, + STStorageMatrixDownloadError, + STStorageMatrixUploadError, +) +from antares.model.area import Area +from antares.model.st_storage import STStorage, STStorageProperties +from antares.service.service_factory import ServiceFactory +import requests_mock + +import pytest + + +class TestCreateAPI: + api = APIconf("https://antares.com", "token", verify=False) + study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" + area = Area( + "study_test", + ServiceFactory(api, 
study_id).create_area_service(), + ServiceFactory(api, study_id).create_st_storage_service(), + ServiceFactory(api, study_id).create_thermal_service(), + ServiceFactory(api, study_id).create_renewable_service(), + ) + storage = STStorage(ServiceFactory(api, study_id).create_st_storage_service(), area.id, "battery_fr") + antares_web_description_msg = "Mocked Server KO" + matrix = pd.DataFrame(data=[[0]]) + + def test_update_st_storage_properties_success(self): + with requests_mock.Mocker() as mocker: + properties = STStorageProperties(enabled=False) + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/" + f"areas/{self.storage.area_id}/storages/{self.storage.id}" + ) + mocker.patch( + url, + json={"id": "id", "name": "name", **properties.model_dump()}, + status_code=200, + ) + self.storage.update_properties(properties=properties) + + def test_update_st_storage_properties_fails(self): + with requests_mock.Mocker() as mocker: + properties = STStorageProperties(enabled=False) + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}/" + f"storages/{self.storage.id}" + ) + antares_web_description_msg = "Server KO" + mocker.patch(url, json={"description": antares_web_description_msg}, status_code=404) + + with pytest.raises( + STStoragePropertiesUpdateError, + match=f"Could not update properties for short term storage {self.storage.id} " + f"inside area {self.area.id}: {antares_web_description_msg}", + ): + self.storage.update_properties(properties=properties) + + def test_get_storage_matrix_success(self): + with requests_mock.Mocker() as mocker: + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}" + f"/storages/{self.storage.id}/series/inflows" + ) + mocker.get(url, json={"data": [[0]], "index": [0], "columns": [0]}, status_code=200) + inflow_matrix = self.storage.get_storage_inflows() + assert inflow_matrix.equals(self.matrix) + + def test_get_storage_matrix_fails(self): + with 
requests_mock.Mocker() as mocker: + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}" + f"/storages/{self.storage.id}/series/inflows" + ) + mocker.get( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + STStorageMatrixDownloadError, + match=f"Could not download inflows matrix for storage {self.storage.id} " + f"inside area {self.area.id}: {self.antares_web_description_msg}", + ): + self.storage.get_storage_inflows() + + def test_upload_storage_matrix_success(self): + with requests_mock.Mocker() as mocker: + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}" + f"/storages/{self.storage.id}/series/inflows" + ) + mocker.put(url, status_code=200) + self.storage.upload_storage_inflows(self.matrix) + + def test_upload_storage_matrix_fails(self): + with requests_mock.Mocker() as mocker: + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/areas/{self.storage.area_id}" + f"/storages/{self.storage.id}/series/inflows" + ) + mocker.put( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + STStorageMatrixUploadError, + match=f"Could not upload inflows matrix for storage {self.storage.id} inside area {self.area.id}:" + f" {self.antares_web_description_msg}", + ): + self.storage.upload_storage_inflows(self.matrix) diff --git a/tests/antares/services/api_services/test_study_api.py b/tests/antares/services/api_services/test_study_api.py index 86e6a2d0..89d33fd6 100644 --- a/tests/antares/services/api_services/test_study_api.py +++ b/tests/antares/services/api_services/test_study_api.py @@ -1,187 +1,231 @@ -import json - -from antares.api_conf.api_conf import APIconf -import requests_mock -import re - -import pytest - -from antares.exceptions.exceptions import ( - StudyCreationError, - BindingConstraintCreationError, - AreaCreationError, - LinkCreationError, - 
StudySettingsUpdateError, -) -from antares.model.area import AreaUi, AreaProperties, Area -from antares.model.binding_constraint import BindingConstraintProperties, BindingConstraint -from antares.model.link import LinkProperties, LinkUi, Link -from antares.model.settings import StudySettings, GeneralProperties -from antares.model.study import create_study_api, Study -from antares.service.service_factory import ServiceFactory - - -class TestCreateAPI: - api = APIconf("https://antares.com", "token", verify=False) - study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" - antares_web_description_msg = "Mocked Server KO" - study = Study("TestStudy", "880", ServiceFactory(api, study_id)) - area = Area( - "area_test", - ServiceFactory(api, study_id).create_area_service(), - ServiceFactory(api, study_id).create_st_storage_service(), - ServiceFactory(api, study_id).create_thermal_service(), - ServiceFactory(api, study_id).create_renewable_service(), - ) - - def test_create_study_test_ok(self) -> None: - with requests_mock.Mocker() as mocker: - expected_url = "https://antares.com/api/v1/studies?name=TestStudy&version=880" - mocker.post(expected_url, json=self.study_id, status_code=200) - config_urls = re.compile(f"https://antares.com/api/v1/studies/{self.study_id}/config/.*") - mocker.get(config_urls, json={}, status_code=200) - # When - study = create_study_api("TestStudy", "880", self.api) - - # Then - assert len(mocker.request_history) == 8 - assert mocker.request_history[0].url == expected_url - assert isinstance(study, Study) - - def test_create_study_fails(self): - with requests_mock.Mocker() as mocker: - url = "https://antares.com/api/v1/studies?name=TestStudy&version=880" - study_name = "TestStudy" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - - with pytest.raises( - StudyCreationError, - match=f"Could not create the study {study_name}: {self.antares_web_description_msg}", - ): - create_study_api(study_name, "880", 
self.api) - - def test_update_study_settings_success(self): - with requests_mock.Mocker() as mocker: - settings = StudySettings() - settings.general_properties = GeneralProperties(mode="Adequacy") - config_urls = re.compile(f"https://antares.com/api/v1/studies/{self.study_id}/config/.*") - mocker.put(config_urls, status_code=200) - mocker.get(config_urls, json={}, status_code=200) - self.study.update_settings(settings) - - def test_update_study_settings_fails(self): - with requests_mock.Mocker() as mocker: - settings = StudySettings() - settings.general_properties = GeneralProperties(mode="Adequacy") - config_urls = re.compile(f"https://antares.com/api/v1/studies/{self.study_id}/config/.*") - antares_web_description_msg = "Server KO" - mocker.put(config_urls, json={"description": antares_web_description_msg}, status_code=404) - with pytest.raises( - StudySettingsUpdateError, - match=f"Could not update settings for study {self.study_id}: {antares_web_description_msg}", - ): - self.study.update_settings(settings) - - def test_create_area_success(self): - area_name = "area_test" - with requests_mock.Mocker() as mocker: - base_url = "https://antares.com/api/v1" - - url1 = f"{base_url}/studies/{self.study_id}/areas" - mocker.post(url1, json={"id": area_name}, status_code=201) - ui_info = {"ui": {"x": 0, "y": 0, "layers": 0, "color_r": 0, "color_g": 0, "color_b": 0}} - area_ui = { - **AreaUi(layerX={"1": 0}, layerY={"1": 0}, layerColor={"1": "0"}).model_dump(by_alias=True), - **ui_info, - } - mocker.get(url1, json={area_name: area_ui}, status_code=201) - url2 = f"{base_url}/studies/{self.study_id}/areas/{area_name}/properties/form" - mocker.put(url2, status_code=201) - mocker.get(url2, json=AreaProperties().model_dump(), status_code=200) - - area = self.study.create_area(area_name) - assert isinstance(area, Area) - - def test_create_area_fails(self): - area_name = "area_test" - with requests_mock.Mocker() as mocker: - url = 
f"https://antares.com/api/v1/studies/{self.study_id}/areas" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - - with pytest.raises( - AreaCreationError, - match=f"Could not create the area {area_name}: {self.antares_web_description_msg}", - ): - self.study.create_area(area_name) - - def test_create_link_success(self): - with requests_mock.Mocker() as mocker: - base_url = f"https://antares.com/api/v1/studies/{self.study_id}" - url = f"{base_url}/links" - mocker.post(url, status_code=200) - area = Area( - name="area", - area_service=self.api, - storage_service=self.api, - thermal_service=self.api, - renewable_service=self.api, - ) - area_to = Area( - name="area_to", - area_service=self.api, - storage_service=self.api, - thermal_service=self.api, - renewable_service=self.api, - ) - - raw_url = f"{base_url}/raw?path=input/links/{area.id}/properties/{area_to.id}" - json_response = {**LinkProperties().model_dump(by_alias=True), **LinkUi().model_dump(by_alias=True)} - mocker.get(raw_url, json=json_response, status_code=200) - link = self.study.create_link(area_from=area, area_to=area_to) - assert isinstance(link, Link) - - def test_create_link_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/links" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - area_from = Area( - name="area_from", - area_service=self.api, - storage_service=self.api, - thermal_service=self.api, - renewable_service=self.api, - ) - area_to = Area( - name="area_to", - area_service=self.api, - storage_service=self.api, - thermal_service=self.api, - renewable_service=self.api, - ) - - with pytest.raises( - LinkCreationError, - match=f"Could not create the link {area_from.id} / {area_to.id}: {self.antares_web_description_msg}", - ): - self.study.create_link(area_from=area_from, area_to=area_to) - - def test_create_binding_constraint_success(self): - with 
requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints" - json_response = json.loads(BindingConstraintProperties().model_dump_json(by_alias=True)) - constraint_name = "bc_1" - mocker.post(url, json={"id": "id", "name": constraint_name, "terms": [], **json_response}, status_code=201) - constraint = self.study.create_binding_constraint(name=constraint_name) - assert isinstance(constraint, BindingConstraint) - - def test_create_binding_constraint_fails(self): - with requests_mock.Mocker() as mocker: - url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints" - mocker.post(url, json={"description": self.antares_web_description_msg}, status_code=404) - constraint_name = "bc_1" - - with pytest.raises( - BindingConstraintCreationError, - match=f"Could not create the binding constraint {constraint_name}: {self.antares_web_description_msg}", - ): - self.study.create_binding_constraint(name=constraint_name) +import json + +from antares.api_conf.api_conf import APIconf +import requests_mock +import re + +import pytest + +from antares.exceptions.exceptions import ( + StudyCreationError, + BindingConstraintCreationError, + AreaCreationError, + LinkCreationError, + StudySettingsUpdateError, +) +from antares.model.area import AreaUi, AreaProperties, Area +from antares.model.binding_constraint import ( + BindingConstraintProperties, + BindingConstraint, +) +from antares.model.link import LinkProperties, LinkUi, Link +from antares.model.settings import StudySettings, GeneralProperties +from antares.model.study import create_study_api, Study +from antares.service.service_factory import ServiceFactory + + +class TestCreateAPI: + api = APIconf("https://antares.com", "token", verify=False) + study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" + antares_web_description_msg = "Mocked Server KO" + study = Study("TestStudy", "880", ServiceFactory(api, study_id)) + area = Area( + "area_test", + ServiceFactory(api, 
study_id).create_area_service(), + ServiceFactory(api, study_id).create_st_storage_service(), + ServiceFactory(api, study_id).create_thermal_service(), + ServiceFactory(api, study_id).create_renewable_service(), + ) + + def test_create_study_test_ok(self) -> None: + with requests_mock.Mocker() as mocker: + expected_url = "https://antares.com/api/v1/studies?name=TestStudy&version=880" + mocker.post(expected_url, json=self.study_id, status_code=200) + config_urls = re.compile(f"https://antares.com/api/v1/studies/{self.study_id}/config/.*") + mocker.get(config_urls, json={}, status_code=200) + # When + study = create_study_api("TestStudy", "880", self.api) + + # Then + assert len(mocker.request_history) == 8 + assert mocker.request_history[0].url == expected_url + assert isinstance(study, Study) + + def test_create_study_fails(self): + with requests_mock.Mocker() as mocker: + url = "https://antares.com/api/v1/studies?name=TestStudy&version=880" + study_name = "TestStudy" + mocker.post( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + + with pytest.raises( + StudyCreationError, + match=f"Could not create the study {study_name}: {self.antares_web_description_msg}", + ): + create_study_api(study_name, "880", self.api) + + def test_update_study_settings_success(self): + with requests_mock.Mocker() as mocker: + settings = StudySettings() + settings.general_properties = GeneralProperties(mode="Adequacy") + config_urls = re.compile(f"https://antares.com/api/v1/studies/{self.study_id}/config/.*") + mocker.put(config_urls, status_code=200) + mocker.get(config_urls, json={}, status_code=200) + self.study.update_settings(settings) + + def test_update_study_settings_fails(self): + with requests_mock.Mocker() as mocker: + settings = StudySettings() + settings.general_properties = GeneralProperties(mode="Adequacy") + config_urls = re.compile(f"https://antares.com/api/v1/studies/{self.study_id}/config/.*") + antares_web_description_msg = 
"Server KO" + mocker.put( + config_urls, + json={"description": antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + StudySettingsUpdateError, + match=f"Could not update settings for study {self.study_id}: {antares_web_description_msg}", + ): + self.study.update_settings(settings) + + def test_create_area_success(self): + area_name = "area_test" + with requests_mock.Mocker() as mocker: + base_url = "https://antares.com/api/v1" + + url1 = f"{base_url}/studies/{self.study_id}/areas" + mocker.post(url1, json={"id": area_name}, status_code=201) + ui_info = { + "ui": { + "x": 0, + "y": 0, + "layers": 0, + "color_r": 0, + "color_g": 0, + "color_b": 0, + } + } + area_ui = { + **AreaUi(layerX={"1": 0}, layerY={"1": 0}, layerColor={"1": "0"}).model_dump(by_alias=True), + **ui_info, + } + mocker.get(url1, json={area_name: area_ui}, status_code=201) + url2 = f"{base_url}/studies/{self.study_id}/areas/{area_name}/properties/form" + mocker.put(url2, status_code=201) + mocker.get(url2, json=AreaProperties().model_dump(), status_code=200) + + area = self.study.create_area(area_name) + assert isinstance(area, Area) + + def test_create_area_fails(self): + area_name = "area_test" + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/areas" + mocker.post( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + + with pytest.raises( + AreaCreationError, + match=f"Could not create the area {area_name}: {self.antares_web_description_msg}", + ): + self.study.create_area(area_name) + + def test_create_link_success(self): + with requests_mock.Mocker() as mocker: + base_url = f"https://antares.com/api/v1/studies/{self.study_id}" + url = f"{base_url}/links" + mocker.post(url, status_code=200) + area = Area( + name="area", + area_service=self.api, + storage_service=self.api, + thermal_service=self.api, + renewable_service=self.api, + ) + area_to = Area( + name="area_to", + 
area_service=self.api, + storage_service=self.api, + thermal_service=self.api, + renewable_service=self.api, + ) + + raw_url = f"{base_url}/raw?path=input/links/{area.id}/properties/{area_to.id}" + json_response = { + **LinkProperties().model_dump(by_alias=True), + **LinkUi().model_dump(by_alias=True), + } + mocker.get(raw_url, json=json_response, status_code=200) + link = self.study.create_link(area_from=area, area_to=area_to) + assert isinstance(link, Link) + + def test_create_link_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/links" + mocker.post( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + area_from = Area( + name="area_from", + area_service=self.api, + storage_service=self.api, + thermal_service=self.api, + renewable_service=self.api, + ) + area_to = Area( + name="area_to", + area_service=self.api, + storage_service=self.api, + thermal_service=self.api, + renewable_service=self.api, + ) + + with pytest.raises( + LinkCreationError, + match=f"Could not create the link {area_from.id} / {area_to.id}: {self.antares_web_description_msg}", + ): + self.study.create_link(area_from=area_from, area_to=area_to) + + def test_create_binding_constraint_success(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints" + json_response = json.loads(BindingConstraintProperties().model_dump_json(by_alias=True)) + constraint_name = "bc_1" + mocker.post( + url, + json={ + "id": "id", + "name": constraint_name, + "terms": [], + **json_response, + }, + status_code=201, + ) + constraint = self.study.create_binding_constraint(name=constraint_name) + assert isinstance(constraint, BindingConstraint) + + def test_create_binding_constraint_fails(self): + with requests_mock.Mocker() as mocker: + url = f"https://antares.com/api/v1/studies/{self.study_id}/bindingconstraints" + mocker.post( + url, + 
json={"description": self.antares_web_description_msg}, + status_code=404, + ) + constraint_name = "bc_1" + + with pytest.raises( + BindingConstraintCreationError, + match=f"Could not create the binding constraint {constraint_name}: {self.antares_web_description_msg}", + ): + self.study.create_binding_constraint(name=constraint_name) diff --git a/tests/antares/services/api_services/test_thermal_api.py b/tests/antares/services/api_services/test_thermal_api.py index 22768c6d..9723dfa2 100644 --- a/tests/antares/services/api_services/test_thermal_api.py +++ b/tests/antares/services/api_services/test_thermal_api.py @@ -1,94 +1,142 @@ -import pandas as pd - -from antares.api_conf.api_conf import APIconf -from antares.exceptions.exceptions import ThermalPropertiesUpdateError, ThermalMatrixDownloadError -from antares.model.area import Area -from antares.model.study import Study -from antares.model.thermal import ThermalCluster, ThermalClusterProperties, ThermalClusterMatrixName -from antares.service.service_factory import ServiceFactory -import requests_mock - -import pytest - - -@pytest.fixture -def thermal_matrix_set(): - params = [ - ("get_prepro_data_matrix", ThermalClusterMatrixName.PREPRO_DATA, "input/thermal/prepro", "prepro"), - ("get_prepro_modulation_matrix", ThermalClusterMatrixName.PREPRO_MODULATION, "input/thermal/prepro", "prepro"), - ("get_series_matrix", ThermalClusterMatrixName.SERIES, "input/thermal/series", "series"), - ("get_co2_cost_matrix", ThermalClusterMatrixName.SERIES_CO2_COST, "input/thermal/series", "series"), - ("get_fuel_cost_matrix", ThermalClusterMatrixName.SERIES_FUEL_COST, "input/thermal/series", "series"), - ] - return params - - -class TestCreateAPI: - api = APIconf("https://antares.com", "token", verify=False) - study_id = "22c52f44-4c2a-407b-862b-490887f93dd8" - study = Study("study_test", "870", ServiceFactory(api, study_id)) - area = Area( - "area-test", - ServiceFactory(api, study_id).create_area_service(), - ServiceFactory(api, 
study_id).create_st_storage_service(), - ServiceFactory(api, study_id).create_thermal_service(), - ServiceFactory(api, study_id).create_renewable_service(), - ) - thermal = ThermalCluster(ServiceFactory(api, study_id).create_thermal_service(), "area-test", "thermal-test") - antares_web_description_msg = "Mocked Server KO" - matrix = pd.DataFrame(data=[[0]]) - - def test_update_thermal_properties_success(self): - with requests_mock.Mocker() as mocker: - properties = ThermalClusterProperties(co2=4) - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/" - f"areas/{self.thermal.area_id}/clusters/thermal/{self.thermal.id}" - ) - mocker.patch(url, json={"id": "id", "name": "name", **properties.model_dump()}, status_code=200) - self.thermal.update_properties(properties=properties) - - def test_update_thermal_properties_fails(self): - with requests_mock.Mocker() as mocker: - properties = ThermalClusterProperties(co2=4) - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}/" - f"areas/{self.thermal.area_id}/clusters/thermal/{self.thermal.id}" - ) - antares_web_description_msg = "Server KO" - mocker.patch(url, json={"description": antares_web_description_msg}, status_code=404) - - with pytest.raises( - ThermalPropertiesUpdateError, - match=f"Could not update properties for thermal cluster " - f"{self.thermal.id} inside area {self.area.id}: {antares_web_description_msg}", - ): - self.thermal.update_properties(properties=properties) - - def test_get_thermal_matrices_success(self, thermal_matrix_set): - for matrix_method, matrix_enum, path, path_suffix in thermal_matrix_set: - with requests_mock.Mocker() as mocker: - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}" - f"/raw?path=input/thermal/{path_suffix}/" - f"{self.thermal.area_id}/{self.thermal.name}/{matrix_enum.value}" - ) - mocker.get(url, json={"data": [[0]], "index": [0], "columns": [0]}, status_code=200) - result_matrix = getattr(self.thermal, matrix_method)() - assert 
result_matrix.equals(self.matrix) - - def test_get_thermal_matrices_fails(self, thermal_matrix_set): - for matrix_method, matrix_enum, path, path_suffix in thermal_matrix_set: - with requests_mock.Mocker() as mocker: - url = ( - f"https://antares.com/api/v1/studies/{self.study_id}" - f"/raw?path=input/thermal/{path_suffix}/" - f"{self.thermal.area_id}/{self.thermal.name}/{matrix_enum.value}" - ) - mocker.get(url, json={"description": self.antares_web_description_msg}, status_code=404) - with pytest.raises( - ThermalMatrixDownloadError, - match=f"Could not download {matrix_enum.value} for cluster {self.thermal.name}" - f" inside area {self.area.id}: {self.antares_web_description_msg}", - ): - getattr(self.thermal, matrix_method)() +import pandas as pd + +from antares.api_conf.api_conf import APIconf +from antares.exceptions.exceptions import ( + ThermalPropertiesUpdateError, + ThermalMatrixDownloadError, +) +from antares.model.area import Area +from antares.model.study import Study +from antares.model.thermal import ( + ThermalCluster, + ThermalClusterProperties, + ThermalClusterMatrixName, +) +from antares.service.service_factory import ServiceFactory +import requests_mock + +import pytest + + +@pytest.fixture +def thermal_matrix_set(): + params = [ + ( + "get_prepro_data_matrix", + ThermalClusterMatrixName.PREPRO_DATA, + "input/thermal/prepro", + "prepro", + ), + ( + "get_prepro_modulation_matrix", + ThermalClusterMatrixName.PREPRO_MODULATION, + "input/thermal/prepro", + "prepro", + ), + ( + "get_series_matrix", + ThermalClusterMatrixName.SERIES, + "input/thermal/series", + "series", + ), + ( + "get_co2_cost_matrix", + ThermalClusterMatrixName.SERIES_CO2_COST, + "input/thermal/series", + "series", + ), + ( + "get_fuel_cost_matrix", + ThermalClusterMatrixName.SERIES_FUEL_COST, + "input/thermal/series", + "series", + ), + ] + return params + + +class TestCreateAPI: + api = APIconf("https://antares.com", "token", verify=False) + study_id = 
"22c52f44-4c2a-407b-862b-490887f93dd8" + study = Study("study_test", "870", ServiceFactory(api, study_id)) + area = Area( + "area-test", + ServiceFactory(api, study_id).create_area_service(), + ServiceFactory(api, study_id).create_st_storage_service(), + ServiceFactory(api, study_id).create_thermal_service(), + ServiceFactory(api, study_id).create_renewable_service(), + ) + thermal = ThermalCluster( + ServiceFactory(api, study_id).create_thermal_service(), + "area-test", + "thermal-test", + ) + antares_web_description_msg = "Mocked Server KO" + matrix = pd.DataFrame(data=[[0]]) + + def test_update_thermal_properties_success(self): + with requests_mock.Mocker() as mocker: + properties = ThermalClusterProperties(co2=4) + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/" + f"areas/{self.thermal.area_id}/clusters/thermal/{self.thermal.id}" + ) + mocker.patch( + url, + json={"id": "id", "name": "name", **properties.model_dump()}, + status_code=200, + ) + self.thermal.update_properties(properties=properties) + + def test_update_thermal_properties_fails(self): + with requests_mock.Mocker() as mocker: + properties = ThermalClusterProperties(co2=4) + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}/" + f"areas/{self.thermal.area_id}/clusters/thermal/{self.thermal.id}" + ) + antares_web_description_msg = "Server KO" + mocker.patch(url, json={"description": antares_web_description_msg}, status_code=404) + + with pytest.raises( + ThermalPropertiesUpdateError, + match=f"Could not update properties for thermal cluster " + f"{self.thermal.id} inside area {self.area.id}: {antares_web_description_msg}", + ): + self.thermal.update_properties(properties=properties) + + def test_get_thermal_matrices_success(self, thermal_matrix_set): + for matrix_method, matrix_enum, path, path_suffix in thermal_matrix_set: + with requests_mock.Mocker() as mocker: + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}" + f"/raw?path=input/thermal/{path_suffix}/" 
+ f"{self.thermal.area_id}/{self.thermal.name}/{matrix_enum.value}" + ) + mocker.get( + url, + json={"data": [[0]], "index": [0], "columns": [0]}, + status_code=200, + ) + result_matrix = getattr(self.thermal, matrix_method)() + assert result_matrix.equals(self.matrix) + + def test_get_thermal_matrices_fails(self, thermal_matrix_set): + for matrix_method, matrix_enum, path, path_suffix in thermal_matrix_set: + with requests_mock.Mocker() as mocker: + url = ( + f"https://antares.com/api/v1/studies/{self.study_id}" + f"/raw?path=input/thermal/{path_suffix}/" + f"{self.thermal.area_id}/{self.thermal.name}/{matrix_enum.value}" + ) + mocker.get( + url, + json={"description": self.antares_web_description_msg}, + status_code=404, + ) + with pytest.raises( + ThermalMatrixDownloadError, + match=f"Could not download {matrix_enum.value} for cluster {self.thermal.name}" + f" inside area {self.area.id}: {self.antares_web_description_msg}", + ): + getattr(self.thermal, matrix_method)() diff --git a/tests/antares/services/local_services/conftest.py b/tests/antares/services/local_services/conftest.py index c6d9ee31..09f5b6b2 100644 --- a/tests/antares/services/local_services/conftest.py +++ b/tests/antares/services/local_services/conftest.py @@ -1,212 +1,228 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -import pytest - -from antares.config.local_configuration import LocalConfiguration -from antares.model.area import Area -from antares.model.hydro import HydroProperties -from antares.model.renewable import RenewableClusterProperties, TimeSeriesInterpretation, RenewableClusterGroup -from antares.model.st_storage import STStorageProperties, STStorageGroup -from antares.model.study import Study, create_study_local -from antares.model.thermal import ( - ThermalClusterProperties, - LocalTSGenerationBehavior, - ThermalClusterGroup, - LawOption, - ThermalCostGeneration, -) -from antares.tools.ini_tool import IniFile, IniFileTypes - - -@pytest.fixture -def local_study(tmp_path) -> Study: - study_name = "studyTest" - study_version = "880" - return create_study_local(study_name, study_version, LocalConfiguration(tmp_path, study_name)) - - -@pytest.fixture -def local_study_w_areas(tmp_path, local_study) -> Study: - areas_to_create = ["fr", "it"] - for area in areas_to_create: - local_study.create_area(area) - return local_study - - -@pytest.fixture() -def local_study_w_links(tmp_path, local_study_w_areas): - local_study_w_areas.create_area("at") - links_to_create = ["fr_at", "at_it", "fr_it"] - for link in links_to_create: - area_from, area_to = link.split("_") - local_study_w_areas.create_link( - area_from=local_study_w_areas.get_areas()[area_from], - area_to=local_study_w_areas.get_areas()[area_to], - existing_areas=local_study_w_areas.get_areas(), - ) - return local_study_w_areas - - -@pytest.fixture -def local_study_w_thermal(tmp_path, local_study_w_links) -> Study: - thermal_name = "test thermal cluster" - local_study_w_links.get_areas()["fr"].create_thermal_cluster(thermal_name) - return local_study_w_links - - -@pytest.fixture -def default_thermal_cluster_properties() -> ThermalClusterProperties: - return ThermalClusterProperties( - group=ThermalClusterGroup.OTHER1, - enabled=True, - unit_count=1, - nominal_capacity=0, - gen_ts=LocalTSGenerationBehavior.USE_GLOBAL, 
- min_stable_power=0, - min_up_time=1, - min_down_time=1, - must_run=False, - spinning=0, - volatility_forced=0, - volatility_planned=0, - law_forced=LawOption.UNIFORM, - law_planned=LawOption.UNIFORM, - marginal_cost=0, - spread_cost=0, - fixed_cost=0, - startup_cost=0, - market_bid_cost=0, - co2=0, - nh3=0, - so2=0, - nox=0, - pm2_5=0, - pm5=0, - pm10=0, - nmvoc=0, - op1=0, - op2=0, - op3=0, - op4=0, - op5=0, - cost_generation=ThermalCostGeneration.SET_MANUALLY, - efficiency=100, - variable_o_m_cost=0, - ) - - -@pytest.fixture -def actual_thermal_list_ini(local_study_w_thermal) -> IniFile: - return IniFile(local_study_w_thermal.service.config.study_path, IniFileTypes.THERMAL_LIST_INI, area_name="fr") - - -@pytest.fixture -def actual_thermal_areas_ini(local_study_w_thermal) -> IniFile: - return IniFile(local_study_w_thermal.service.config.study_path, IniFileTypes.THERMAL_AREAS_INI) - - -@pytest.fixture -def actual_adequacy_patch_ini(local_study_w_areas) -> IniFile: - return IniFile(local_study_w_areas.service.config.study_path, IniFileTypes.AREA_ADEQUACY_PATCH_INI, area_name="fr") - - -@pytest.fixture -def local_study_with_renewable(local_study_w_thermal) -> Study: - renewable_cluster_name = "renewable cluster" - local_study_w_thermal.get_areas()["fr"].create_renewable_cluster( - renewable_cluster_name, RenewableClusterProperties(), None - ) - return local_study_w_thermal - - -@pytest.fixture -def default_renewable_cluster_properties() -> RenewableClusterProperties: - return RenewableClusterProperties( - enabled=True, - unit_count=1, - nominal_capacity=0, - group=RenewableClusterGroup.OTHER1, - ts_interpretation=TimeSeriesInterpretation.POWER_GENERATION, - ) - - -@pytest.fixture -def actual_renewable_list_ini(local_study_with_renewable) -> IniFile: - return IniFile( - local_study_with_renewable.service.config.study_path, IniFileTypes.RENEWABLES_LIST_INI, area_name="fr" - ) - - -@pytest.fixture -def local_study_with_st_storage(local_study_with_renewable) -> Study: 
- storage_name = "short term storage" - local_study_with_renewable.get_areas()["fr"].create_st_storage(storage_name) - return local_study_with_renewable - - -@pytest.fixture -def default_st_storage_properties() -> STStorageProperties: - return STStorageProperties( - group=STStorageGroup.OTHER1, - injection_nominal_capacity=0, - withdrawal_nominal_capacity=0, - reservoir_capacity=0, - efficiency=1, - initial_level=0.5, - initial_level_optim=False, - enabled=True, - ) - - -@pytest.fixture -def actual_st_storage_list_ini(local_study_with_st_storage) -> IniFile: - return IniFile( - local_study_with_st_storage.service.config.study_path, IniFileTypes.ST_STORAGE_LIST_INI, area_name="fr" - ) - - -@pytest.fixture -def local_study_with_hydro(local_study_with_st_storage) -> Study: - local_study_with_st_storage.get_areas()["fr"].create_hydro() - return local_study_with_st_storage - - -@pytest.fixture -def default_hydro_properties() -> HydroProperties: - return HydroProperties( - inter_daily_breakdown=1, - intra_daily_modulation=24, - inter_monthly_breakdown=1, - reservoir=False, - reservoir_capacity=0, - follow_load=True, - use_water=False, - hard_bounds=False, - initialize_reservoir_date=0, - use_heuristic=True, - power_to_level=False, - use_leeway=False, - leeway_low=1, - leeway_up=1, - pumping_efficiency=1, - ) - - -@pytest.fixture -def actual_hydro_ini(local_study_with_hydro) -> IniFile: - return IniFile(local_study_with_hydro.service.config.study_path, IniFileTypes.HYDRO_INI) - - -@pytest.fixture -def area_fr(local_study_with_hydro) -> Area: - return local_study_with_hydro.get_areas()["fr"] +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +import pytest + +from antares.config.local_configuration import LocalConfiguration +from antares.model.area import Area +from antares.model.hydro import HydroProperties +from antares.model.renewable import ( + RenewableClusterProperties, + TimeSeriesInterpretation, + RenewableClusterGroup, +) +from antares.model.st_storage import STStorageProperties, STStorageGroup +from antares.model.study import Study, create_study_local +from antares.model.thermal import ( + ThermalClusterProperties, + LocalTSGenerationBehavior, + ThermalClusterGroup, + LawOption, + ThermalCostGeneration, +) +from antares.tools.ini_tool import IniFile, IniFileTypes + + +@pytest.fixture +def local_study(tmp_path) -> Study: + study_name = "studyTest" + study_version = "880" + return create_study_local(study_name, study_version, LocalConfiguration(tmp_path, study_name)) + + +@pytest.fixture +def local_study_w_areas(tmp_path, local_study) -> Study: + areas_to_create = ["fr", "it"] + for area in areas_to_create: + local_study.create_area(area) + return local_study + + +@pytest.fixture() +def local_study_w_links(tmp_path, local_study_w_areas): + local_study_w_areas.create_area("at") + links_to_create = ["fr_at", "at_it", "fr_it"] + for link in links_to_create: + area_from, area_to = link.split("_") + local_study_w_areas.create_link( + area_from=local_study_w_areas.get_areas()[area_from], + area_to=local_study_w_areas.get_areas()[area_to], + existing_areas=local_study_w_areas.get_areas(), + ) + return local_study_w_areas + + +@pytest.fixture +def local_study_w_thermal(tmp_path, local_study_w_links) -> Study: + thermal_name = "test thermal cluster" + local_study_w_links.get_areas()["fr"].create_thermal_cluster(thermal_name) + return local_study_w_links + + +@pytest.fixture +def default_thermal_cluster_properties() -> ThermalClusterProperties: + return ThermalClusterProperties( + group=ThermalClusterGroup.OTHER1, + enabled=True, + unit_count=1, + nominal_capacity=0, + 
gen_ts=LocalTSGenerationBehavior.USE_GLOBAL, + min_stable_power=0, + min_up_time=1, + min_down_time=1, + must_run=False, + spinning=0, + volatility_forced=0, + volatility_planned=0, + law_forced=LawOption.UNIFORM, + law_planned=LawOption.UNIFORM, + marginal_cost=0, + spread_cost=0, + fixed_cost=0, + startup_cost=0, + market_bid_cost=0, + co2=0, + nh3=0, + so2=0, + nox=0, + pm2_5=0, + pm5=0, + pm10=0, + nmvoc=0, + op1=0, + op2=0, + op3=0, + op4=0, + op5=0, + cost_generation=ThermalCostGeneration.SET_MANUALLY, + efficiency=100, + variable_o_m_cost=0, + ) + + +@pytest.fixture +def actual_thermal_list_ini(local_study_w_thermal) -> IniFile: + return IniFile( + local_study_w_thermal.service.config.study_path, + IniFileTypes.THERMAL_LIST_INI, + area_name="fr", + ) + + +@pytest.fixture +def actual_thermal_areas_ini(local_study_w_thermal) -> IniFile: + return IniFile(local_study_w_thermal.service.config.study_path, IniFileTypes.THERMAL_AREAS_INI) + + +@pytest.fixture +def actual_adequacy_patch_ini(local_study_w_areas) -> IniFile: + return IniFile( + local_study_w_areas.service.config.study_path, + IniFileTypes.AREA_ADEQUACY_PATCH_INI, + area_name="fr", + ) + + +@pytest.fixture +def local_study_with_renewable(local_study_w_thermal) -> Study: + renewable_cluster_name = "renewable cluster" + local_study_w_thermal.get_areas()["fr"].create_renewable_cluster( + renewable_cluster_name, RenewableClusterProperties(), None + ) + return local_study_w_thermal + + +@pytest.fixture +def default_renewable_cluster_properties() -> RenewableClusterProperties: + return RenewableClusterProperties( + enabled=True, + unit_count=1, + nominal_capacity=0, + group=RenewableClusterGroup.OTHER1, + ts_interpretation=TimeSeriesInterpretation.POWER_GENERATION, + ) + + +@pytest.fixture +def actual_renewable_list_ini(local_study_with_renewable) -> IniFile: + return IniFile( + local_study_with_renewable.service.config.study_path, + IniFileTypes.RENEWABLES_LIST_INI, + area_name="fr", + ) + + +@pytest.fixture 
+def local_study_with_st_storage(local_study_with_renewable) -> Study: + storage_name = "short term storage" + local_study_with_renewable.get_areas()["fr"].create_st_storage(storage_name) + return local_study_with_renewable + + +@pytest.fixture +def default_st_storage_properties() -> STStorageProperties: + return STStorageProperties( + group=STStorageGroup.OTHER1, + injection_nominal_capacity=0, + withdrawal_nominal_capacity=0, + reservoir_capacity=0, + efficiency=1, + initial_level=0.5, + initial_level_optim=False, + enabled=True, + ) + + +@pytest.fixture +def actual_st_storage_list_ini(local_study_with_st_storage) -> IniFile: + return IniFile( + local_study_with_st_storage.service.config.study_path, + IniFileTypes.ST_STORAGE_LIST_INI, + area_name="fr", + ) + + +@pytest.fixture +def local_study_with_hydro(local_study_with_st_storage) -> Study: + local_study_with_st_storage.get_areas()["fr"].create_hydro() + return local_study_with_st_storage + + +@pytest.fixture +def default_hydro_properties() -> HydroProperties: + return HydroProperties( + inter_daily_breakdown=1, + intra_daily_modulation=24, + inter_monthly_breakdown=1, + reservoir=False, + reservoir_capacity=0, + follow_load=True, + use_water=False, + hard_bounds=False, + initialize_reservoir_date=0, + use_heuristic=True, + power_to_level=False, + use_leeway=False, + leeway_low=1, + leeway_up=1, + pumping_efficiency=1, + ) + + +@pytest.fixture +def actual_hydro_ini(local_study_with_hydro) -> IniFile: + return IniFile(local_study_with_hydro.service.config.study_path, IniFileTypes.HYDRO_INI) + + +@pytest.fixture +def area_fr(local_study_with_hydro) -> Area: + return local_study_with_hydro.get_areas()["fr"] diff --git a/tests/antares/services/local_services/test_area.py b/tests/antares/services/local_services/test_area.py index a65485bd..eea534ce 100644 --- a/tests/antares/services/local_services/test_area.py +++ b/tests/antares/services/local_services/test_area.py @@ -1,796 +1,816 @@ -# Copyright (c) 2024, RTE 
(https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -from configparser import ConfigParser -from io import StringIO - -import numpy as np -import pandas as pd - -from antares.model.hydro import Hydro -from antares.model.renewable import ( - RenewableClusterProperties, - RenewableCluster, - RenewableClusterGroup, - TimeSeriesInterpretation, - RenewableClusterPropertiesLocal, -) -from antares.model.st_storage import STStorage, STStoragePropertiesLocal, STStorageProperties, STStorageGroup -from antares.model.thermal import ( - ThermalCluster, - ThermalClusterProperties, - ThermalClusterGroup, - LocalTSGenerationBehavior, - LawOption, - ThermalCostGeneration, - ThermalClusterPropertiesLocal, -) -from antares.tools.ini_tool import IniFileTypes, IniFile -from antares.tools.time_series_tool import TimeSeriesFileType - - -class TestCreateThermalCluster: - def test_can_be_created(self, local_study_w_areas): - # Given - thermal_name = "test_thermal_cluster" - - # When - created_thermal = local_study_w_areas.get_areas()["fr"].create_thermal_cluster(thermal_name) - assert isinstance(created_thermal, ThermalCluster) - - def test_has_default_properties(self, local_study_w_thermal): - assert ( - local_study_w_thermal.get_areas()["fr"] - .get_thermals()["test thermal cluster"] - .properties.model_dump(exclude_none=True) - ) - - def test_has_correct_default_properties(self, local_study_w_thermal, default_thermal_cluster_properties): - # Given - expected_thermal_cluster_properties = default_thermal_cluster_properties - - # When - actual_thermal_cluster_properties = ( - local_study_w_thermal.get_areas()["fr"].get_thermals()["test thermal cluster"].properties - ) - - assert 
expected_thermal_cluster_properties == actual_thermal_cluster_properties - - def test_required_ini_files_exist(self, tmp_path, local_study_w_thermal): - # Given - expected_list_ini_path = ( - local_study_w_thermal.service.config.study_path / IniFileTypes.THERMAL_LIST_INI.value.format(area_name="fr") - ) - expected_areas_ini_path = local_study_w_thermal.service.config.study_path / IniFileTypes.THERMAL_AREAS_INI.value - - # Then - assert expected_list_ini_path.is_file() - assert expected_areas_ini_path.is_file() - - def test_list_ini_has_default_properties(self, tmp_path, local_study_w_thermal, actual_thermal_list_ini): - # Given - expected_list_ini_contents = """[test thermal cluster] -group = Other 1 -name = test thermal cluster -enabled = True -unitcount = 1 -nominalcapacity = 0.000000 -gen-ts = use global -min-stable-power = 0.000000 -min-up-time = 1 -min-down-time = 1 -must-run = False -spinning = 0.000000 -volatility.forced = 0.000000 -volatility.planned = 0.000000 -law.forced = uniform -law.planned = uniform -marginal-cost = 0.000000 -spread-cost = 0.000000 -fixed-cost = 0.000000 -startup-cost = 0.000000 -market-bid-cost = 0.000000 -co2 = 0.000000 -nh3 = 0.000000 -so2 = 0.000000 -nox = 0.000000 -pm2_5 = 0.000000 -pm5 = 0.000000 -pm10 = 0.000000 -nmvoc = 0.000000 -op1 = 0.000000 -op2 = 0.000000 -op3 = 0.000000 -op4 = 0.000000 -op5 = 0.000000 -costgeneration = SetManually -efficiency = 100.000000 -variableomcost = 0.000000 - -""" - expected_list_ini = ConfigParser() - expected_list_ini.read_string(expected_list_ini_contents) - with actual_thermal_list_ini.ini_path.open("r") as actual_list_ini_file: - actual_list_ini_contents = actual_list_ini_file.read() - - # Then - assert actual_thermal_list_ini.parsed_ini.sections() == expected_list_ini.sections() - assert actual_list_ini_contents == expected_list_ini_contents - assert actual_thermal_list_ini.parsed_ini == expected_list_ini - - def test_list_ini_has_custom_properties(self, tmp_path, local_study_w_areas, 
actual_thermal_list_ini): - # Given - expected_list_ini_contents = """[test thermal cluster] -group = Nuclear -name = test thermal cluster -enabled = False -unitcount = 12 -nominalcapacity = 3.900000 -gen-ts = force no generation -min-stable-power = 3.100000 -min-up-time = 3 -min-down-time = 2 -must-run = True -spinning = 2.300000 -volatility.forced = 3.500000 -volatility.planned = 3.700000 -law.forced = geometric -law.planned = geometric -marginal-cost = 2.900000 -spread-cost = 4.200000 -fixed-cost = 3.600000 -startup-cost = 0.700000 -market-bid-cost = 0.800000 -co2 = 1.000000 -nh3 = 2.000000 -so2 = 3.000000 -nox = 4.000000 -pm2_5 = 5.000000 -pm5 = 6.000000 -pm10 = 7.000000 -nmvoc = 8.000000 -op1 = 9.000000 -op2 = 10.000000 -op3 = 11.000000 -op4 = 12.000000 -op5 = 13.000000 -costgeneration = useCostTimeseries -efficiency = 123.400000 -variableomcost = 5.000000 - -""" - expected_list_ini = ConfigParser() - expected_list_ini.read_string(expected_list_ini_contents) - thermal_cluster_properties = ThermalClusterProperties( - group=ThermalClusterGroup.NUCLEAR, - enabled=False, - unit_count=12, - nominal_capacity=3.9, - gen_ts=LocalTSGenerationBehavior.FORCE_NO_GENERATION, - min_stable_power=3.1, - min_up_time=3, - min_down_time=2, - must_run=True, - spinning=2.3, - volatility_forced=3.5, - volatility_planned=3.7, - law_forced=LawOption.GEOMETRIC, - law_planned=LawOption.GEOMETRIC, - marginal_cost=2.9, - spread_cost=4.2, - fixed_cost=3.6, - startup_cost=0.7, - market_bid_cost=0.8, - co2=1.0, - nh3=2.0, - so2=3.0, - nox=4.0, - pm2_5=5.0, - pm5=6.0, - pm10=7.0, - nmvoc=8.0, - op1=9.0, - op2=10.0, - op3=11.0, - op4=12.0, - op5=13.0, - cost_generation=ThermalCostGeneration.USE_COST_TIME_SERIES, - efficiency=123.4, - variable_o_m_cost=5.0, - ) - - # When - local_study_w_areas.get_areas()["fr"].create_thermal_cluster("test thermal cluster", thermal_cluster_properties) - - actual_thermal_list_ini.update_from_ini_file() - with actual_thermal_list_ini.ini_path.open("r") as 
actual_list_ini_file: - actual_list_ini_contents = actual_list_ini_file.read() - - # Then - assert actual_thermal_list_ini.parsed_ini.sections() == expected_list_ini.sections() - assert actual_list_ini_contents == expected_list_ini_contents - assert actual_thermal_list_ini.parsed_ini == expected_list_ini - - def test_list_ini_has_multiple_clusters( - self, local_study_w_thermal, actual_thermal_list_ini, default_thermal_cluster_properties - ): - # Given - local_study_w_thermal.get_areas()["fr"].create_thermal_cluster("test thermal cluster two") - expected_list_ini_dict = ThermalClusterPropertiesLocal( - thermal_name="test thermal cluster", thermal_cluster_properties=default_thermal_cluster_properties - ).list_ini_fields - expected_list_ini_dict.update( - ThermalClusterPropertiesLocal( - thermal_name="test thermal cluster two", thermal_cluster_properties=default_thermal_cluster_properties - ).list_ini_fields - ) - - expected_list_ini = ConfigParser() - expected_list_ini.read_dict(expected_list_ini_dict) - - # When - actual_thermal_list_ini.update_from_ini_file() - - # Then - assert actual_thermal_list_ini.parsed_ini.sections() == expected_list_ini.sections() - assert actual_thermal_list_ini.parsed_ini == expected_list_ini - - def test_clusters_are_alphabetical_in_list_ini( - self, local_study_w_thermal, actual_thermal_list_ini, default_thermal_cluster_properties - ): - # Given - first_cluster_alphabetically = "a is before b and t" - second_cluster_alphabetically = "b is after a" - - expected_list_ini_dict = ThermalClusterPropertiesLocal( - thermal_name=first_cluster_alphabetically, thermal_cluster_properties=default_thermal_cluster_properties - ).list_ini_fields - expected_list_ini_dict.update( - ThermalClusterPropertiesLocal( - thermal_name=second_cluster_alphabetically, - thermal_cluster_properties=default_thermal_cluster_properties, - ).list_ini_fields - ) - expected_list_ini_dict.update( - ThermalClusterPropertiesLocal( - thermal_name="test thermal cluster", 
thermal_cluster_properties=default_thermal_cluster_properties - ).list_ini_fields - ) - expected_list_ini = ConfigParser() - expected_list_ini.read_dict(expected_list_ini_dict) - - # When - local_study_w_thermal.get_areas()["fr"].create_thermal_cluster(second_cluster_alphabetically) - local_study_w_thermal.get_areas()["fr"].create_thermal_cluster(first_cluster_alphabetically) - actual_thermal_list_ini.update_from_ini_file() - - # Then - assert actual_thermal_list_ini.ini_dict.keys() == expected_list_ini_dict.keys() - assert actual_thermal_list_ini.parsed_ini.sections() == expected_list_ini.sections() - assert actual_thermal_list_ini.parsed_ini == expected_list_ini - - -class TestCreateRenewablesCluster: - def test_can_create_renewables_cluster(self, local_study_w_thermal): - # When - renewable_cluster_name = "renewable cluster" - local_study_w_thermal.get_areas()["fr"].create_renewable_cluster( - renewable_cluster_name, RenewableClusterProperties(), None - ) - - # Then - assert local_study_w_thermal.get_areas()["fr"].get_renewables() - assert isinstance( - local_study_w_thermal.get_areas()["fr"].get_renewables()[renewable_cluster_name], RenewableCluster - ) - - def test_renewable_cluster_has_properties(self, local_study_with_renewable): - assert ( - local_study_with_renewable.get_areas()["fr"] - .get_renewables()["renewable cluster"] - .properties.model_dump(exclude_none=True) - ) - - def test_renewable_cluster_has_correct_default_properties( - self, local_study_with_renewable, default_renewable_cluster_properties - ): - assert ( - local_study_with_renewable.get_areas()["fr"].get_renewables()["renewable cluster"].properties - == default_renewable_cluster_properties - ) - - def test_renewables_list_ini_exists(self, local_study_with_renewable): - renewables_list_ini = ( - local_study_with_renewable.service.config.study_path - / IniFileTypes.RENEWABLES_LIST_INI.value.format(area_name="fr") - ) - assert renewables_list_ini.is_file() - - def 
test_renewable_list_ini_has_correct_default_values( - self, default_renewable_cluster_properties, actual_renewable_list_ini - ): - # Given - expected_renewables_list_ini_content = """[renewable cluster] -name = renewable cluster -group = Other RES 1 -enabled = true -nominalcapacity = 0.000000 -unitcount = 1 -ts-interpretation = power-generation - -""" - expected_renewables_list_ini = ConfigParser() - expected_renewables_list_ini.read_string(expected_renewables_list_ini_content) - - # When - with actual_renewable_list_ini.ini_path.open() as renewables_list_ini_file: - actual_renewable_list_ini_content = renewables_list_ini_file.read() - - assert actual_renewable_list_ini_content == expected_renewables_list_ini_content - assert actual_renewable_list_ini.parsed_ini.sections() == expected_renewables_list_ini.sections() - assert actual_renewable_list_ini.parsed_ini == expected_renewables_list_ini - - def test_renewable_cluster_and_ini_have_custom_properties(self, local_study_w_thermal, actual_renewable_list_ini): - # Given - custom_properties = RenewableClusterPropertiesLocal( - "renewable cluster", - RenewableClusterProperties( - group=RenewableClusterGroup.WIND_OFF_SHORE, ts_interpretation=TimeSeriesInterpretation.PRODUCTION_FACTOR - ), - ) - expected_renewables_list_ini_content = """[renewable cluster] -name = renewable cluster -group = Wind Offshore -enabled = true -nominalcapacity = 0.000000 -unitcount = 1 -ts-interpretation = production-factor - -""" - - # When - local_study_w_thermal.get_areas()["fr"].create_renewable_cluster( - renewable_name=custom_properties.renewable_name, - properties=custom_properties.yield_renewable_cluster_properties(), - series=None, - ) - with actual_renewable_list_ini.ini_path.open() as renewables_list_ini_file: - actual_renewable_list_ini_content = renewables_list_ini_file.read() - - assert ( - local_study_w_thermal.get_areas()["fr"].get_renewables()["renewable cluster"].properties - == 
custom_properties.yield_renewable_cluster_properties() - ) - assert actual_renewable_list_ini_content == expected_renewables_list_ini_content - - -class TestCreateSTStorage: - def test_can_create_st_storage(self, local_study_with_renewable): - # When - storage_name = "short term storage" - local_study_with_renewable.get_areas()["fr"].create_st_storage(storage_name) - - # Then - assert local_study_with_renewable.get_areas()["fr"].get_st_storages() - assert isinstance(local_study_with_renewable.get_areas()["fr"].get_st_storages()[storage_name], STStorage) - - def test_storage_has_properties(self, local_study_with_st_storage): - assert ( - local_study_with_st_storage.get_areas()["fr"] - .get_st_storages()["short term storage"] - .properties.model_dump(exclude_none=True) - ) - - def test_storage_has_correct_default_properties(self, local_study_with_st_storage, default_st_storage_properties): - assert ( - local_study_with_st_storage.get_areas()["fr"].get_st_storages()["short term storage"].properties - == default_st_storage_properties - ) - - def test_st_storage_list_ini_exists(self, local_study_with_st_storage): - st_storage_list_ini = ( - local_study_with_st_storage.service.config.study_path - / IniFileTypes.ST_STORAGE_LIST_INI.value.format(area_name="fr") - ) - assert st_storage_list_ini.is_file() - - def test_st_storage_list_ini_has_correct_default_values( - self, default_st_storage_properties, actual_st_storage_list_ini - ): - # Given - expected_st_storage_list_ini_content = """[short term storage] -name = short term storage -group = Other1 -injectionnominalcapacity = 0.000000 -withdrawalnominalcapacity = 0.000000 -reservoircapacity = 0.000000 -efficiency = 1.000000 -initiallevel = 0.500000 -initialleveloptim = false -enabled = true - -""" - expected_st_storage_list_ini = ConfigParser() - expected_st_storage_list_ini.read_string(expected_st_storage_list_ini_content) - - # When - with actual_st_storage_list_ini.ini_path.open() as st_storage_list_ini_file: - 
actual_st_storage_list_ini_content = st_storage_list_ini_file.read() - - assert actual_st_storage_list_ini_content == expected_st_storage_list_ini_content - assert actual_st_storage_list_ini.parsed_ini.sections() == expected_st_storage_list_ini.sections() - assert actual_st_storage_list_ini.parsed_ini == expected_st_storage_list_ini - - def test_st_storage_and_ini_have_custom_properties(self, local_study_with_st_storage, actual_st_storage_list_ini): - # Given - custom_properties = STStoragePropertiesLocal( - "short term storage", - STStorageProperties(group=STStorageGroup.BATTERY, reservoir_capacity=12.345), - ) - expected_st_storage_list_ini_content = """[short term storage] -name = short term storage -group = Battery -injectionnominalcapacity = 0.000000 -withdrawalnominalcapacity = 0.000000 -reservoircapacity = 12.345000 -efficiency = 1.000000 -initiallevel = 0.500000 -initialleveloptim = false -enabled = true - -""" - - # When - local_study_with_st_storage.get_areas()["fr"].create_st_storage( - st_storage_name=custom_properties.st_storage_name, - properties=custom_properties.yield_st_storage_properties(), - ) - with actual_st_storage_list_ini.ini_path.open() as st_storage_list_ini_file: - actual_st_storage_list_ini_content = st_storage_list_ini_file.read() - - assert ( - local_study_with_st_storage.get_areas()["fr"].get_st_storages()["short term storage"].properties - == custom_properties.yield_st_storage_properties() - ) - assert actual_st_storage_list_ini_content == expected_st_storage_list_ini_content - - -class TestCreateHydro: - def test_can_create_hydro(self, local_study_with_st_storage): - # When - local_study_with_st_storage.get_areas()["fr"].create_hydro() - - # Then - assert local_study_with_st_storage.get_areas()["fr"].hydro - assert isinstance(local_study_with_st_storage.get_areas()["fr"].hydro, Hydro) - - def test_hydro_has_properties(self, local_study_w_areas): - assert local_study_w_areas.get_areas()["fr"].hydro.properties - - def 
test_hydro_has_correct_default_properties(self, local_study_w_areas, default_hydro_properties): - assert local_study_w_areas.get_areas()["fr"].hydro.properties == default_hydro_properties - - def test_hydro_ini_exists(self, local_study_w_areas): - hydro_ini = local_study_w_areas.service.config.study_path / IniFileTypes.HYDRO_INI.value - assert hydro_ini.is_file() - - def test_hydro_ini_has_correct_default_values(self, local_study_w_areas): - # Given - expected_hydro_ini_content = """[inter-daily-breakdown] -fr = 1.000000 -it = 1.000000 - -[intra-daily-modulation] -fr = 24.000000 -it = 24.000000 - -[inter-monthly-breakdown] -fr = 1.000000 -it = 1.000000 - -[reservoir] -fr = false -it = false - -[reservoir capacity] -fr = 0.000000 -it = 0.000000 - -[follow load] -fr = true -it = true - -[use water] -fr = false -it = false - -[hard bounds] -fr = false -it = false - -[initialize reservoir date] -fr = 0 -it = 0 - -[use heuristic] -fr = true -it = true - -[power to level] -fr = false -it = false - -[use leeway] -fr = false -it = false - -[leeway low] -fr = 1.000000 -it = 1.000000 - -[leeway up] -fr = 1.000000 -it = 1.000000 - -[pumping efficiency] -fr = 1.000000 -it = 1.000000 - -""" - expected_hydro_ini = ConfigParser() - expected_hydro_ini.read_string(expected_hydro_ini_content) - actual_hydro_ini = IniFile(local_study_w_areas.service.config.study_path, IniFileTypes.HYDRO_INI) - - # When - with actual_hydro_ini.ini_path.open() as st_storage_list_ini_file: - actual_hydro_ini_content = st_storage_list_ini_file.read() - - assert actual_hydro_ini_content == expected_hydro_ini_content - assert actual_hydro_ini.parsed_ini.sections() == expected_hydro_ini.sections() - assert actual_hydro_ini.parsed_ini == expected_hydro_ini - - def test_hydro_ini_has_correct_sorted_areas(self, actual_hydro_ini): - # Given - expected_hydro_ini_content = """[inter-daily-breakdown] -at = 1.000000 -fr = 1.000000 -it = 1.000000 - -[intra-daily-modulation] -at = 24.000000 -fr = 24.000000 -it = 
24.000000 - -[inter-monthly-breakdown] -at = 1.000000 -fr = 1.000000 -it = 1.000000 - -[reservoir] -at = false -fr = false -it = false - -[reservoir capacity] -at = 0.000000 -fr = 0.000000 -it = 0.000000 - -[follow load] -at = true -fr = true -it = true - -[use water] -at = false -fr = false -it = false - -[hard bounds] -at = false -fr = false -it = false - -[initialize reservoir date] -at = 0 -fr = 0 -it = 0 - -[use heuristic] -at = true -fr = true -it = true - -[power to level] -at = false -fr = false -it = false - -[use leeway] -at = false -fr = false -it = false - -[leeway low] -at = 1.000000 -fr = 1.000000 -it = 1.000000 - -[leeway up] -at = 1.000000 -fr = 1.000000 -it = 1.000000 - -[pumping efficiency] -at = 1.000000 -fr = 1.000000 -it = 1.000000 - -""" - expected_hydro_ini = ConfigParser() - expected_hydro_ini.read_string(expected_hydro_ini_content) - - # When - with actual_hydro_ini.ini_path.open() as st_storage_list_ini_file: - actual_hydro_ini_content = st_storage_list_ini_file.read() - - assert actual_hydro_ini_content == expected_hydro_ini_content - assert actual_hydro_ini.parsed_ini.sections() == expected_hydro_ini.sections() - assert actual_hydro_ini.parsed_ini == expected_hydro_ini - - -class TestCreateReserves: - def test_can_create_reserves_ts_file(self, area_fr): - # Given - reserves_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.RESERVES.value.format( - area_id=area_fr.id - ) - expected_reserves_file_path = area_fr._area_service.config.study_path / "input/reserves/fr.txt" - - # When - area_fr.create_reserves(None) - - # Then - assert reserves_file_path == expected_reserves_file_path - assert reserves_file_path.exists() - assert reserves_file_path.is_file() - - def test_can_create_reserves_ts_file_with_time_series(self, area_fr): - # Given - reserves_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.RESERVES.value.format( - area_id=area_fr.id - ) - expected_time_series_string = """1.0\t1.0\t1.0 
-1.0\t1.0\t1.0 -""" - expected_time_series = pd.read_csv(StringIO(expected_time_series_string), sep="\t", header=None) - - # When - area_fr.create_reserves(pd.DataFrame(np.ones([2, 3]))) - actual_time_series = pd.read_csv(reserves_file_path, sep="\t", header=None) - with reserves_file_path.open("r") as reserves_ts_file: - actual_time_series_string = reserves_ts_file.read() - - # Then - assert actual_time_series.equals(expected_time_series) - assert actual_time_series_string == expected_time_series_string - - -class TestCreateMiscGen: - def test_can_create_misc_gen_ts_file(self, area_fr): - # Given - misc_gen_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.MISC_GEN.value.format( - area_id=area_fr.id - ) - expected_misc_gen_file_path = area_fr._area_service.config.study_path / "input/misc-gen/miscgen-fr.txt" - - # When - area_fr.create_misc_gen(None) - - # Then - assert misc_gen_file_path == expected_misc_gen_file_path - assert misc_gen_file_path.exists() - assert misc_gen_file_path.is_file() - - def test_can_create_misc_gen_ts_file_with_time_series(self, area_fr): - # Given - misc_gen_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.MISC_GEN.value.format( - area_id=area_fr.id - ) - expected_time_series_string = """1.0\t1.0\t1.0 -1.0\t1.0\t1.0 -""" - expected_time_series = pd.read_csv(StringIO(expected_time_series_string), sep="\t", header=None) - - # When - area_fr.create_misc_gen(pd.DataFrame(np.ones([2, 3]))) - actual_time_series = pd.read_csv(misc_gen_file_path, sep="\t", header=None) - with misc_gen_file_path.open("r") as misc_gen_ts_file: - actual_time_series_string = misc_gen_ts_file.read() - - # Then - assert actual_time_series.equals(expected_time_series) - assert actual_time_series_string == expected_time_series_string - - -class TestCreateWind: - def test_can_create_wind_ts_file(self, area_fr): - # Given - wind_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.WIND.value.format( - 
area_id=area_fr.id - ) - expected_wind_file_path = area_fr._area_service.config.study_path / "input/wind/series/wind_fr.txt" - - # When - area_fr.create_wind(None) - - # Then - assert wind_file_path == expected_wind_file_path - assert wind_file_path.exists() - assert wind_file_path.is_file() - - def test_can_create_wind_ts_file_with_time_series(self, area_fr): - # Given - wind_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.WIND.value.format( - area_id=area_fr.id - ) - expected_time_series_string = """1.0\t1.0\t1.0 -1.0\t1.0\t1.0 -""" - expected_time_series = pd.read_csv(StringIO(expected_time_series_string), sep="\t", header=None) - - # When - area_fr.create_wind(pd.DataFrame(np.ones([2, 3]))) - actual_time_series = pd.read_csv(wind_file_path, sep="\t", header=None) - with wind_file_path.open("r") as wind_ts_file: - actual_time_series_string = wind_ts_file.read() - - # Then - assert actual_time_series.equals(expected_time_series) - assert actual_time_series_string == expected_time_series_string - - -class TestCreateSolar: - def test_can_create_solar_ts_file(self, area_fr): - # Given - solar_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.SOLAR.value.format( - area_id=area_fr.id - ) - expected_solar_file_path = area_fr._area_service.config.study_path / "input/solar/series/solar_fr.txt" - - # When - area_fr.create_solar(None) - - # Then - assert solar_file_path == expected_solar_file_path - assert solar_file_path.exists() - assert solar_file_path.is_file() - - def test_can_create_solar_ts_file_with_time_series(self, area_fr): - # Given - solar_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.SOLAR.value.format( - area_id=area_fr.id - ) - expected_time_series_string = """1.0\t1.0\t1.0 -1.0\t1.0\t1.0 -""" - expected_time_series = pd.read_csv(StringIO(expected_time_series_string), sep="\t", header=None) - - # When - area_fr.create_solar(pd.DataFrame(np.ones([2, 3]))) - actual_time_series = 
pd.read_csv(solar_file_path, sep="\t", header=None) - with solar_file_path.open("r") as solar_ts_file: - actual_time_series_string = solar_ts_file.read() - - # Then - assert actual_time_series.equals(expected_time_series) - assert actual_time_series_string == expected_time_series_string +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +from configparser import ConfigParser +from io import StringIO + +import numpy as np +import pandas as pd + +from antares.model.hydro import Hydro +from antares.model.renewable import ( + RenewableClusterProperties, + RenewableCluster, + RenewableClusterGroup, + TimeSeriesInterpretation, + RenewableClusterPropertiesLocal, +) +from antares.model.st_storage import ( + STStorage, + STStoragePropertiesLocal, + STStorageProperties, + STStorageGroup, +) +from antares.model.thermal import ( + ThermalCluster, + ThermalClusterProperties, + ThermalClusterGroup, + LocalTSGenerationBehavior, + LawOption, + ThermalCostGeneration, + ThermalClusterPropertiesLocal, +) +from antares.tools.ini_tool import IniFileTypes, IniFile +from antares.tools.time_series_tool import TimeSeriesFileType + + +class TestCreateThermalCluster: + def test_can_be_created(self, local_study_w_areas): + # Given + thermal_name = "test_thermal_cluster" + + # When + created_thermal = local_study_w_areas.get_areas()["fr"].create_thermal_cluster(thermal_name) + assert isinstance(created_thermal, ThermalCluster) + + def test_has_default_properties(self, local_study_w_thermal): + assert ( + local_study_w_thermal.get_areas()["fr"] + .get_thermals()["test thermal cluster"] + .properties.model_dump(exclude_none=True) + ) + + def 
test_has_correct_default_properties(self, local_study_w_thermal, default_thermal_cluster_properties): + # Given + expected_thermal_cluster_properties = default_thermal_cluster_properties + + # When + actual_thermal_cluster_properties = ( + local_study_w_thermal.get_areas()["fr"].get_thermals()["test thermal cluster"].properties + ) + + assert expected_thermal_cluster_properties == actual_thermal_cluster_properties + + def test_required_ini_files_exist(self, tmp_path, local_study_w_thermal): + # Given + expected_list_ini_path = ( + local_study_w_thermal.service.config.study_path / IniFileTypes.THERMAL_LIST_INI.value.format(area_name="fr") + ) + expected_areas_ini_path = local_study_w_thermal.service.config.study_path / IniFileTypes.THERMAL_AREAS_INI.value + + # Then + assert expected_list_ini_path.is_file() + assert expected_areas_ini_path.is_file() + + def test_list_ini_has_default_properties(self, tmp_path, local_study_w_thermal, actual_thermal_list_ini): + # Given + expected_list_ini_contents = """[test thermal cluster] +group = Other 1 +name = test thermal cluster +enabled = True +unitcount = 1 +nominalcapacity = 0.000000 +gen-ts = use global +min-stable-power = 0.000000 +min-up-time = 1 +min-down-time = 1 +must-run = False +spinning = 0.000000 +volatility.forced = 0.000000 +volatility.planned = 0.000000 +law.forced = uniform +law.planned = uniform +marginal-cost = 0.000000 +spread-cost = 0.000000 +fixed-cost = 0.000000 +startup-cost = 0.000000 +market-bid-cost = 0.000000 +co2 = 0.000000 +nh3 = 0.000000 +so2 = 0.000000 +nox = 0.000000 +pm2_5 = 0.000000 +pm5 = 0.000000 +pm10 = 0.000000 +nmvoc = 0.000000 +op1 = 0.000000 +op2 = 0.000000 +op3 = 0.000000 +op4 = 0.000000 +op5 = 0.000000 +costgeneration = SetManually +efficiency = 100.000000 +variableomcost = 0.000000 + +""" + expected_list_ini = ConfigParser() + expected_list_ini.read_string(expected_list_ini_contents) + with actual_thermal_list_ini.ini_path.open("r") as actual_list_ini_file: + 
actual_list_ini_contents = actual_list_ini_file.read() + + # Then + assert actual_thermal_list_ini.parsed_ini.sections() == expected_list_ini.sections() + assert actual_list_ini_contents == expected_list_ini_contents + assert actual_thermal_list_ini.parsed_ini == expected_list_ini + + def test_list_ini_has_custom_properties(self, tmp_path, local_study_w_areas, actual_thermal_list_ini): + # Given + expected_list_ini_contents = """[test thermal cluster] +group = Nuclear +name = test thermal cluster +enabled = False +unitcount = 12 +nominalcapacity = 3.900000 +gen-ts = force no generation +min-stable-power = 3.100000 +min-up-time = 3 +min-down-time = 2 +must-run = True +spinning = 2.300000 +volatility.forced = 3.500000 +volatility.planned = 3.700000 +law.forced = geometric +law.planned = geometric +marginal-cost = 2.900000 +spread-cost = 4.200000 +fixed-cost = 3.600000 +startup-cost = 0.700000 +market-bid-cost = 0.800000 +co2 = 1.000000 +nh3 = 2.000000 +so2 = 3.000000 +nox = 4.000000 +pm2_5 = 5.000000 +pm5 = 6.000000 +pm10 = 7.000000 +nmvoc = 8.000000 +op1 = 9.000000 +op2 = 10.000000 +op3 = 11.000000 +op4 = 12.000000 +op5 = 13.000000 +costgeneration = useCostTimeseries +efficiency = 123.400000 +variableomcost = 5.000000 + +""" + expected_list_ini = ConfigParser() + expected_list_ini.read_string(expected_list_ini_contents) + thermal_cluster_properties = ThermalClusterProperties( + group=ThermalClusterGroup.NUCLEAR, + enabled=False, + unit_count=12, + nominal_capacity=3.9, + gen_ts=LocalTSGenerationBehavior.FORCE_NO_GENERATION, + min_stable_power=3.1, + min_up_time=3, + min_down_time=2, + must_run=True, + spinning=2.3, + volatility_forced=3.5, + volatility_planned=3.7, + law_forced=LawOption.GEOMETRIC, + law_planned=LawOption.GEOMETRIC, + marginal_cost=2.9, + spread_cost=4.2, + fixed_cost=3.6, + startup_cost=0.7, + market_bid_cost=0.8, + co2=1.0, + nh3=2.0, + so2=3.0, + nox=4.0, + pm2_5=5.0, + pm5=6.0, + pm10=7.0, + nmvoc=8.0, + op1=9.0, + op2=10.0, + op3=11.0, + 
op4=12.0, + op5=13.0, + cost_generation=ThermalCostGeneration.USE_COST_TIME_SERIES, + efficiency=123.4, + variable_o_m_cost=5.0, + ) + + # When + local_study_w_areas.get_areas()["fr"].create_thermal_cluster("test thermal cluster", thermal_cluster_properties) + + actual_thermal_list_ini.update_from_ini_file() + with actual_thermal_list_ini.ini_path.open("r") as actual_list_ini_file: + actual_list_ini_contents = actual_list_ini_file.read() + + # Then + assert actual_thermal_list_ini.parsed_ini.sections() == expected_list_ini.sections() + assert actual_list_ini_contents == expected_list_ini_contents + assert actual_thermal_list_ini.parsed_ini == expected_list_ini + + def test_list_ini_has_multiple_clusters( + self, + local_study_w_thermal, + actual_thermal_list_ini, + default_thermal_cluster_properties, + ): + # Given + local_study_w_thermal.get_areas()["fr"].create_thermal_cluster("test thermal cluster two") + expected_list_ini_dict = ThermalClusterPropertiesLocal( + thermal_name="test thermal cluster", + thermal_cluster_properties=default_thermal_cluster_properties, + ).list_ini_fields + expected_list_ini_dict.update( + ThermalClusterPropertiesLocal( + thermal_name="test thermal cluster two", + thermal_cluster_properties=default_thermal_cluster_properties, + ).list_ini_fields + ) + + expected_list_ini = ConfigParser() + expected_list_ini.read_dict(expected_list_ini_dict) + + # When + actual_thermal_list_ini.update_from_ini_file() + + # Then + assert actual_thermal_list_ini.parsed_ini.sections() == expected_list_ini.sections() + assert actual_thermal_list_ini.parsed_ini == expected_list_ini + + def test_clusters_are_alphabetical_in_list_ini( + self, + local_study_w_thermal, + actual_thermal_list_ini, + default_thermal_cluster_properties, + ): + # Given + first_cluster_alphabetically = "a is before b and t" + second_cluster_alphabetically = "b is after a" + + expected_list_ini_dict = ThermalClusterPropertiesLocal( + thermal_name=first_cluster_alphabetically, + 
thermal_cluster_properties=default_thermal_cluster_properties, + ).list_ini_fields + expected_list_ini_dict.update( + ThermalClusterPropertiesLocal( + thermal_name=second_cluster_alphabetically, + thermal_cluster_properties=default_thermal_cluster_properties, + ).list_ini_fields + ) + expected_list_ini_dict.update( + ThermalClusterPropertiesLocal( + thermal_name="test thermal cluster", + thermal_cluster_properties=default_thermal_cluster_properties, + ).list_ini_fields + ) + expected_list_ini = ConfigParser() + expected_list_ini.read_dict(expected_list_ini_dict) + + # When + local_study_w_thermal.get_areas()["fr"].create_thermal_cluster(second_cluster_alphabetically) + local_study_w_thermal.get_areas()["fr"].create_thermal_cluster(first_cluster_alphabetically) + actual_thermal_list_ini.update_from_ini_file() + + # Then + assert actual_thermal_list_ini.ini_dict.keys() == expected_list_ini_dict.keys() + assert actual_thermal_list_ini.parsed_ini.sections() == expected_list_ini.sections() + assert actual_thermal_list_ini.parsed_ini == expected_list_ini + + +class TestCreateRenewablesCluster: + def test_can_create_renewables_cluster(self, local_study_w_thermal): + # When + renewable_cluster_name = "renewable cluster" + local_study_w_thermal.get_areas()["fr"].create_renewable_cluster( + renewable_cluster_name, RenewableClusterProperties(), None + ) + + # Then + assert local_study_w_thermal.get_areas()["fr"].get_renewables() + assert isinstance( + local_study_w_thermal.get_areas()["fr"].get_renewables()[renewable_cluster_name], + RenewableCluster, + ) + + def test_renewable_cluster_has_properties(self, local_study_with_renewable): + assert ( + local_study_with_renewable.get_areas()["fr"] + .get_renewables()["renewable cluster"] + .properties.model_dump(exclude_none=True) + ) + + def test_renewable_cluster_has_correct_default_properties( + self, local_study_with_renewable, default_renewable_cluster_properties + ): + assert ( + 
local_study_with_renewable.get_areas()["fr"].get_renewables()["renewable cluster"].properties + == default_renewable_cluster_properties + ) + + def test_renewables_list_ini_exists(self, local_study_with_renewable): + renewables_list_ini = ( + local_study_with_renewable.service.config.study_path + / IniFileTypes.RENEWABLES_LIST_INI.value.format(area_name="fr") + ) + assert renewables_list_ini.is_file() + + def test_renewable_list_ini_has_correct_default_values( + self, default_renewable_cluster_properties, actual_renewable_list_ini + ): + # Given + expected_renewables_list_ini_content = """[renewable cluster] +name = renewable cluster +group = Other RES 1 +enabled = true +nominalcapacity = 0.000000 +unitcount = 1 +ts-interpretation = power-generation + +""" + expected_renewables_list_ini = ConfigParser() + expected_renewables_list_ini.read_string(expected_renewables_list_ini_content) + + # When + with actual_renewable_list_ini.ini_path.open() as renewables_list_ini_file: + actual_renewable_list_ini_content = renewables_list_ini_file.read() + + assert actual_renewable_list_ini_content == expected_renewables_list_ini_content + assert actual_renewable_list_ini.parsed_ini.sections() == expected_renewables_list_ini.sections() + assert actual_renewable_list_ini.parsed_ini == expected_renewables_list_ini + + def test_renewable_cluster_and_ini_have_custom_properties(self, local_study_w_thermal, actual_renewable_list_ini): + # Given + custom_properties = RenewableClusterPropertiesLocal( + "renewable cluster", + RenewableClusterProperties( + group=RenewableClusterGroup.WIND_OFF_SHORE, + ts_interpretation=TimeSeriesInterpretation.PRODUCTION_FACTOR, + ), + ) + expected_renewables_list_ini_content = """[renewable cluster] +name = renewable cluster +group = Wind Offshore +enabled = true +nominalcapacity = 0.000000 +unitcount = 1 +ts-interpretation = production-factor + +""" + + # When + local_study_w_thermal.get_areas()["fr"].create_renewable_cluster( + 
renewable_name=custom_properties.renewable_name, + properties=custom_properties.yield_renewable_cluster_properties(), + series=None, + ) + with actual_renewable_list_ini.ini_path.open() as renewables_list_ini_file: + actual_renewable_list_ini_content = renewables_list_ini_file.read() + + assert ( + local_study_w_thermal.get_areas()["fr"].get_renewables()["renewable cluster"].properties + == custom_properties.yield_renewable_cluster_properties() + ) + assert actual_renewable_list_ini_content == expected_renewables_list_ini_content + + +class TestCreateSTStorage: + def test_can_create_st_storage(self, local_study_with_renewable): + # When + storage_name = "short term storage" + local_study_with_renewable.get_areas()["fr"].create_st_storage(storage_name) + + # Then + assert local_study_with_renewable.get_areas()["fr"].get_st_storages() + assert isinstance( + local_study_with_renewable.get_areas()["fr"].get_st_storages()[storage_name], + STStorage, + ) + + def test_storage_has_properties(self, local_study_with_st_storage): + assert ( + local_study_with_st_storage.get_areas()["fr"] + .get_st_storages()["short term storage"] + .properties.model_dump(exclude_none=True) + ) + + def test_storage_has_correct_default_properties(self, local_study_with_st_storage, default_st_storage_properties): + assert ( + local_study_with_st_storage.get_areas()["fr"].get_st_storages()["short term storage"].properties + == default_st_storage_properties + ) + + def test_st_storage_list_ini_exists(self, local_study_with_st_storage): + st_storage_list_ini = ( + local_study_with_st_storage.service.config.study_path + / IniFileTypes.ST_STORAGE_LIST_INI.value.format(area_name="fr") + ) + assert st_storage_list_ini.is_file() + + def test_st_storage_list_ini_has_correct_default_values( + self, default_st_storage_properties, actual_st_storage_list_ini + ): + # Given + expected_st_storage_list_ini_content = """[short term storage] +name = short term storage +group = Other1 +injectionnominalcapacity = 
0.000000 +withdrawalnominalcapacity = 0.000000 +reservoircapacity = 0.000000 +efficiency = 1.000000 +initiallevel = 0.500000 +initialleveloptim = false +enabled = true + +""" + expected_st_storage_list_ini = ConfigParser() + expected_st_storage_list_ini.read_string(expected_st_storage_list_ini_content) + + # When + with actual_st_storage_list_ini.ini_path.open() as st_storage_list_ini_file: + actual_st_storage_list_ini_content = st_storage_list_ini_file.read() + + assert actual_st_storage_list_ini_content == expected_st_storage_list_ini_content + assert actual_st_storage_list_ini.parsed_ini.sections() == expected_st_storage_list_ini.sections() + assert actual_st_storage_list_ini.parsed_ini == expected_st_storage_list_ini + + def test_st_storage_and_ini_have_custom_properties(self, local_study_with_st_storage, actual_st_storage_list_ini): + # Given + custom_properties = STStoragePropertiesLocal( + "short term storage", + STStorageProperties(group=STStorageGroup.BATTERY, reservoir_capacity=12.345), + ) + expected_st_storage_list_ini_content = """[short term storage] +name = short term storage +group = Battery +injectionnominalcapacity = 0.000000 +withdrawalnominalcapacity = 0.000000 +reservoircapacity = 12.345000 +efficiency = 1.000000 +initiallevel = 0.500000 +initialleveloptim = false +enabled = true + +""" + + # When + local_study_with_st_storage.get_areas()["fr"].create_st_storage( + st_storage_name=custom_properties.st_storage_name, + properties=custom_properties.yield_st_storage_properties(), + ) + with actual_st_storage_list_ini.ini_path.open() as st_storage_list_ini_file: + actual_st_storage_list_ini_content = st_storage_list_ini_file.read() + + assert ( + local_study_with_st_storage.get_areas()["fr"].get_st_storages()["short term storage"].properties + == custom_properties.yield_st_storage_properties() + ) + assert actual_st_storage_list_ini_content == expected_st_storage_list_ini_content + + +class TestCreateHydro: + def test_can_create_hydro(self, 
local_study_with_st_storage): + # When + local_study_with_st_storage.get_areas()["fr"].create_hydro() + + # Then + assert local_study_with_st_storage.get_areas()["fr"].hydro + assert isinstance(local_study_with_st_storage.get_areas()["fr"].hydro, Hydro) + + def test_hydro_has_properties(self, local_study_w_areas): + assert local_study_w_areas.get_areas()["fr"].hydro.properties + + def test_hydro_has_correct_default_properties(self, local_study_w_areas, default_hydro_properties): + assert local_study_w_areas.get_areas()["fr"].hydro.properties == default_hydro_properties + + def test_hydro_ini_exists(self, local_study_w_areas): + hydro_ini = local_study_w_areas.service.config.study_path / IniFileTypes.HYDRO_INI.value + assert hydro_ini.is_file() + + def test_hydro_ini_has_correct_default_values(self, local_study_w_areas): + # Given + expected_hydro_ini_content = """[inter-daily-breakdown] +fr = 1.000000 +it = 1.000000 + +[intra-daily-modulation] +fr = 24.000000 +it = 24.000000 + +[inter-monthly-breakdown] +fr = 1.000000 +it = 1.000000 + +[reservoir] +fr = false +it = false + +[reservoir capacity] +fr = 0.000000 +it = 0.000000 + +[follow load] +fr = true +it = true + +[use water] +fr = false +it = false + +[hard bounds] +fr = false +it = false + +[initialize reservoir date] +fr = 0 +it = 0 + +[use heuristic] +fr = true +it = true + +[power to level] +fr = false +it = false + +[use leeway] +fr = false +it = false + +[leeway low] +fr = 1.000000 +it = 1.000000 + +[leeway up] +fr = 1.000000 +it = 1.000000 + +[pumping efficiency] +fr = 1.000000 +it = 1.000000 + +""" + expected_hydro_ini = ConfigParser() + expected_hydro_ini.read_string(expected_hydro_ini_content) + actual_hydro_ini = IniFile(local_study_w_areas.service.config.study_path, IniFileTypes.HYDRO_INI) + + # When + with actual_hydro_ini.ini_path.open() as st_storage_list_ini_file: + actual_hydro_ini_content = st_storage_list_ini_file.read() + + assert actual_hydro_ini_content == expected_hydro_ini_content + assert 
actual_hydro_ini.parsed_ini.sections() == expected_hydro_ini.sections() + assert actual_hydro_ini.parsed_ini == expected_hydro_ini + + def test_hydro_ini_has_correct_sorted_areas(self, actual_hydro_ini): + # Given + expected_hydro_ini_content = """[inter-daily-breakdown] +at = 1.000000 +fr = 1.000000 +it = 1.000000 + +[intra-daily-modulation] +at = 24.000000 +fr = 24.000000 +it = 24.000000 + +[inter-monthly-breakdown] +at = 1.000000 +fr = 1.000000 +it = 1.000000 + +[reservoir] +at = false +fr = false +it = false + +[reservoir capacity] +at = 0.000000 +fr = 0.000000 +it = 0.000000 + +[follow load] +at = true +fr = true +it = true + +[use water] +at = false +fr = false +it = false + +[hard bounds] +at = false +fr = false +it = false + +[initialize reservoir date] +at = 0 +fr = 0 +it = 0 + +[use heuristic] +at = true +fr = true +it = true + +[power to level] +at = false +fr = false +it = false + +[use leeway] +at = false +fr = false +it = false + +[leeway low] +at = 1.000000 +fr = 1.000000 +it = 1.000000 + +[leeway up] +at = 1.000000 +fr = 1.000000 +it = 1.000000 + +[pumping efficiency] +at = 1.000000 +fr = 1.000000 +it = 1.000000 + +""" + expected_hydro_ini = ConfigParser() + expected_hydro_ini.read_string(expected_hydro_ini_content) + + # When + with actual_hydro_ini.ini_path.open() as st_storage_list_ini_file: + actual_hydro_ini_content = st_storage_list_ini_file.read() + + assert actual_hydro_ini_content == expected_hydro_ini_content + assert actual_hydro_ini.parsed_ini.sections() == expected_hydro_ini.sections() + assert actual_hydro_ini.parsed_ini == expected_hydro_ini + + +class TestCreateReserves: + def test_can_create_reserves_ts_file(self, area_fr): + # Given + reserves_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.RESERVES.value.format( + area_id=area_fr.id + ) + expected_reserves_file_path = area_fr._area_service.config.study_path / "input/reserves/fr.txt" + + # When + area_fr.create_reserves(None) + + # Then + assert 
reserves_file_path == expected_reserves_file_path + assert reserves_file_path.exists() + assert reserves_file_path.is_file() + + def test_can_create_reserves_ts_file_with_time_series(self, area_fr): + # Given + reserves_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.RESERVES.value.format( + area_id=area_fr.id + ) + expected_time_series_string = """1.0\t1.0\t1.0 +1.0\t1.0\t1.0 +""" + expected_time_series = pd.read_csv(StringIO(expected_time_series_string), sep="\t", header=None) + + # When + area_fr.create_reserves(pd.DataFrame(np.ones([2, 3]))) + actual_time_series = pd.read_csv(reserves_file_path, sep="\t", header=None) + with reserves_file_path.open("r") as reserves_ts_file: + actual_time_series_string = reserves_ts_file.read() + + # Then + assert actual_time_series.equals(expected_time_series) + assert actual_time_series_string == expected_time_series_string + + +class TestCreateMiscGen: + def test_can_create_misc_gen_ts_file(self, area_fr): + # Given + misc_gen_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.MISC_GEN.value.format( + area_id=area_fr.id + ) + expected_misc_gen_file_path = area_fr._area_service.config.study_path / "input/misc-gen/miscgen-fr.txt" + + # When + area_fr.create_misc_gen(None) + + # Then + assert misc_gen_file_path == expected_misc_gen_file_path + assert misc_gen_file_path.exists() + assert misc_gen_file_path.is_file() + + def test_can_create_misc_gen_ts_file_with_time_series(self, area_fr): + # Given + misc_gen_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.MISC_GEN.value.format( + area_id=area_fr.id + ) + expected_time_series_string = """1.0\t1.0\t1.0 +1.0\t1.0\t1.0 +""" + expected_time_series = pd.read_csv(StringIO(expected_time_series_string), sep="\t", header=None) + + # When + area_fr.create_misc_gen(pd.DataFrame(np.ones([2, 3]))) + actual_time_series = pd.read_csv(misc_gen_file_path, sep="\t", header=None) + with misc_gen_file_path.open("r") as 
misc_gen_ts_file: + actual_time_series_string = misc_gen_ts_file.read() + + # Then + assert actual_time_series.equals(expected_time_series) + assert actual_time_series_string == expected_time_series_string + + +class TestCreateWind: + def test_can_create_wind_ts_file(self, area_fr): + # Given + wind_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.WIND.value.format( + area_id=area_fr.id + ) + expected_wind_file_path = area_fr._area_service.config.study_path / "input/wind/series/wind_fr.txt" + + # When + area_fr.create_wind(None) + + # Then + assert wind_file_path == expected_wind_file_path + assert wind_file_path.exists() + assert wind_file_path.is_file() + + def test_can_create_wind_ts_file_with_time_series(self, area_fr): + # Given + wind_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.WIND.value.format( + area_id=area_fr.id + ) + expected_time_series_string = """1.0\t1.0\t1.0 +1.0\t1.0\t1.0 +""" + expected_time_series = pd.read_csv(StringIO(expected_time_series_string), sep="\t", header=None) + + # When + area_fr.create_wind(pd.DataFrame(np.ones([2, 3]))) + actual_time_series = pd.read_csv(wind_file_path, sep="\t", header=None) + with wind_file_path.open("r") as wind_ts_file: + actual_time_series_string = wind_ts_file.read() + + # Then + assert actual_time_series.equals(expected_time_series) + assert actual_time_series_string == expected_time_series_string + + +class TestCreateSolar: + def test_can_create_solar_ts_file(self, area_fr): + # Given + solar_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.SOLAR.value.format( + area_id=area_fr.id + ) + expected_solar_file_path = area_fr._area_service.config.study_path / "input/solar/series/solar_fr.txt" + + # When + area_fr.create_solar(None) + + # Then + assert solar_file_path == expected_solar_file_path + assert solar_file_path.exists() + assert solar_file_path.is_file() + + def test_can_create_solar_ts_file_with_time_series(self, area_fr): + # 
Given + solar_file_path = area_fr._area_service.config.study_path / TimeSeriesFileType.SOLAR.value.format( + area_id=area_fr.id + ) + expected_time_series_string = """1.0\t1.0\t1.0 +1.0\t1.0\t1.0 +""" + expected_time_series = pd.read_csv(StringIO(expected_time_series_string), sep="\t", header=None) + + # When + area_fr.create_solar(pd.DataFrame(np.ones([2, 3]))) + actual_time_series = pd.read_csv(solar_file_path, sep="\t", header=None) + with solar_file_path.open("r") as solar_ts_file: + actual_time_series_string = solar_ts_file.read() + + # Then + assert actual_time_series.equals(expected_time_series) + assert actual_time_series_string == expected_time_series_string diff --git a/tests/antares/services/local_services/test_study.py b/tests/antares/services/local_services/test_study.py index 8a12a6a2..f52082c9 100644 --- a/tests/antares/services/local_services/test_study.py +++ b/tests/antares/services/local_services/test_study.py @@ -1,906 +1,938 @@ -import logging -import os -import time -from configparser import ConfigParser -from pathlib import Path - -import pytest - -from antares.config.local_configuration import LocalConfiguration -from antares.exceptions.exceptions import CustomError, LinkCreationError -from antares.model.area import AreaProperties, AreaUi, AreaUiLocal, AreaPropertiesLocal, Area -from antares.model.commons import FilterOption -from antares.model.hydro import Hydro -from antares.model.link import ( - Link, - LinkProperties, - LinkPropertiesLocal, - TransmissionCapacities, - LinkUi, - AssetType, - LinkStyle, - LinkUiLocal, -) -from antares.model.study import create_study_local -from antares.service.local_services.area_local import AreaLocalService -from antares.service.local_services.link_local import LinkLocalService -from antares.service.local_services.renewable_local import RenewableLocalService -from antares.service.local_services.st_storage_local import ShortTermStorageLocalService -from antares.service.local_services.thermal_local import 
ThermalLocalService - - -class TestCreateStudy: - def test_create_study_success(self, tmp_path, caplog): - # Given - study_name = "studyTest" - version = "850" - caplog.set_level(logging.INFO) - - expected_subdirectories = ["input", "layers", "output", "setting", "user"] - - expected_study_path = tmp_path / "studyTest" - - # When - create_study_local(study_name, version, LocalConfiguration(tmp_path, study_name)) - - # Then - assert os.path.exists(expected_study_path) - assert os.path.isdir(expected_study_path) - - for subdirectory in expected_subdirectories: - subdirectory_path = expected_study_path / subdirectory - assert subdirectory_path.exists() - assert subdirectory_path.is_dir() - - # Then - assert caplog.records[0].msg == f"Study successfully created: {study_name}" - - def test_desktop_ini_creation(self, tmp_path, local_study): - # Given - expected_desktop_path = tmp_path / local_study.name / "Desktop.ini" - desktop_ini_content = f"""[.ShellClassInfo] -IconFile = settings/resources/study.ico -IconIndex = 0 -InfoTip = Antares Study {local_study.version}: {local_study.name} -""" - - # When - with open(expected_desktop_path, "r") as file: - actual_content = file.read() - - # Then - assert actual_content == desktop_ini_content - assert expected_desktop_path.exists() - assert expected_desktop_path.is_file() - - def test_study_antares_content(self, monkeypatch, tmp_path): - # Given - study_name = "studyTest" - version = "850" - expected_study_antares_path = tmp_path / "studyTest/study.antares" - antares_content = f"""[antares] -version = {version} -caption = {study_name} -created = {"123"} -lastsave = {"123"} -author = Unknown -""" - - monkeypatch.setattr(time, "time", lambda: "123") - - # When - create_study_local(study_name, version, LocalConfiguration(tmp_path, study_name)) - with open(expected_study_antares_path, "r") as file: - actual_content = file.read() - - # Then - assert actual_content == antares_content - - def test_directory_not_exists_error(self, 
caplog): - # Given - local_path = Path("/fake/path/") - study_name = "study_name" - with caplog.at_level(logging.ERROR): - with pytest.raises(ValueError, match=f"Provided directory {local_path} does not exist."): - create_study_local(study_name, "880", LocalConfiguration(local_path, study_name)) - - def test_verify_study_already_exists_error(self, monkeypatch, tmp_path, caplog): - # Given - study_name = "studyTest" - version = "850" - - def mock_verify_study_already_exists(study_directory): - raise FileExistsError(f"Failed to create study. Study {study_directory} already exists") - - monkeypatch.setattr("antares.model.study._verify_study_already_exists", mock_verify_study_already_exists) - - # When - with caplog.at_level(logging.ERROR): - with pytest.raises( - FileExistsError, match=f"Failed to create study. Study {tmp_path}/{study_name} already exists" - ): - create_study_local(study_name, version, LocalConfiguration(tmp_path, study_name)) - - -class TestCreateArea: - def test_areas_sets_ini_content(self, tmp_path, local_study): - # Given - expected_sets_path = tmp_path / local_study.name / "input" / "areas" / "sets.ini" - - expected_sets_ini_content = """[all areas] -caption = All areas -comments = Spatial aggregates on all areas -output = false -apply-filter = add-all - -""" - - # When - local_study.create_area("area_test") - - with open(expected_sets_path, "r") as file: - actual_content = file.read() - - # Then - assert actual_content == expected_sets_ini_content - - def test_areas_list_txt_content(self, tmp_path, caplog, local_study): - # Given - study_antares_path = tmp_path / local_study.name - caplog.set_level(logging.INFO) - - expected_list_txt = study_antares_path / "input" / "areas" / "list.txt" - - expected_list_txt_content = """area1 -area2 -""" - - # When - local_study.create_area("area1") - local_study.create_area("area2") - - with open(expected_list_txt, "r") as file: - actual_content = file.read() - - # Then - assert actual_content == 
expected_list_txt_content - assert caplog.records[0].msg == "Area area1 created successfully!" - assert caplog.records[1].msg == "Area area2 created successfully!" - - def test_areas_list_sorted_alphabetically(self, tmp_path, local_study): - # Given - areas_to_create = ["ghi", "fr", "at", "def", "abc"] - expected_list_txt = tmp_path / local_study.name / "input" / "areas" / "list.txt" - expected_list_txt_content = """abc -at -def -fr -ghi -""" - - # When - for area in areas_to_create: - local_study.create_area(area) - - with open(expected_list_txt, "r") as file: - actual_content = file.read() - - assert actual_content == expected_list_txt_content - - def test_area_optimization_ini_content(self, tmp_path, local_study): - # Given - study_antares_path = tmp_path / local_study.name - - expected_optimization_ini_path = study_antares_path / "input" / "areas" / "area1" / "optimization.ini" - - expected_optimization_ini_content = """[nodal optimization] -non-dispatchable-power = true -dispatchable-hydro-power = true -other-dispatchable-power = true -spread-unsupplied-energy-cost = 0.000000 -spread-spilled-energy-cost = 0.000000 -average-unsupplied-energy-cost = 0.000000 -average-spilled-energy-cost = 0.000000 - -[filtering] -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = hourly, daily, weekly, monthly, annual - -""" - - expected_optimization_ini = ConfigParser() - expected_optimization_ini.read_string(expected_optimization_ini_content) - - # When - local_study.create_area("area1") - - actual_optimization_ini = ConfigParser() - with open(expected_optimization_ini_path, "r") as file: - actual_optimization_ini.read_file(file) - file.seek(0) - actual_optimization_ini_content = file.read() - - # Then - assert actual_optimization_ini == expected_optimization_ini - assert actual_optimization_ini_content == expected_optimization_ini_content - - def test_custom_area_optimization_ini_content(self, tmp_path, local_study): - # Given - area_to_create = 
"area1" - area_properties = AreaProperties( - dispatch_hydro_power=False, - energy_cost_unsupplied=1.04, - energy_cost_spilled=1, - filter_by_year={FilterOption.ANNUAL, FilterOption.HOURLY, FilterOption.WEEKLY, FilterOption.HOURLY}, - ) - expected_optimization_ini = ConfigParser() - actual_optimization_ini = ConfigParser() - expected_optimization_ini_path = ( - tmp_path / local_study.name / "input/areas" / area_to_create / "optimization.ini" - ) - expected_optimization_ini_content = """[nodal optimization] -non-dispatchable-power = true -dispatchable-hydro-power = false -other-dispatchable-power = true -spread-unsupplied-energy-cost = 0.000000 -spread-spilled-energy-cost = 0.000000 -average-unsupplied-energy-cost = 1.040000 -average-spilled-energy-cost = 1.000000 - -[filtering] -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = hourly, weekly, annual - -""" - - # When - local_study.create_area(area_to_create, properties=area_properties) - expected_optimization_ini.read_string(expected_optimization_ini_content) - - with open(expected_optimization_ini_path, "r") as file: - actual_optimization_ini.read_file(file) - file.seek(0) - actual_optimization_ini_content = file.read() - - assert actual_optimization_ini == expected_optimization_ini - assert actual_optimization_ini_content == expected_optimization_ini_content - - def test_area_ui_ini_content(self, tmp_path, local_study): - # Given - study_antares_path = tmp_path / local_study.name - - expected_ui_ini_path = study_antares_path / "input" / "areas" / "area1" / "ui.ini" - - ui_ini_content = """[ui] -x = 0 -y = 0 -color_r = 230 -color_g = 108 -color_b = 44 -layers = 0 - -[layerX] -0 = 0 - -[layerY] -0 = 0 - -[layerColor] -0 = 230 , 108 , 44 - -""" - - # When - local_study.create_area("area1") - - with open(expected_ui_ini_path, "r") as file: - actual_content = file.read() - - # Then - assert actual_content == ui_ini_content - - def test_create_area_with_custom_error(self, monkeypatch, 
caplog, local_study): - # Given - caplog.set_level(logging.INFO) - - def mock_error_in_sets_ini(): - raise CustomError("An error occurred while processing area can not be created") - - monkeypatch.setattr("antares.service.local_services.area_local._sets_ini_content", mock_error_in_sets_ini) - with pytest.raises(CustomError, match="An error occurred while processing area can not be created"): - local_study.create_area("test") - - def test_create_area_with_custom_ui(self, tmp_path, local_study): - # Given - study_antares_path = tmp_path / local_study.name - # TODO: This should've been local_study._service.path, but ABCService doesn't have path - - area = "area1" - ui_ini_path = study_antares_path / "input" / "areas" / area / "ui.ini" - area_ui = AreaUi(x=123, y=321, color_rgb=[255, 230, 210]) - - # When - local_study.create_area(area, ui=area_ui) - - expected_content = """[ui] -x = 123 -y = 321 -color_r = 255 -color_g = 230 -color_b = 210 -layers = 0 - -[layerX] -0 = 123 - -[layerY] -0 = 321 - -[layerColor] -0 = 255 , 230 , 210 - -""" - - with open(ui_ini_path, "r") as file: - actual_content = file.read() - - assert actual_content == expected_content - - def test_created_area_has_ui(self, tmp_path, local_study): - # Given - area = "area1" - area_ui = AreaUiLocal(AreaUi(x=123, y=321, color_rgb=[255, 230, 210])).yield_area_ui() - - # When - local_study.create_area(area, ui=area_ui) - assert local_study.get_areas()[area].ui == area_ui - - def test_areas_have_default_properties(self, tmp_path, local_study_w_areas): - # Given - expected_default_properties = { - "nodal_optimization": { - "non-dispatchable-power": "true", - "dispatchable-hydro-power": "true", - "other-dispatchable-power": "true", - "spread-unsupplied-energy-cost": "0.000000", - "spread-spilled-energy-cost": "0.000000", - "average-unsupplied-energy-cost": "0.000000", - "average-spilled-energy-cost": "0.000000", - }, - "filtering": { - "filter-synthesis": "hourly, daily, weekly, monthly, annual", - 
"filter-year-by-year": "hourly, daily, weekly, monthly, annual", - }, - } - - # When - actual_area_properties = local_study_w_areas.get_areas()["fr"].properties - actual_properties = AreaPropertiesLocal(actual_area_properties).model_dump(exclude_none=True) - - assert expected_default_properties == actual_properties - - def test_areas_with_custom_properties(self, tmp_path, local_study): - # Given - area_to_create = "fr" - area_properties = AreaProperties( - dispatch_hydro_power=False, - spread_unsupplied_energy_cost=1, - energy_cost_spilled=3.5, - filter_by_year={FilterOption.ANNUAL, FilterOption.ANNUAL, FilterOption.HOURLY, FilterOption.WEEKLY}, - ) - expected_properties = { - "nodal_optimization": { - "non-dispatchable-power": "true", - "dispatchable-hydro-power": "false", - "other-dispatchable-power": "true", - "spread-unsupplied-energy-cost": "1.000000", - "spread-spilled-energy-cost": "0.000000", - "average-unsupplied-energy-cost": "0.000000", - "average-spilled-energy-cost": "3.500000", - }, - "filtering": { - "filter-synthesis": "hourly, daily, weekly, monthly, annual", - "filter-year-by-year": "hourly, weekly, annual", - }, - } - - # When - created_area = local_study.create_area(area_name=area_to_create, properties=area_properties) - actual_properties = AreaPropertiesLocal(created_area.properties).model_dump(exclude_none=True) - - assert expected_properties == actual_properties - - def test_areas_ini_has_correct_sections(self, actual_thermal_areas_ini): - # Given - expected_areas_ini_sections = ["unserverdenergycost", "spilledenergycost"] - - # Then - assert actual_thermal_areas_ini.parsed_ini.sections() == expected_areas_ini_sections - - def test_areas_ini_has_correct_default_content(self, actual_thermal_areas_ini): - # Given - expected_areas_ini_contents = """[unserverdenergycost] -fr = 0.000000 -it = 0.000000 -at = 0.000000 - -[spilledenergycost] -fr = 0.000000 -it = 0.000000 -at = 0.000000 - -""" - expected_areas_ini = ConfigParser() - 
expected_areas_ini.read_string(expected_areas_ini_contents) - - # When - with actual_thermal_areas_ini.ini_path.open("r") as areas_ini_file: - actual_areas_ini_contents = areas_ini_file.read() - - # Then - assert actual_areas_ini_contents == expected_areas_ini_contents - assert actual_thermal_areas_ini.parsed_ini.sections() == expected_areas_ini.sections() - assert actual_thermal_areas_ini.parsed_ini == expected_areas_ini - - def test_adequacy_patch_ini_has_correct_section(self, actual_adequacy_patch_ini): - expected_sections = ["adequacy-patch"] - assert actual_adequacy_patch_ini.parsed_ini.sections() == expected_sections - - def test_adequacy_patch_ini_has_correct_content(self, actual_adequacy_patch_ini): - # Given - expected_content = """[adequacy-patch] -adequacy-patch-mode = outside - -""" - expected_ini = ConfigParser() - expected_ini.read_string(expected_content) - - # When - with actual_adequacy_patch_ini.ini_path.open("r") as adequacy_patch_ini_file: - actual_content = adequacy_patch_ini_file.read() - - assert actual_content == expected_content - assert actual_adequacy_patch_ini.parsed_ini.sections() == expected_ini.sections() - assert actual_adequacy_patch_ini.parsed_ini == expected_ini - - def test_created_area_has_hydro(self, local_study_w_areas): - assert local_study_w_areas.get_areas()["fr"].hydro - assert isinstance(local_study_w_areas.get_areas()["it"].hydro, Hydro) - - -class TestCreateLink: - def test_create_link(self, tmp_path, local_study_w_areas): - # Given - link_to_create = "fr_it" - - # When - area_from, area_to = link_to_create.split("_") - link_created = local_study_w_areas.create_link( - area_from=local_study_w_areas.get_areas()[area_from], - area_to=local_study_w_areas.get_areas()[area_to], - existing_areas=local_study_w_areas.get_areas(), - ) - - assert isinstance(link_created, Link) - - def test_unknown_area_errors(self, tmp_path, local_study_w_areas): - # Given - link_to_create = "es_fr" - fake_study_name = "nonExistantStudy" - 
fake_config = LocalConfiguration(Path("/fake/path"), fake_study_name) - - # When - area_from, area_to = link_to_create.split("_") - area_from = Area( - name=area_from, - area_service=AreaLocalService(fake_config, fake_study_name), - storage_service=ShortTermStorageLocalService(fake_config, fake_study_name), - thermal_service=ThermalLocalService(fake_config, fake_study_name), - renewable_service=RenewableLocalService(fake_config, fake_study_name), - ) - area_to = local_study_w_areas.get_areas()[area_to] - - with pytest.raises( - LinkCreationError, - match=f"Could not create the link {area_from.name} / {area_to.name}: {area_from.name} does not exist", - ): - local_study_w_areas.create_link( - area_from=area_from, area_to=area_to, existing_areas=local_study_w_areas.get_areas() - ) - - def test_study_areas_not_provided_errors(self, tmp_path, local_study_w_areas): - # With - area_from = local_study_w_areas.get_areas()["fr"] - area_to = local_study_w_areas.get_areas()["it"] - test_service = LinkLocalService( - local_study_w_areas.service.config, - local_study_w_areas.name, - ) - - with pytest.raises( - LinkCreationError, - match=f"Could not create the link {area_from.name} / {area_to.name}: Cannot verify existing areas.", - ): - test_service.create_link( - area_from=area_from, - area_to=area_to, - existing_areas=None, - ) - - def test_create_link_alphabetically(self, tmp_path, local_study): - # Given - areas_to_create = ["fr", "at"] - for area in areas_to_create: - local_study.create_area(area) - link_to_create = "fr_at" - - # When - area_from, area_to = link_to_create.split("_") - link_created = local_study.create_link( - area_from=local_study.get_areas()[area_from], - area_to=local_study.get_areas()[area_to], - existing_areas=local_study.get_areas(), - ) - - assert link_created.area_from.name == "at" - assert link_created.area_to.name == "fr" - - def test_create_link_sets_ini_content(self, tmp_path, local_study_w_areas): - # Given - link_to_create = "fr_it" - 
expected_content = """[it] -hurdles-cost = false -loop-flow = false -use-phase-shifter = false -transmission-capacities = enabled -asset-type = ac -link-style = plain -link-width = 1 -colorr = 112 -colorg = 112 -colorb = 112 -display-comments = true -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = hourly, daily, weekly, monthly, annual - -""" - - # When - area_from, area_to = link_to_create.split("_") - - local_study_w_areas.create_link( - area_from=local_study_w_areas.get_areas()[area_from], - area_to=local_study_w_areas.get_areas()[area_to], - existing_areas=local_study_w_areas.get_areas(), - ) - - ini_file = tmp_path / local_study_w_areas.name / "input/links" / area_from / "properties.ini" - with open(ini_file, "r") as file: - actual_content = file.read() - - assert actual_content == expected_content - - def test_created_link_has_default_local_properties(self, tmp_path, local_study_w_areas): - # Given - link_to_create = "fr_it" - expected_ini_content = """[it] -hurdles-cost = false -loop-flow = false -use-phase-shifter = false -transmission-capacities = enabled -asset-type = ac -link-style = plain -link-width = 1 -colorr = 112 -colorg = 112 -colorb = 112 -display-comments = true -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = hourly, daily, weekly, monthly, annual - -""" - expected_ini = ConfigParser() - expected_ini.read_string(expected_ini_content) - default_properties = LinkPropertiesLocal(LinkProperties()).yield_link_properties() - - # When - area_from, area_to = link_to_create.split("_") - created_link = local_study_w_areas.create_link( - area_from=local_study_w_areas.get_areas()[area_from], - area_to=local_study_w_areas.get_areas()[area_to], - existing_areas=local_study_w_areas.get_areas(), - ) - ini_file = tmp_path / local_study_w_areas.name / "input/links" / area_from / "properties.ini" - actual_ini = ConfigParser() - with open(ini_file, "r") as file: - actual_ini.read_file(file) - 
file.seek(0) - actual_ini_content = file.read() - - assert isinstance(created_link.properties, LinkProperties) - assert created_link.properties.model_dump(exclude_none=True) - assert created_link.properties == default_properties - assert actual_ini == expected_ini - assert actual_ini_content == expected_ini_content - - def test_created_link_has_custom_properties(self, tmp_path, local_study_w_areas): - # Given - link_to_create = "fr_it" - link_properties = LinkProperties( - loop_flow=True, - use_phase_shifter=True, - transmission_capacities=TransmissionCapacities.INFINITE, - filter_year_by_year={FilterOption.WEEKLY, FilterOption.DAILY}, - ) - expected_ini_content = """[it] -hurdles-cost = false -loop-flow = true -use-phase-shifter = true -transmission-capacities = infinite -asset-type = ac -link-style = plain -link-width = 1 -colorr = 112 -colorg = 112 -colorb = 112 -display-comments = true -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = daily, weekly - -""" - expected_ini = ConfigParser() - expected_ini.read_string(expected_ini_content) - - # When - area_from, area_to = link_to_create.split("_") - link_created = local_study_w_areas.create_link( - area_from=local_study_w_areas.get_areas()[area_from], - area_to=local_study_w_areas.get_areas()[area_to], - properties=link_properties, - existing_areas=local_study_w_areas.get_areas(), - ) - created_ini_file = tmp_path / local_study_w_areas.name / "input/links" / area_from / "properties.ini" - actual_ini = ConfigParser() - with open(created_ini_file, "r") as file: - actual_ini.read_file(file) - file.seek(0) - actual_ini_content = file.read() - - # Then - assert actual_ini_content == expected_ini_content - assert link_created.properties == LinkPropertiesLocal(link_properties).yield_link_properties() - assert expected_ini == actual_ini - - def test_multiple_links_created_from_same_area(self, tmp_path, local_study_w_areas): - # Given - local_study_w_areas.create_area("at") - links_to_create 
= ["fr_at", "at_it"] - expected_ini_string = """[fr] -hurdles-cost = false -loop-flow = false -use-phase-shifter = false -transmission-capacities = enabled -asset-type = ac -link-style = plain -link-width = 1 -colorr = 112 -colorg = 112 -colorb = 112 -display-comments = true -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = hourly, daily, weekly, monthly, annual - -[it] -hurdles-cost = false -loop-flow = false -use-phase-shifter = false -transmission-capacities = enabled -asset-type = ac -link-style = plain -link-width = 1 -colorr = 112 -colorg = 112 -colorb = 112 -display-comments = true -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = hourly, daily, weekly, monthly, annual - -""" - expected_ini = ConfigParser() - expected_ini.read_string(expected_ini_string) - properties_ini_file = tmp_path / local_study_w_areas.name / "input/links" / "at" / "properties.ini" - - # When - for link in links_to_create: - area_from, area_to = link.split("_") - local_study_w_areas.create_link( - area_from=local_study_w_areas.get_areas()[area_from], - area_to=local_study_w_areas.get_areas()[area_to], - existing_areas=local_study_w_areas.get_areas(), - ) - - # Then - actual_ini = ConfigParser() - with open(properties_ini_file, "r") as file: - actual_ini.read_file(file) - file.seek(0) - actual_ini_string = file.read() - - for section in expected_ini.sections(): - assert actual_ini.has_section(section) - - assert actual_ini == expected_ini - assert actual_ini_string == expected_ini_string - - def test_multiple_links_created_from_same_area_are_alphabetical(self, tmp_path, local_study_w_areas): - # Given - local_study_w_areas.create_area("at") - links_to_create = ["at_it", "fr_at"] - expected_ini_string = """[fr] -hurdles-cost = false -loop-flow = false -use-phase-shifter = false -transmission-capacities = enabled -asset-type = ac -link-style = plain -link-width = 1 -colorr = 112 -colorg = 112 -colorb = 112 -display-comments = 
true -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = hourly, daily, weekly, monthly, annual - -[it] -hurdles-cost = false -loop-flow = false -use-phase-shifter = false -transmission-capacities = enabled -asset-type = ac -link-style = plain -link-width = 1 -colorr = 112 -colorg = 112 -colorb = 112 -display-comments = true -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = hourly, daily, weekly, monthly, annual - -""" - expected_ini = ConfigParser() - expected_ini.read_string(expected_ini_string) - properties_ini_file = tmp_path / local_study_w_areas.name / "input/links" / "at" / "properties.ini" - - # When - for link in links_to_create: - area_from, area_to = link.split("_") - local_study_w_areas.create_link( - area_from=local_study_w_areas.get_areas()[area_from], - area_to=local_study_w_areas.get_areas()[area_to], - existing_areas=local_study_w_areas.get_areas(), - ) - - # Then - actual_ini = ConfigParser() - with open(properties_ini_file, "r") as file: - actual_ini.read_file(file) - file.seek(0) - actual_ini_string = file.read() - - assert actual_ini == expected_ini - assert actual_ini_string == expected_ini_string - - def test_duplicate_links_raises_error(self, tmp_path, local_study_w_links): - # Given - link_to_create = "fr_it" - - # When - area_from, area_to = link_to_create.split("_") - - # Then - with pytest.raises( - CustomError, - match="""Link exists already, section already exists in properties.ini: - -Section 'it' already exists""", - ): - local_study_w_links.create_link( - area_from=local_study_w_links.get_areas()[area_from], - area_to=local_study_w_links.get_areas()[area_to], - existing_areas=local_study_w_links.get_areas(), - ) - - def test_created_link_has_default_ui_values(self, tmp_path, local_study_w_areas): - # Given - link_to_create = "fr / it" - actual_ini_file = tmp_path / local_study_w_areas.name / "input" / "links" / "fr" / "properties.ini" - actual_ini = ConfigParser() - 
expected_ini_string = """[it] -hurdles-cost = false -loop-flow = false -use-phase-shifter = false -transmission-capacities = enabled -asset-type = ac -link-style = plain -link-width = 1 -colorr = 112 -colorg = 112 -colorb = 112 -display-comments = true -filter-synthesis = hourly, daily, weekly, monthly, annual -filter-year-by-year = hourly, daily, weekly, monthly, annual - -""" - expected_ini = ConfigParser() - expected_ini.read_string(expected_ini_string) - - # When - area_from, area_to = link_to_create.split(" / ") - local_study_w_areas.create_link( - area_from=local_study_w_areas.get_areas()[area_from], - area_to=local_study_w_areas.get_areas()[area_to], - existing_areas=local_study_w_areas.get_areas(), - ) - with open(actual_ini_file, "r") as file: - actual_ini.read_file(file) - file.seek(0) - actual_ini_string = file.read() - - # Then - assert isinstance(local_study_w_areas.get_links()[link_to_create].ui, LinkUi) - assert actual_ini == expected_ini - assert actual_ini_string == expected_ini_string - - def test_created_link_with_custom_ui_values(self, tmp_path, local_study_w_areas): - # Given - link_to_create = "fr / it" - actual_ini_file = tmp_path / local_study_w_areas.name / "input" / "links" / "fr" / "properties.ini" - actual_ini = ConfigParser() - expected_ini_string = """[it] -hurdles-cost = true -loop-flow = false -use-phase-shifter = false -transmission-capacities = ignore -asset-type = gaz -link-style = dot -link-width = 1 -colorr = 234 -colorg = 123 -colorb = 0 -display-comments = true -filter-synthesis = hourly, weekly, monthly -filter-year-by-year = hourly, daily, weekly, monthly, annual - -""" - expected_ini = ConfigParser() - expected_ini.read_string(expected_ini_string) - expected_properties = LinkProperties( - hurdles_cost=True, - transmission_capacities=TransmissionCapacities.DISABLED, - asset_type=AssetType.GAZ, - filter_synthesis={FilterOption.MONTHLY, FilterOption.HOURLY, FilterOption.WEEKLY}, - ) - expected_ui = 
LinkUi(link_style=LinkStyle.DOT, colorr=234, colorg=123, colorb=0) - - # When - area_from, area_to = link_to_create.split(" / ") - created_link = local_study_w_areas.create_link( - area_from=local_study_w_areas.get_areas()[area_from], - area_to=local_study_w_areas.get_areas()[area_to], - properties=expected_properties, - ui=expected_ui, - existing_areas=local_study_w_areas.get_areas(), - ) - with open(actual_ini_file, "r") as file: - actual_ini.read_file(file) - file.seek(0) - actual_ini_string = file.read() - actual_properties = created_link.properties - actual_ui = created_link.ui - - # Then - assert isinstance(local_study_w_areas.get_links()[link_to_create].ui, LinkUi) - assert actual_ini == expected_ini - assert actual_ini_string == expected_ini_string - assert actual_properties == LinkPropertiesLocal(expected_properties).yield_link_properties() - assert actual_ui == LinkUiLocal(expected_ui).yield_link_ui() +import logging +import os +import time +from configparser import ConfigParser +from pathlib import Path + +import pytest + +from antares.config.local_configuration import LocalConfiguration +from antares.exceptions.exceptions import CustomError, LinkCreationError +from antares.model.area import ( + AreaProperties, + AreaUi, + AreaUiLocal, + AreaPropertiesLocal, + Area, +) +from antares.model.commons import FilterOption +from antares.model.hydro import Hydro +from antares.model.link import ( + Link, + LinkProperties, + LinkPropertiesLocal, + TransmissionCapacities, + LinkUi, + AssetType, + LinkStyle, + LinkUiLocal, +) +from antares.model.study import create_study_local +from antares.service.local_services.area_local import AreaLocalService +from antares.service.local_services.link_local import LinkLocalService +from antares.service.local_services.renewable_local import RenewableLocalService +from antares.service.local_services.st_storage_local import ShortTermStorageLocalService +from antares.service.local_services.thermal_local import ThermalLocalService + 
+ +class TestCreateStudy: + def test_create_study_success(self, tmp_path, caplog): + # Given + study_name = "studyTest" + version = "850" + caplog.set_level(logging.INFO) + + expected_subdirectories = ["input", "layers", "output", "setting", "user"] + + expected_study_path = tmp_path / "studyTest" + + # When + create_study_local(study_name, version, LocalConfiguration(tmp_path, study_name)) + + # Then + assert os.path.exists(expected_study_path) + assert os.path.isdir(expected_study_path) + + for subdirectory in expected_subdirectories: + subdirectory_path = expected_study_path / subdirectory + assert subdirectory_path.exists() + assert subdirectory_path.is_dir() + + # Then + assert caplog.records[0].msg == f"Study successfully created: {study_name}" + + def test_desktop_ini_creation(self, tmp_path, local_study): + # Given + expected_desktop_path = tmp_path / local_study.name / "Desktop.ini" + desktop_ini_content = f"""[.ShellClassInfo] +IconFile = settings/resources/study.ico +IconIndex = 0 +InfoTip = Antares Study {local_study.version}: {local_study.name} +""" + + # When + with open(expected_desktop_path, "r") as file: + actual_content = file.read() + + # Then + assert actual_content == desktop_ini_content + assert expected_desktop_path.exists() + assert expected_desktop_path.is_file() + + def test_study_antares_content(self, monkeypatch, tmp_path): + # Given + study_name = "studyTest" + version = "850" + expected_study_antares_path = tmp_path / "studyTest/study.antares" + antares_content = f"""[antares] +version = {version} +caption = {study_name} +created = {"123"} +lastsave = {"123"} +author = Unknown +""" + + monkeypatch.setattr(time, "time", lambda: "123") + + # When + create_study_local(study_name, version, LocalConfiguration(tmp_path, study_name)) + with open(expected_study_antares_path, "r") as file: + actual_content = file.read() + + # Then + assert actual_content == antares_content + + def test_directory_not_exists_error(self, caplog): + # Given + 
local_path = Path("/fake/path/") + study_name = "study_name" + with caplog.at_level(logging.ERROR): + with pytest.raises(ValueError, match=f"Provided directory {local_path} does not exist."): + create_study_local(study_name, "880", LocalConfiguration(local_path, study_name)) + + def test_verify_study_already_exists_error(self, monkeypatch, tmp_path, caplog): + # Given + study_name = "studyTest" + version = "850" + + def mock_verify_study_already_exists(study_directory): + raise FileExistsError(f"Failed to create study. Study {study_directory} already exists") + + monkeypatch.setattr( + "antares.model.study._verify_study_already_exists", + mock_verify_study_already_exists, + ) + + # When + with caplog.at_level(logging.ERROR): + with pytest.raises( + FileExistsError, + match=f"Failed to create study. Study {tmp_path}/{study_name} already exists", + ): + create_study_local(study_name, version, LocalConfiguration(tmp_path, study_name)) + + +class TestCreateArea: + def test_areas_sets_ini_content(self, tmp_path, local_study): + # Given + expected_sets_path = tmp_path / local_study.name / "input" / "areas" / "sets.ini" + + expected_sets_ini_content = """[all areas] +caption = All areas +comments = Spatial aggregates on all areas +output = false +apply-filter = add-all + +""" + + # When + local_study.create_area("area_test") + + with open(expected_sets_path, "r") as file: + actual_content = file.read() + + # Then + assert actual_content == expected_sets_ini_content + + def test_areas_list_txt_content(self, tmp_path, caplog, local_study): + # Given + study_antares_path = tmp_path / local_study.name + caplog.set_level(logging.INFO) + + expected_list_txt = study_antares_path / "input" / "areas" / "list.txt" + + expected_list_txt_content = """area1 +area2 +""" + + # When + local_study.create_area("area1") + local_study.create_area("area2") + + with open(expected_list_txt, "r") as file: + actual_content = file.read() + + # Then + assert actual_content == 
expected_list_txt_content + assert caplog.records[0].msg == "Area area1 created successfully!" + assert caplog.records[1].msg == "Area area2 created successfully!" + + def test_areas_list_sorted_alphabetically(self, tmp_path, local_study): + # Given + areas_to_create = ["ghi", "fr", "at", "def", "abc"] + expected_list_txt = tmp_path / local_study.name / "input" / "areas" / "list.txt" + expected_list_txt_content = """abc +at +def +fr +ghi +""" + + # When + for area in areas_to_create: + local_study.create_area(area) + + with open(expected_list_txt, "r") as file: + actual_content = file.read() + + assert actual_content == expected_list_txt_content + + def test_area_optimization_ini_content(self, tmp_path, local_study): + # Given + study_antares_path = tmp_path / local_study.name + + expected_optimization_ini_path = study_antares_path / "input" / "areas" / "area1" / "optimization.ini" + + expected_optimization_ini_content = """[nodal optimization] +non-dispatchable-power = true +dispatchable-hydro-power = true +other-dispatchable-power = true +spread-unsupplied-energy-cost = 0.000000 +spread-spilled-energy-cost = 0.000000 +average-unsupplied-energy-cost = 0.000000 +average-spilled-energy-cost = 0.000000 + +[filtering] +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = hourly, daily, weekly, monthly, annual + +""" + + expected_optimization_ini = ConfigParser() + expected_optimization_ini.read_string(expected_optimization_ini_content) + + # When + local_study.create_area("area1") + + actual_optimization_ini = ConfigParser() + with open(expected_optimization_ini_path, "r") as file: + actual_optimization_ini.read_file(file) + file.seek(0) + actual_optimization_ini_content = file.read() + + # Then + assert actual_optimization_ini == expected_optimization_ini + assert actual_optimization_ini_content == expected_optimization_ini_content + + def test_custom_area_optimization_ini_content(self, tmp_path, local_study): + # Given + area_to_create = 
"area1" + area_properties = AreaProperties( + dispatch_hydro_power=False, + energy_cost_unsupplied=1.04, + energy_cost_spilled=1, + filter_by_year={ + FilterOption.ANNUAL, + FilterOption.HOURLY, + FilterOption.WEEKLY, + FilterOption.HOURLY, + }, + ) + expected_optimization_ini = ConfigParser() + actual_optimization_ini = ConfigParser() + expected_optimization_ini_path = ( + tmp_path / local_study.name / "input/areas" / area_to_create / "optimization.ini" + ) + expected_optimization_ini_content = """[nodal optimization] +non-dispatchable-power = true +dispatchable-hydro-power = false +other-dispatchable-power = true +spread-unsupplied-energy-cost = 0.000000 +spread-spilled-energy-cost = 0.000000 +average-unsupplied-energy-cost = 1.040000 +average-spilled-energy-cost = 1.000000 + +[filtering] +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = hourly, weekly, annual + +""" + + # When + local_study.create_area(area_to_create, properties=area_properties) + expected_optimization_ini.read_string(expected_optimization_ini_content) + + with open(expected_optimization_ini_path, "r") as file: + actual_optimization_ini.read_file(file) + file.seek(0) + actual_optimization_ini_content = file.read() + + assert actual_optimization_ini == expected_optimization_ini + assert actual_optimization_ini_content == expected_optimization_ini_content + + def test_area_ui_ini_content(self, tmp_path, local_study): + # Given + study_antares_path = tmp_path / local_study.name + + expected_ui_ini_path = study_antares_path / "input" / "areas" / "area1" / "ui.ini" + + ui_ini_content = """[ui] +x = 0 +y = 0 +color_r = 230 +color_g = 108 +color_b = 44 +layers = 0 + +[layerX] +0 = 0 + +[layerY] +0 = 0 + +[layerColor] +0 = 230 , 108 , 44 + +""" + + # When + local_study.create_area("area1") + + with open(expected_ui_ini_path, "r") as file: + actual_content = file.read() + + # Then + assert actual_content == ui_ini_content + + def test_create_area_with_custom_error(self, 
monkeypatch, caplog, local_study): + # Given + caplog.set_level(logging.INFO) + + def mock_error_in_sets_ini(): + raise CustomError("An error occurred while processing area can not be created") + + monkeypatch.setattr( + "antares.service.local_services.area_local._sets_ini_content", + mock_error_in_sets_ini, + ) + with pytest.raises( + CustomError, + match="An error occurred while processing area can not be created", + ): + local_study.create_area("test") + + def test_create_area_with_custom_ui(self, tmp_path, local_study): + # Given + study_antares_path = tmp_path / local_study.name + # TODO: This should've been local_study._service.path, but ABCService doesn't have path + + area = "area1" + ui_ini_path = study_antares_path / "input" / "areas" / area / "ui.ini" + area_ui = AreaUi(x=123, y=321, color_rgb=[255, 230, 210]) + + # When + local_study.create_area(area, ui=area_ui) + + expected_content = """[ui] +x = 123 +y = 321 +color_r = 255 +color_g = 230 +color_b = 210 +layers = 0 + +[layerX] +0 = 123 + +[layerY] +0 = 321 + +[layerColor] +0 = 255 , 230 , 210 + +""" + + with open(ui_ini_path, "r") as file: + actual_content = file.read() + + assert actual_content == expected_content + + def test_created_area_has_ui(self, tmp_path, local_study): + # Given + area = "area1" + area_ui = AreaUiLocal(AreaUi(x=123, y=321, color_rgb=[255, 230, 210])).yield_area_ui() + + # When + local_study.create_area(area, ui=area_ui) + assert local_study.get_areas()[area].ui == area_ui + + def test_areas_have_default_properties(self, tmp_path, local_study_w_areas): + # Given + expected_default_properties = { + "nodal_optimization": { + "non-dispatchable-power": "true", + "dispatchable-hydro-power": "true", + "other-dispatchable-power": "true", + "spread-unsupplied-energy-cost": "0.000000", + "spread-spilled-energy-cost": "0.000000", + "average-unsupplied-energy-cost": "0.000000", + "average-spilled-energy-cost": "0.000000", + }, + "filtering": { + "filter-synthesis": "hourly, daily, weekly, 
monthly, annual", + "filter-year-by-year": "hourly, daily, weekly, monthly, annual", + }, + } + + # When + actual_area_properties = local_study_w_areas.get_areas()["fr"].properties + actual_properties = AreaPropertiesLocal(actual_area_properties).model_dump(exclude_none=True) + + assert expected_default_properties == actual_properties + + def test_areas_with_custom_properties(self, tmp_path, local_study): + # Given + area_to_create = "fr" + area_properties = AreaProperties( + dispatch_hydro_power=False, + spread_unsupplied_energy_cost=1, + energy_cost_spilled=3.5, + filter_by_year={ + FilterOption.ANNUAL, + FilterOption.ANNUAL, + FilterOption.HOURLY, + FilterOption.WEEKLY, + }, + ) + expected_properties = { + "nodal_optimization": { + "non-dispatchable-power": "true", + "dispatchable-hydro-power": "false", + "other-dispatchable-power": "true", + "spread-unsupplied-energy-cost": "1.000000", + "spread-spilled-energy-cost": "0.000000", + "average-unsupplied-energy-cost": "0.000000", + "average-spilled-energy-cost": "3.500000", + }, + "filtering": { + "filter-synthesis": "hourly, daily, weekly, monthly, annual", + "filter-year-by-year": "hourly, weekly, annual", + }, + } + + # When + created_area = local_study.create_area(area_name=area_to_create, properties=area_properties) + actual_properties = AreaPropertiesLocal(created_area.properties).model_dump(exclude_none=True) + + assert expected_properties == actual_properties + + def test_areas_ini_has_correct_sections(self, actual_thermal_areas_ini): + # Given + expected_areas_ini_sections = ["unserverdenergycost", "spilledenergycost"] + + # Then + assert actual_thermal_areas_ini.parsed_ini.sections() == expected_areas_ini_sections + + def test_areas_ini_has_correct_default_content(self, actual_thermal_areas_ini): + # Given + expected_areas_ini_contents = """[unserverdenergycost] +fr = 0.000000 +it = 0.000000 +at = 0.000000 + +[spilledenergycost] +fr = 0.000000 +it = 0.000000 +at = 0.000000 + +""" + expected_areas_ini = 
ConfigParser() + expected_areas_ini.read_string(expected_areas_ini_contents) + + # When + with actual_thermal_areas_ini.ini_path.open("r") as areas_ini_file: + actual_areas_ini_contents = areas_ini_file.read() + + # Then + assert actual_areas_ini_contents == expected_areas_ini_contents + assert actual_thermal_areas_ini.parsed_ini.sections() == expected_areas_ini.sections() + assert actual_thermal_areas_ini.parsed_ini == expected_areas_ini + + def test_adequacy_patch_ini_has_correct_section(self, actual_adequacy_patch_ini): + expected_sections = ["adequacy-patch"] + assert actual_adequacy_patch_ini.parsed_ini.sections() == expected_sections + + def test_adequacy_patch_ini_has_correct_content(self, actual_adequacy_patch_ini): + # Given + expected_content = """[adequacy-patch] +adequacy-patch-mode = outside + +""" + expected_ini = ConfigParser() + expected_ini.read_string(expected_content) + + # When + with actual_adequacy_patch_ini.ini_path.open("r") as adequacy_patch_ini_file: + actual_content = adequacy_patch_ini_file.read() + + assert actual_content == expected_content + assert actual_adequacy_patch_ini.parsed_ini.sections() == expected_ini.sections() + assert actual_adequacy_patch_ini.parsed_ini == expected_ini + + def test_created_area_has_hydro(self, local_study_w_areas): + assert local_study_w_areas.get_areas()["fr"].hydro + assert isinstance(local_study_w_areas.get_areas()["it"].hydro, Hydro) + + +class TestCreateLink: + def test_create_link(self, tmp_path, local_study_w_areas): + # Given + link_to_create = "fr_it" + + # When + area_from, area_to = link_to_create.split("_") + link_created = local_study_w_areas.create_link( + area_from=local_study_w_areas.get_areas()[area_from], + area_to=local_study_w_areas.get_areas()[area_to], + existing_areas=local_study_w_areas.get_areas(), + ) + + assert isinstance(link_created, Link) + + def test_unknown_area_errors(self, tmp_path, local_study_w_areas): + # Given + link_to_create = "es_fr" + fake_study_name = 
"nonExistantStudy" + fake_config = LocalConfiguration(Path("/fake/path"), fake_study_name) + + # When + area_from, area_to = link_to_create.split("_") + area_from = Area( + name=area_from, + area_service=AreaLocalService(fake_config, fake_study_name), + storage_service=ShortTermStorageLocalService(fake_config, fake_study_name), + thermal_service=ThermalLocalService(fake_config, fake_study_name), + renewable_service=RenewableLocalService(fake_config, fake_study_name), + ) + area_to = local_study_w_areas.get_areas()[area_to] + + with pytest.raises( + LinkCreationError, + match=f"Could not create the link {area_from.name} / {area_to.name}: {area_from.name} does not exist", + ): + local_study_w_areas.create_link( + area_from=area_from, + area_to=area_to, + existing_areas=local_study_w_areas.get_areas(), + ) + + def test_study_areas_not_provided_errors(self, tmp_path, local_study_w_areas): + # With + area_from = local_study_w_areas.get_areas()["fr"] + area_to = local_study_w_areas.get_areas()["it"] + test_service = LinkLocalService( + local_study_w_areas.service.config, + local_study_w_areas.name, + ) + + with pytest.raises( + LinkCreationError, + match=f"Could not create the link {area_from.name} / {area_to.name}: Cannot verify existing areas.", + ): + test_service.create_link( + area_from=area_from, + area_to=area_to, + existing_areas=None, + ) + + def test_create_link_alphabetically(self, tmp_path, local_study): + # Given + areas_to_create = ["fr", "at"] + for area in areas_to_create: + local_study.create_area(area) + link_to_create = "fr_at" + + # When + area_from, area_to = link_to_create.split("_") + link_created = local_study.create_link( + area_from=local_study.get_areas()[area_from], + area_to=local_study.get_areas()[area_to], + existing_areas=local_study.get_areas(), + ) + + assert link_created.area_from.name == "at" + assert link_created.area_to.name == "fr" + + def test_create_link_sets_ini_content(self, tmp_path, local_study_w_areas): + # Given + 
link_to_create = "fr_it" + expected_content = """[it] +hurdles-cost = false +loop-flow = false +use-phase-shifter = false +transmission-capacities = enabled +asset-type = ac +link-style = plain +link-width = 1 +colorr = 112 +colorg = 112 +colorb = 112 +display-comments = true +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = hourly, daily, weekly, monthly, annual + +""" + + # When + area_from, area_to = link_to_create.split("_") + + local_study_w_areas.create_link( + area_from=local_study_w_areas.get_areas()[area_from], + area_to=local_study_w_areas.get_areas()[area_to], + existing_areas=local_study_w_areas.get_areas(), + ) + + ini_file = tmp_path / local_study_w_areas.name / "input/links" / area_from / "properties.ini" + with open(ini_file, "r") as file: + actual_content = file.read() + + assert actual_content == expected_content + + def test_created_link_has_default_local_properties(self, tmp_path, local_study_w_areas): + # Given + link_to_create = "fr_it" + expected_ini_content = """[it] +hurdles-cost = false +loop-flow = false +use-phase-shifter = false +transmission-capacities = enabled +asset-type = ac +link-style = plain +link-width = 1 +colorr = 112 +colorg = 112 +colorb = 112 +display-comments = true +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = hourly, daily, weekly, monthly, annual + +""" + expected_ini = ConfigParser() + expected_ini.read_string(expected_ini_content) + default_properties = LinkPropertiesLocal(LinkProperties()).yield_link_properties() + + # When + area_from, area_to = link_to_create.split("_") + created_link = local_study_w_areas.create_link( + area_from=local_study_w_areas.get_areas()[area_from], + area_to=local_study_w_areas.get_areas()[area_to], + existing_areas=local_study_w_areas.get_areas(), + ) + ini_file = tmp_path / local_study_w_areas.name / "input/links" / area_from / "properties.ini" + actual_ini = ConfigParser() + with open(ini_file, "r") as file: + 
actual_ini.read_file(file) + file.seek(0) + actual_ini_content = file.read() + + assert isinstance(created_link.properties, LinkProperties) + assert created_link.properties.model_dump(exclude_none=True) + assert created_link.properties == default_properties + assert actual_ini == expected_ini + assert actual_ini_content == expected_ini_content + + def test_created_link_has_custom_properties(self, tmp_path, local_study_w_areas): + # Given + link_to_create = "fr_it" + link_properties = LinkProperties( + loop_flow=True, + use_phase_shifter=True, + transmission_capacities=TransmissionCapacities.INFINITE, + filter_year_by_year={FilterOption.WEEKLY, FilterOption.DAILY}, + ) + expected_ini_content = """[it] +hurdles-cost = false +loop-flow = true +use-phase-shifter = true +transmission-capacities = infinite +asset-type = ac +link-style = plain +link-width = 1 +colorr = 112 +colorg = 112 +colorb = 112 +display-comments = true +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = daily, weekly + +""" + expected_ini = ConfigParser() + expected_ini.read_string(expected_ini_content) + + # When + area_from, area_to = link_to_create.split("_") + link_created = local_study_w_areas.create_link( + area_from=local_study_w_areas.get_areas()[area_from], + area_to=local_study_w_areas.get_areas()[area_to], + properties=link_properties, + existing_areas=local_study_w_areas.get_areas(), + ) + created_ini_file = tmp_path / local_study_w_areas.name / "input/links" / area_from / "properties.ini" + actual_ini = ConfigParser() + with open(created_ini_file, "r") as file: + actual_ini.read_file(file) + file.seek(0) + actual_ini_content = file.read() + + # Then + assert actual_ini_content == expected_ini_content + assert link_created.properties == LinkPropertiesLocal(link_properties).yield_link_properties() + assert expected_ini == actual_ini + + def test_multiple_links_created_from_same_area(self, tmp_path, local_study_w_areas): + # Given + 
local_study_w_areas.create_area("at") + links_to_create = ["fr_at", "at_it"] + expected_ini_string = """[fr] +hurdles-cost = false +loop-flow = false +use-phase-shifter = false +transmission-capacities = enabled +asset-type = ac +link-style = plain +link-width = 1 +colorr = 112 +colorg = 112 +colorb = 112 +display-comments = true +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = hourly, daily, weekly, monthly, annual + +[it] +hurdles-cost = false +loop-flow = false +use-phase-shifter = false +transmission-capacities = enabled +asset-type = ac +link-style = plain +link-width = 1 +colorr = 112 +colorg = 112 +colorb = 112 +display-comments = true +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = hourly, daily, weekly, monthly, annual + +""" + expected_ini = ConfigParser() + expected_ini.read_string(expected_ini_string) + properties_ini_file = tmp_path / local_study_w_areas.name / "input/links" / "at" / "properties.ini" + + # When + for link in links_to_create: + area_from, area_to = link.split("_") + local_study_w_areas.create_link( + area_from=local_study_w_areas.get_areas()[area_from], + area_to=local_study_w_areas.get_areas()[area_to], + existing_areas=local_study_w_areas.get_areas(), + ) + + # Then + actual_ini = ConfigParser() + with open(properties_ini_file, "r") as file: + actual_ini.read_file(file) + file.seek(0) + actual_ini_string = file.read() + + for section in expected_ini.sections(): + assert actual_ini.has_section(section) + + assert actual_ini == expected_ini + assert actual_ini_string == expected_ini_string + + def test_multiple_links_created_from_same_area_are_alphabetical(self, tmp_path, local_study_w_areas): + # Given + local_study_w_areas.create_area("at") + links_to_create = ["at_it", "fr_at"] + expected_ini_string = """[fr] +hurdles-cost = false +loop-flow = false +use-phase-shifter = false +transmission-capacities = enabled +asset-type = ac +link-style = plain +link-width = 1 
+colorr = 112 +colorg = 112 +colorb = 112 +display-comments = true +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = hourly, daily, weekly, monthly, annual + +[it] +hurdles-cost = false +loop-flow = false +use-phase-shifter = false +transmission-capacities = enabled +asset-type = ac +link-style = plain +link-width = 1 +colorr = 112 +colorg = 112 +colorb = 112 +display-comments = true +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = hourly, daily, weekly, monthly, annual + +""" + expected_ini = ConfigParser() + expected_ini.read_string(expected_ini_string) + properties_ini_file = tmp_path / local_study_w_areas.name / "input/links" / "at" / "properties.ini" + + # When + for link in links_to_create: + area_from, area_to = link.split("_") + local_study_w_areas.create_link( + area_from=local_study_w_areas.get_areas()[area_from], + area_to=local_study_w_areas.get_areas()[area_to], + existing_areas=local_study_w_areas.get_areas(), + ) + + # Then + actual_ini = ConfigParser() + with open(properties_ini_file, "r") as file: + actual_ini.read_file(file) + file.seek(0) + actual_ini_string = file.read() + + assert actual_ini == expected_ini + assert actual_ini_string == expected_ini_string + + def test_duplicate_links_raises_error(self, tmp_path, local_study_w_links): + # Given + link_to_create = "fr_it" + + # When + area_from, area_to = link_to_create.split("_") + + # Then + with pytest.raises( + CustomError, + match="""Link exists already, section already exists in properties.ini: + +Section 'it' already exists""", + ): + local_study_w_links.create_link( + area_from=local_study_w_links.get_areas()[area_from], + area_to=local_study_w_links.get_areas()[area_to], + existing_areas=local_study_w_links.get_areas(), + ) + + def test_created_link_has_default_ui_values(self, tmp_path, local_study_w_areas): + # Given + link_to_create = "fr / it" + actual_ini_file = tmp_path / local_study_w_areas.name / "input" / "links" / 
"fr" / "properties.ini" + actual_ini = ConfigParser() + expected_ini_string = """[it] +hurdles-cost = false +loop-flow = false +use-phase-shifter = false +transmission-capacities = enabled +asset-type = ac +link-style = plain +link-width = 1 +colorr = 112 +colorg = 112 +colorb = 112 +display-comments = true +filter-synthesis = hourly, daily, weekly, monthly, annual +filter-year-by-year = hourly, daily, weekly, monthly, annual + +""" + expected_ini = ConfigParser() + expected_ini.read_string(expected_ini_string) + + # When + area_from, area_to = link_to_create.split(" / ") + local_study_w_areas.create_link( + area_from=local_study_w_areas.get_areas()[area_from], + area_to=local_study_w_areas.get_areas()[area_to], + existing_areas=local_study_w_areas.get_areas(), + ) + with open(actual_ini_file, "r") as file: + actual_ini.read_file(file) + file.seek(0) + actual_ini_string = file.read() + + # Then + assert isinstance(local_study_w_areas.get_links()[link_to_create].ui, LinkUi) + assert actual_ini == expected_ini + assert actual_ini_string == expected_ini_string + + def test_created_link_with_custom_ui_values(self, tmp_path, local_study_w_areas): + # Given + link_to_create = "fr / it" + actual_ini_file = tmp_path / local_study_w_areas.name / "input" / "links" / "fr" / "properties.ini" + actual_ini = ConfigParser() + expected_ini_string = """[it] +hurdles-cost = true +loop-flow = false +use-phase-shifter = false +transmission-capacities = ignore +asset-type = gaz +link-style = dot +link-width = 1 +colorr = 234 +colorg = 123 +colorb = 0 +display-comments = true +filter-synthesis = hourly, weekly, monthly +filter-year-by-year = hourly, daily, weekly, monthly, annual + +""" + expected_ini = ConfigParser() + expected_ini.read_string(expected_ini_string) + expected_properties = LinkProperties( + hurdles_cost=True, + transmission_capacities=TransmissionCapacities.DISABLED, + asset_type=AssetType.GAZ, + filter_synthesis={ + FilterOption.MONTHLY, + FilterOption.HOURLY, + 
FilterOption.WEEKLY, + }, + ) + expected_ui = LinkUi(link_style=LinkStyle.DOT, colorr=234, colorg=123, colorb=0) + + # When + area_from, area_to = link_to_create.split(" / ") + created_link = local_study_w_areas.create_link( + area_from=local_study_w_areas.get_areas()[area_from], + area_to=local_study_w_areas.get_areas()[area_to], + properties=expected_properties, + ui=expected_ui, + existing_areas=local_study_w_areas.get_areas(), + ) + with open(actual_ini_file, "r") as file: + actual_ini.read_file(file) + file.seek(0) + actual_ini_string = file.read() + actual_properties = created_link.properties + actual_ui = created_link.ui + + # Then + assert isinstance(local_study_w_areas.get_links()[link_to_create].ui, LinkUi) + assert actual_ini == expected_ini + assert actual_ini_string == expected_ini_string + assert actual_properties == LinkPropertiesLocal(expected_properties).yield_link_properties() + assert actual_ui == LinkUiLocal(expected_ui).yield_link_ui() diff --git a/tests/antares/services/local_services/test_study_read.py b/tests/antares/services/local_services/test_study_read.py index 5478abb0..a3b55bdd 100644 --- a/tests/antares/services/local_services/test_study_read.py +++ b/tests/antares/services/local_services/test_study_read.py @@ -1,37 +1,39 @@ -import logging -import os -import time -from unittest import mock - -from pathlib import Path -import pytest - -from antares.config.local_configuration import LocalConfiguration -from antares.model.study import create_study_local, read_study_local - - - -class TestReadStudy: - - def test_directory_not_exists_error(self, caplog): - - local_path = r"/fake/path/" - study_name = "study_name" - with caplog.at_level(logging.ERROR): - with pytest.raises(ValueError, match=f"Provided directory {local_path} does not exist."): - read_study_local(study_name, "880", LocalConfiguration(local_path, study_name)) - - def test_directory_permission_denied(self, tmp_path, caplog): - # Given - study_name = "studyTest" - 
restricted_dir = tmp_path / study_name - - restricted_dir.mkdir(parents=True, exist_ok=True) - restricted_path = restricted_dir / "file.txt" - restricted_path.touch(exist_ok=True) - with caplog.at_level(logging.ERROR): - with mock.patch("pathlib.Path.iterdir", - side_effect=PermissionError(f"Some content cannot be accessed in {restricted_dir}")): - escaped_path = str(restricted_dir).replace("\\", "\\\\") - with pytest.raises(PermissionError, match=f"Some content cannot be accessed in {escaped_path}"): - read_study_local(study_name, "880", LocalConfiguration(tmp_path, study_name)) +import logging +import os +import time +from unittest import mock + +from pathlib import Path +import pytest + +from antares.config.local_configuration import LocalConfiguration +from antares.model.study import create_study_local, read_study_local + + +class TestReadStudy: + def test_directory_not_exists_error(self, caplog): + local_path = r"/fake/path/" + study_name = "study_name" + with caplog.at_level(logging.ERROR): + with pytest.raises(ValueError, match=f"Provided directory {local_path} does not exist."): + read_study_local(study_name, "880", LocalConfiguration(local_path, study_name)) + + def test_directory_permission_denied(self, tmp_path, caplog): + # Given + study_name = "studyTest" + restricted_dir = tmp_path / study_name + + restricted_dir.mkdir(parents=True, exist_ok=True) + restricted_path = restricted_dir / "file.txt" + restricted_path.touch(exist_ok=True) + with caplog.at_level(logging.ERROR): + with mock.patch( + "pathlib.Path.iterdir", + side_effect=PermissionError(f"Some content cannot be accessed in {restricted_dir}"), + ): + escaped_path = str(restricted_dir).replace("\\", "\\\\") + with pytest.raises( + PermissionError, + match=f"Some content cannot be accessed in {escaped_path}", + ): + read_study_local(study_name, "880", LocalConfiguration(tmp_path, study_name)) diff --git a/tests/antares/tools/conftest.py b/tests/antares/tools/conftest.py index 112405c6..1eb14f5c 
100644 --- a/tests/antares/tools/conftest.py +++ b/tests/antares/tools/conftest.py @@ -1,15 +1,15 @@ -import numpy as np -import pandas as pd -import pytest - -from antares.tools.time_series_tool import TimeSeriesFile, TimeSeriesFileType - - -@pytest.fixture -def time_series_data(): - return pd.DataFrame(np.zeros([2, 3])) - - -@pytest.fixture -def time_series_file(tmp_path, time_series_data): - return TimeSeriesFile(TimeSeriesFileType.RESERVES, tmp_path, "test", time_series_data) +import numpy as np +import pandas as pd +import pytest + +from antares.tools.time_series_tool import TimeSeriesFile, TimeSeriesFileType + + +@pytest.fixture +def time_series_data(): + return pd.DataFrame(np.zeros([2, 3])) + + +@pytest.fixture +def time_series_file(tmp_path, time_series_data): + return TimeSeriesFile(TimeSeriesFileType.RESERVES, tmp_path, "test", time_series_data) diff --git a/tests/antares/tools/test_contents_tool.py b/tests/antares/tools/test_contents_tool.py index cdcf31a6..227f6e7b 100644 --- a/tests/antares/tools/test_contents_tool.py +++ b/tests/antares/tools/test_contents_tool.py @@ -1,33 +1,36 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
- -import numpy as np -import pandas as pd - -from antares.tools.matrix_tool import prepare_args_replace_matrix - - -def test_prepare_args(): - # Given - - matrix = np.random.randint(0, 2, size=(8760, 1)) - series = pd.DataFrame(matrix, columns=["Value"]) - series_path = "input/thermal/series/area_id/cluster_name/series" - - # Expected result - expected_result = {"action": "replace_matrix", "args": {"target": series_path, "matrix": matrix.tolist()}} - - # When - result = prepare_args_replace_matrix(series, series_path) - - # Then - assert result == expected_result +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. + +import numpy as np +import pandas as pd + +from antares.tools.matrix_tool import prepare_args_replace_matrix + + +def test_prepare_args(): + # Given + + matrix = np.random.randint(0, 2, size=(8760, 1)) + series = pd.DataFrame(matrix, columns=["Value"]) + series_path = "input/thermal/series/area_id/cluster_name/series" + + # Expected result + expected_result = { + "action": "replace_matrix", + "args": {"target": series_path, "matrix": matrix.tolist()}, + } + + # When + result = prepare_args_replace_matrix(series, series_path) + + # Then + assert result == expected_result diff --git a/tests/antares/tools/test_time_series_tool.py b/tests/antares/tools/test_time_series_tool.py index 5f4d8b21..c7ed96ea 100644 --- a/tests/antares/tools/test_time_series_tool.py +++ b/tests/antares/tools/test_time_series_tool.py @@ -1,128 +1,141 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. 
If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. - -import numpy as np -import pandas as pd -import pytest - -from antares.tools.time_series_tool import TimeSeries, TimeSeriesFile, TimeSeriesFileType - - -class TestTimeSeries: - def test_empty_ts_is_dataframe(self): - # Given - time_series = TimeSeries() - - assert isinstance(time_series.time_series, pd.DataFrame) - assert time_series.time_series.empty - assert time_series.time_series.equals(pd.DataFrame([])) - - def test_time_series_can_be_set(self, time_series_data): - # Given - time_series = TimeSeries() - expected_time_series = pd.DataFrame(np.zeros(time_series_data.shape)) - - # When - time_series.time_series = time_series_data - - # Then - assert time_series.time_series.equals(expected_time_series) - - def test_time_series_can_have_file(self, time_series_file): - # Given - time_series = TimeSeries() - - # When - time_series.local_file = time_series_file - - # Then - assert time_series.local_file.file_path.is_file() - - def test_time_series_can_update_file(self, time_series_file, time_series_data): - # Given - time_series = TimeSeries() - expected_file_content = pd.DataFrame(np.zeros(time_series_data.shape)) - update_file_content = pd.DataFrame(np.ones(time_series_data.shape)) - - # When - time_series.local_file = time_series_file - - # Then - assert time_series.time_series.equals(expected_file_content) - - # When - time_series.time_series = update_file_content - - # Then - actual_file_content = pd.read_csv( - time_series.local_file.file_path, sep="\t", header=None, index_col=None, encoding="utf-8" - ) - assert actual_file_content.equals(update_file_content) - - -class TestTimeSeriesFile: - def test_time_series_file_can_be_set(self, time_series_file, time_series_data): - # Given - time_series = TimeSeries() - - # When - time_series.local_file = time_series_file - 
- # Then - assert time_series.time_series.equals(time_series_data) - assert time_series_file.file_path.is_file() - assert time_series.local_file is not None - - def test_time_series_file_time_series_can_be_updated(self, time_series_file, time_series_data): - # Given - time_series = TimeSeries(pd.DataFrame(np.ones([2, 3]))) - - # When - time_series_file.time_series = time_series.time_series - - with pytest.raises(AssertionError): - assert time_series_file.time_series.equals(time_series_data) - # assert time_series.local_file.file_path.is_file() - assert time_series_file.time_series.equals(time_series.time_series) - - def test_no_area_provided_gives_error(self, tmp_path, time_series_data): - # Given - with pytest.raises(ValueError, match="area_id is required for this file type."): - TimeSeriesFile(ts_file_type=TimeSeriesFileType.RESERVES, study_path=tmp_path, time_series=time_series_data) - - def test_file_exists_time_series_provided_gives_error(self, tmp_path, time_series_data): - # Given - time_series = TimeSeries(time_series_data) - file_name = TimeSeriesFileType.RESERVES.value.format(area_id="test") - - # When - (tmp_path / file_name).parent.mkdir(exist_ok=True, parents=True) - time_series.time_series.to_csv(tmp_path / file_name, sep="\t", header=False, index=False, encoding="utf-8") - - # Then - with pytest.raises( - ValueError, match=f"File {tmp_path / file_name} already exists and a time series was provided." 
- ): - TimeSeriesFile(TimeSeriesFileType.RESERVES, tmp_path, "test", time_series.time_series) - - def test_file_exists_no_time_series_provided(self, tmp_path, time_series_data): - # Given - time_series = TimeSeries(time_series_data) - file_name = tmp_path / TimeSeriesFileType.RESERVES.value.format(area_id="test") - - # When - file_name.parent.mkdir(exist_ok=True, parents=True) - time_series.time_series.to_csv(file_name, sep="\t", header=False, index=False, encoding="utf-8") - time_series_file = TimeSeriesFile(TimeSeriesFileType.RESERVES, tmp_path, "test") - - # Then - assert time_series_file.time_series.equals(time_series_data) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+ +import numpy as np +import pandas as pd +import pytest + +from antares.tools.time_series_tool import ( + TimeSeries, + TimeSeriesFile, + TimeSeriesFileType, +) + + +class TestTimeSeries: + def test_empty_ts_is_dataframe(self): + # Given + time_series = TimeSeries() + + assert isinstance(time_series.time_series, pd.DataFrame) + assert time_series.time_series.empty + assert time_series.time_series.equals(pd.DataFrame([])) + + def test_time_series_can_be_set(self, time_series_data): + # Given + time_series = TimeSeries() + expected_time_series = pd.DataFrame(np.zeros(time_series_data.shape)) + + # When + time_series.time_series = time_series_data + + # Then + assert time_series.time_series.equals(expected_time_series) + + def test_time_series_can_have_file(self, time_series_file): + # Given + time_series = TimeSeries() + + # When + time_series.local_file = time_series_file + + # Then + assert time_series.local_file.file_path.is_file() + + def test_time_series_can_update_file(self, time_series_file, time_series_data): + # Given + time_series = TimeSeries() + expected_file_content = pd.DataFrame(np.zeros(time_series_data.shape)) + update_file_content = pd.DataFrame(np.ones(time_series_data.shape)) + + # When + time_series.local_file = time_series_file + + # Then + assert time_series.time_series.equals(expected_file_content) + + # When + time_series.time_series = update_file_content + + # Then + actual_file_content = pd.read_csv( + time_series.local_file.file_path, + sep="\t", + header=None, + index_col=None, + encoding="utf-8", + ) + assert actual_file_content.equals(update_file_content) + + +class TestTimeSeriesFile: + def test_time_series_file_can_be_set(self, time_series_file, time_series_data): + # Given + time_series = TimeSeries() + + # When + time_series.local_file = time_series_file + + # Then + assert time_series.time_series.equals(time_series_data) + assert time_series_file.file_path.is_file() + assert time_series.local_file is not None + + def 
test_time_series_file_time_series_can_be_updated(self, time_series_file, time_series_data): + # Given + time_series = TimeSeries(pd.DataFrame(np.ones([2, 3]))) + + # When + time_series_file.time_series = time_series.time_series + + with pytest.raises(AssertionError): + assert time_series_file.time_series.equals(time_series_data) + # assert time_series.local_file.file_path.is_file() + assert time_series_file.time_series.equals(time_series.time_series) + + def test_no_area_provided_gives_error(self, tmp_path, time_series_data): + # Given + with pytest.raises(ValueError, match="area_id is required for this file type."): + TimeSeriesFile( + ts_file_type=TimeSeriesFileType.RESERVES, + study_path=tmp_path, + time_series=time_series_data, + ) + + def test_file_exists_time_series_provided_gives_error(self, tmp_path, time_series_data): + # Given + time_series = TimeSeries(time_series_data) + file_name = TimeSeriesFileType.RESERVES.value.format(area_id="test") + + # When + (tmp_path / file_name).parent.mkdir(exist_ok=True, parents=True) + time_series.time_series.to_csv(tmp_path / file_name, sep="\t", header=False, index=False, encoding="utf-8") + + # Then + with pytest.raises( + ValueError, + match=f"File {tmp_path / file_name} already exists and a time series was provided.", + ): + TimeSeriesFile(TimeSeriesFileType.RESERVES, tmp_path, "test", time_series.time_series) + + def test_file_exists_no_time_series_provided(self, tmp_path, time_series_data): + # Given + time_series = TimeSeries(time_series_data) + file_name = tmp_path / TimeSeriesFileType.RESERVES.value.format(area_id="test") + + # When + file_name.parent.mkdir(exist_ok=True, parents=True) + time_series.time_series.to_csv(file_name, sep="\t", header=False, index=False, encoding="utf-8") + time_series_file = TimeSeriesFile(TimeSeriesFileType.RESERVES, tmp_path, "test") + + # Then + assert time_series_file.time_series.equals(time_series_data) diff --git a/tests/integration/antares_web_desktop.py 
b/tests/integration/antares_web_desktop.py index 39733640..1f725fff 100644 --- a/tests/integration/antares_web_desktop.py +++ b/tests/integration/antares_web_desktop.py @@ -1,73 +1,79 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. -import os -import socket -import subprocess -import time -from pathlib import Path - -import requests - - -class AntaresWebDesktop: - """ - Launches an AntaresWebDesktop instance for integration tests - """ - - def __init__(self): - antares_web_desktop_path = [p for p in Path(__file__).parents if p.name == "antares_craft"][ - 0 - ] / "AntaresWebDesktop" - config_path = antares_web_desktop_path / "config.yaml" - if os.name != "nt": - executable_path = antares_web_desktop_path / "AntaresWeb" / "AntaresWebServer" - else: - executable_path = antares_web_desktop_path / "AntaresWeb" / "AntaresWebServer.exe" - args = [str(executable_path), "-c", str(config_path), "--auto-upgrade-db", "--no-front"] - self.desktop_path = antares_web_desktop_path - self.host = "127.0.0.1" - self.port = 8080 - self.url = f"http://{self.host}:{self.port}" - self.process = subprocess.Popen(args, shell=True, cwd=str(antares_web_desktop_path)) - - @staticmethod - def is_port_open(host, port): - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: - return sock.connect_ex((host, port)) == 0 - - def wait_for_server_to_start(self): - timeout = 10 - interval = 1 - elapsed_time = 0 - while elapsed_time < timeout: - if self.is_port_open(self.host, self.port): - return - time.sleep(interval) - elapsed_time += interval - raise Exception("The app did not start inside the given delays.") - - def kill(self): - """ - Removes every study to ensure 
tests reproductibility + it cleans the database. - It also kills the AntaresWebDesktop instance. - """ - session = requests.Session() - res = session.get(self.url + "/v1/studies") - studies = res.json() - for study in studies: - session.delete(self.url + f"/v1/studies/{study}?children=True") - self.process.terminate() - self.process.wait() - pids = subprocess.run(["pgrep AntaresWeb"], capture_output=True, shell=True).stdout.split() - for pid in pids: - subprocess.run([f"kill {int(pid)}"], shell=True) - time.sleep(0.1) +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. +import os +import socket +import subprocess +import time +from pathlib import Path + +import requests + + +class AntaresWebDesktop: + """ + Launches an AntaresWebDesktop instance for integration tests + """ + + def __init__(self): + antares_web_desktop_path = [p for p in Path(__file__).parents if p.name == "antares_craft"][ + 0 + ] / "AntaresWebDesktop" + config_path = antares_web_desktop_path / "config.yaml" + if os.name != "nt": + executable_path = antares_web_desktop_path / "AntaresWeb" / "AntaresWebServer" + else: + executable_path = antares_web_desktop_path / "AntaresWeb" / "AntaresWebServer.exe" + args = [ + str(executable_path), + "-c", + str(config_path), + "--auto-upgrade-db", + "--no-front", + ] + self.desktop_path = antares_web_desktop_path + self.host = "127.0.0.1" + self.port = 8080 + self.url = f"http://{self.host}:{self.port}" + self.process = subprocess.Popen(args, shell=True, cwd=str(antares_web_desktop_path)) + + @staticmethod + def is_port_open(host, port): + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + return sock.connect_ex((host, port)) == 0 + + 
def wait_for_server_to_start(self): + timeout = 10 + interval = 1 + elapsed_time = 0 + while elapsed_time < timeout: + if self.is_port_open(self.host, self.port): + return + time.sleep(interval) + elapsed_time += interval + raise Exception("The app did not start inside the given delays.") + + def kill(self): + """ + Removes every study to ensure tests reproductibility + it cleans the database. + It also kills the AntaresWebDesktop instance. + """ + session = requests.Session() + res = session.get(self.url + "/v1/studies") + studies = res.json() + for study in studies: + session.delete(self.url + f"/v1/studies/{study}?children=True") + self.process.terminate() + self.process.wait() + pids = subprocess.run(["pgrep AntaresWeb"], capture_output=True, shell=True).stdout.split() + for pid in pids: + subprocess.run([f"kill {int(pid)}"], shell=True) + time.sleep(0.1) diff --git a/tests/integration/test_web_client.py b/tests/integration/test_web_client.py index 424900dc..1427f258 100644 --- a/tests/integration/test_web_client.py +++ b/tests/integration/test_web_client.py @@ -1,394 +1,444 @@ -# Copyright (c) 2024, RTE (https://www.rte-france.com) -# -# See AUTHORS.txt -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# -# SPDX-License-Identifier: MPL-2.0 -# -# This file is part of the Antares project. 
-import numpy as np -import pandas as pd -import pytest - -from antares.api_conf.api_conf import APIconf -from antares.exceptions.exceptions import ( - LoadMatrixUploadError, - STStorageMatrixUploadError, - BindingConstraintCreationError, - ConstraintMatrixUpdateError, - AreaDeletionError, -) -from antares.model.settings.advanced_parameters import UnitCommitmentMode, AdvancedProperties -from antares.model.study import create_study_api -from antares.model.binding_constraint import BindingConstraintProperties, LinkData, ClusterData, ConstraintTerm -from antares.model.link import LinkUi, LinkProperties, LinkStyle -from antares.model.renewable import RenewableClusterProperties, RenewableClusterGroup, TimeSeriesInterpretation -from antares.model.settings import StudySettings, GeneralProperties, PlaylistData -from antares.model.settings.general import Mode -from antares.model.st_storage import STStorageProperties, STStorageGroup, STStorageMatrixName -from antares.model.thermal import ThermalClusterProperties, ThermalClusterGroup - -from antares.model.area import AreaUi, AreaProperties, AdequacyPatchMode, FilterOption -from tests.integration.antares_web_desktop import AntaresWebDesktop - - -# todo add integration tests for matrices -class TestWebClient: - def test_creation_lifecycle(self): - app = AntaresWebDesktop() - app.wait_for_server_to_start() - - api_config = APIconf(api_host=app.url, token="", verify=False) - - try: - study = create_study_api("antares-craft-test", "880", api_config) - - # tests area creation with default values - area_name = "FR" - area_fr = study.create_area(area_name) - assert area_fr.name == area_name - assert area_fr.id == area_name.lower() - - # test upload load matrix - # Case that fails - wrong_load_matrix = pd.DataFrame(data=[[0]]) - with pytest.raises( - LoadMatrixUploadError, - match=f"Could not upload load matrix for area {area_fr.id}: Expected 8760 rows and received 1", - ): - area_fr.upload_load_matrix(wrong_load_matrix) - - # Case 
that succeeds - load_matrix = pd.DataFrame(data=np.zeros((8760, 1))) - area_fr.upload_load_matrix(load_matrix) - - # tests get load matrix - assert area_fr.get_load_matrix().equals(load_matrix) - - # tests area creation with ui values - area_ui = AreaUi(x=100, color_rgb=[255, 0, 0]) - area_name = "BE?" - area_be = study.create_area(area_name, ui=area_ui) - assert area_be.name == area_name - assert area_be.id == "be" - area_ui = area_be.ui - assert area_ui.x == area_ui.x - assert area_ui.color_rgb == area_ui.color_rgb - - # tests area creation with properties - properties = AreaProperties() - properties.energy_cost_spilled = 100 - properties.adequacy_patch_mode = AdequacyPatchMode.INSIDE - properties.filter_synthesis = [FilterOption.HOURLY, FilterOption.DAILY, FilterOption.HOURLY] - area_name = "DE" - area_de = study.create_area(area_name, properties=properties) - assert area_de.properties.energy_cost_spilled == 100 - assert area_de.properties.adequacy_patch_mode == AdequacyPatchMode.INSIDE - assert area_de.properties.filter_synthesis == {FilterOption.HOURLY, FilterOption.DAILY} - - # tests link creation with default values - link_de_fr = study.create_link(area_from=area_de, area_to=area_fr) - assert link_de_fr.area_from == area_de - assert link_de_fr.area_to == area_fr - assert link_de_fr.name == f"{area_de.id} / {area_fr.id}" - - # tests link creation with ui and properties - link_ui = LinkUi(colorr=44) - link_properties = LinkProperties(hurdles_cost=True) - link_properties.filter_year_by_year = [FilterOption.HOURLY] - link_be_fr = study.create_link(area_from=area_be, area_to=area_fr, ui=link_ui, properties=link_properties) - assert link_be_fr.ui.colorr == 44 - assert link_be_fr.properties.hurdles_cost - assert link_be_fr.properties.filter_year_by_year == {FilterOption.HOURLY} - - # asserts study contains all links and areas - assert study.get_areas() == {area_be.id: area_be, area_fr.id: area_fr, area_de.id: area_de} - assert study.get_links() == {link_be_fr.name: 
link_be_fr, link_de_fr.name: link_de_fr} - - # test thermal cluster creation with default values - thermal_name = "Cluster_test %?" - thermal_fr = area_fr.create_thermal_cluster(thermal_name) - assert thermal_fr.name == thermal_name.lower() - # AntaresWeb has id issues for thermal/renewable clusters, - # so we force the name in lowercase to avoid issues. - assert thermal_fr.id == "cluster_test" - - # test thermal cluster creation with properties - thermal_name = "gaz_be" - thermal_properties = ThermalClusterProperties(efficiency=55) - thermal_properties.group = ThermalClusterGroup.GAS - thermal_be = area_be.create_thermal_cluster(thermal_name, thermal_properties) - properties = thermal_be.properties - assert properties.efficiency == 55 - assert properties.group == ThermalClusterGroup.GAS - - # test thermal cluster creation with prepro_modulation matrices - thermal_name = "matrices_be" - prepro_modulation_matrix = pd.DataFrame(data=np.ones((8760, 4))) - modulation_matrix = pd.DataFrame(data=np.ones((8760, 4))) - series_matrix = pd.DataFrame(data=np.ones((8760, 4))) - CO2Cost_matrix = pd.DataFrame(data=np.ones((8760, 1))) - fuelCost_matrix = pd.DataFrame(data=np.ones((8760, 1))) - - # Case that succeeds - thermal_value_be = area_fr.create_thermal_cluster_with_matrices( - cluster_name=thermal_name, - parameters=thermal_properties, - prepro=prepro_modulation_matrix, - modulation=modulation_matrix, - series=series_matrix, - CO2Cost=CO2Cost_matrix, - fuelCost=fuelCost_matrix, - ) - - prepro = thermal_value_be.get_prepro_data_matrix() - modulation = thermal_value_be.get_prepro_modulation_matrix() - series = thermal_value_be.get_series_matrix() - CO2 = thermal_value_be.get_co2_cost_matrix() - fuel = thermal_value_be.get_fuel_cost_matrix() - - # tests get thermal matrix - assert prepro.equals(prepro_modulation_matrix) - assert modulation.equals(modulation_matrix) - assert series.equals(series_matrix) - assert CO2.equals(CO2Cost_matrix) - assert fuel.equals(fuelCost_matrix) 
- - # test renewable cluster creation with default values - renewable_name = "cluster_test %?" - renewable_fr = area_fr.create_renewable_cluster(renewable_name, None, None) - assert renewable_fr.name == renewable_name - assert renewable_fr.id == "cluster_test" - - # test renewable cluster creation with properties - renewable_name = "wind_onshore" - renewable_properties = RenewableClusterProperties(enabled=False) - renewable_properties.group = RenewableClusterGroup.WIND_ON_SHORE - renewable_onshore = area_fr.create_renewable_cluster(renewable_name, renewable_properties, None) - properties = renewable_onshore.properties - assert not properties.enabled - assert properties.group == RenewableClusterGroup.WIND_ON_SHORE - - # test short term storage creation with default values - st_storage_name = "cluster_test %?" - storage_fr = area_fr.create_st_storage(st_storage_name) - assert storage_fr.name == st_storage_name - assert storage_fr.id == "cluster_test" - - # test short term storage creation with properties - st_storage_name = "wind_onshore" - storage_properties = STStorageProperties(reservoir_capacity=0.5) - storage_properties.group = STStorageGroup.BATTERY - battery_fr = area_fr.create_st_storage(st_storage_name, storage_properties) - properties = battery_fr.properties - assert properties.reservoir_capacity == 0.5 - assert properties.group == STStorageGroup.BATTERY - - # tests upload matrix for short term storage. 
- # Case that fails - wrong_matrix = pd.DataFrame(data=[[0]]) - with pytest.raises( - STStorageMatrixUploadError, - match=f"Could not upload {STStorageMatrixName.INFLOWS.value} matrix for storage {battery_fr.id}" - f" inside area {area_fr.id}", - ): - battery_fr.upload_storage_inflows(wrong_matrix) - - # Case that succeeds - injection_matrix = pd.DataFrame(data=np.zeros((8760, 1))) - battery_fr.upload_pmax_injection(injection_matrix) - - # tests get pmax_injection matrix - assert battery_fr.get_pmax_injection().equals(injection_matrix) - - # asserts areas contains the clusters + short term storages - assert area_be.get_thermals() == {thermal_be.id: thermal_be} - assert area_fr.get_thermals() == {thermal_fr.id: thermal_fr, thermal_value_be.id: thermal_value_be} - assert area_be.get_renewables() == {} - assert area_fr.get_renewables() == {renewable_onshore.id: renewable_onshore, renewable_fr.id: renewable_fr} - assert area_be.get_st_storages() == {} - assert area_fr.get_st_storages() == {battery_fr.id: battery_fr, storage_fr.id: storage_fr} - - # test binding constraint creation without terms - properties = BindingConstraintProperties(enabled=False) - properties.group = "group_1" - constraint_1 = study.create_binding_constraint(name="bc_1", properties=properties) - assert constraint_1.name == "bc_1" - assert not constraint_1.properties.enabled - assert constraint_1.properties.group == "group_1" - assert constraint_1.get_terms() == {} - - # test binding constraint creation with terms - link_data = LinkData(area1=area_be.id, area2=area_fr.id) - link_term_2 = ConstraintTerm(data=link_data, weight=2) - cluster_data = ClusterData(area=area_fr.id, cluster=thermal_fr.id) - cluster_term = ConstraintTerm(data=cluster_data, weight=4.5, offset=3) - terms = [link_term_2, cluster_term] - constraint_2 = study.create_binding_constraint(name="bc_2", terms=terms) - assert constraint_2.name == "bc_2" - assert constraint_2.get_terms() == {link_term_2.id: link_term_2, cluster_term.id: 
cluster_term} - - # test constraint creation with matrices - # Case that fails - wrong_matrix = pd.DataFrame(data=(np.ones((12, 1)))) - with pytest.raises( - BindingConstraintCreationError, - match="Could not create the binding constraint bc_3", - ): - study.create_binding_constraint(name="bc_3", less_term_matrix=wrong_matrix) - - # Other case with failure - with pytest.raises( - ConstraintMatrixUpdateError, - match=f"Could not update matrix eq for binding constraint {constraint_2.id}", - ): - constraint_2.update_equal_term_matrix(wrong_matrix) - - # Case that succeeds - properties = BindingConstraintProperties(operator="less") - matrix = pd.DataFrame(data=(np.ones((8784, 1)))) - constraint_3 = study.create_binding_constraint(name="bc_3", less_term_matrix=matrix, properties=properties) - assert constraint_3.get_less_term_matrix().equals(matrix) - - # test update constraint matrices - new_matrix = pd.DataFrame(data=(np.ones((8784, 1)))) - new_matrix.iloc[0, 0] = 72 - properties.operator = "equal" - constraint_3.update_properties(properties) - constraint_3.update_equal_term_matrix(new_matrix) - assert constraint_3.get_equal_term_matrix().equals(new_matrix) - - # test adding terms to a constraint - link_data = LinkData(area1=area_de.id, area2=area_fr.id) - link_term_1 = ConstraintTerm(data=link_data, weight=15) - cluster_data = ClusterData(area=area_be.id, cluster=thermal_be.id) - cluster_term = ConstraintTerm(data=cluster_data, weight=100) - terms = [link_term_1, cluster_term] - constraint_1.add_terms(terms) - assert constraint_1.get_terms() == {link_term_1.id: link_term_1, cluster_term.id: cluster_term} - - # asserts study contains the constraints - assert study.get_binding_constraints() == { - constraint_1.id: constraint_1, - constraint_2.id: constraint_2, - constraint_3.id: constraint_3, - } - - # test area property edition - new_props = AreaProperties() - new_props.adequacy_patch_mode = AdequacyPatchMode.VIRTUAL - area_fr.update_properties(new_props) - assert 
area_fr.properties.adequacy_patch_mode == AdequacyPatchMode.VIRTUAL - - # test area ui edition - new_ui = AreaUi() - new_ui.x = 100 - area_fr.update_ui(new_ui) - assert area_fr.ui.x == 100 - - # test link property edition - new_props = LinkProperties() - new_props.hurdles_cost = False - link_be_fr.update_properties(new_props) - assert not link_be_fr.properties.hurdles_cost - - # tests link ui edition - new_ui = LinkUi() - new_ui.link_style = LinkStyle.PLAIN - link_be_fr.update_ui(new_ui) - assert link_be_fr.ui.link_style == LinkStyle.PLAIN - - # tests thermal properties update - new_props = ThermalClusterProperties() - new_props.group = ThermalClusterGroup.NUCLEAR - thermal_fr.update_properties(new_props) - assert thermal_fr.properties.group == ThermalClusterGroup.NUCLEAR - - # tests renewable properties update - new_props = RenewableClusterProperties() - new_props.ts_interpretation = TimeSeriesInterpretation.POWER_GENERATION - renewable_onshore.update_properties(new_props) - assert renewable_onshore.properties.ts_interpretation == TimeSeriesInterpretation.POWER_GENERATION - - # tests short term storage properties update - new_props = STStorageProperties() - new_props.group = STStorageGroup.PONDAGE - battery_fr.update_properties(new_props) - assert battery_fr.properties.group == STStorageGroup.PONDAGE - - # tests constraint properties update - new_props = BindingConstraintProperties() - new_props.group = "another_group" - constraint_1.update_properties(new_props) - assert constraint_1.properties.group == "another_group" - - # tests constraint deletion - study.delete_binding_constraint(constraint_1) - assert constraint_1.id not in study.get_binding_constraints() - - # tests constraint term deletion - constraint_2.delete_term(link_term_2) - assert link_term_2.id not in constraint_2.get_terms() - - # tests link deletion - study.delete_link(link_de_fr) - assert link_de_fr.name not in study.get_links() - - # tests thermal cluster deletion - 
area_be.delete_thermal_cluster(thermal_be) - assert area_be.get_thermals() == {} - - # tests renewable cluster deletion - area_fr.delete_renewable_clusters([renewable_onshore, renewable_fr]) - assert area_fr.get_renewables() == {} - - # tests short term storage deletion - area_fr.delete_st_storage(battery_fr) - assert battery_fr.id not in study.get_areas().get(area_be.id).get_st_storages() - - # tests area deletion error - with pytest.raises( - AreaDeletionError, - match=f"Could not delete the area fr: Area '{area_fr.id}' is not allowed " - f"to be deleted, because it is referenced in " - f"the following binding constraints:\n1- 'bc_2'.", - ): - study.delete_area(area_fr) - - # tests area deletion success - study.delete_area(area_de) - assert area_de.id not in study.get_areas() - - # test study creation with settings - settings = StudySettings() - settings.general_properties = GeneralProperties(mode="Adequacy") - settings.general_properties.year_by_year = False - settings.playlist = {"1": {"status": False, "weight": 1}} - new_study = create_study_api("second_study", "880", api_config, settings) - settings = new_study.get_settings() - assert settings.general_properties.mode == Mode.ADEQUACY - assert not settings.general_properties.year_by_year - assert settings.playlist == {"1": PlaylistData(status=False, weight=1)} - - # tests update settings - new_settings = StudySettings() - # Really important note. To instance such object with value you must respect camel case. 
- # Another way to do so is to instance the object and then fill its values - new_settings.general_properties = GeneralProperties(nbYears=4) - new_settings.advanced_properties = AdvancedProperties() - new_settings.advanced_properties.unit_commitment_mode = UnitCommitmentMode.MILP - new_study.update_settings(new_settings) - assert new_study.get_settings().general_properties.mode == Mode.ADEQUACY - assert new_study.get_settings().general_properties.nb_years == 4 - assert new_study.get_settings().advanced_properties.unit_commitment_mode == UnitCommitmentMode.MILP - - old_settings = new_study.get_settings() - empty_settings = StudySettings() - new_study.update_settings(empty_settings) - assert old_settings == new_study.get_settings() - - finally: - app.kill() +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# +# See AUTHORS.txt +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# SPDX-License-Identifier: MPL-2.0 +# +# This file is part of the Antares project. 
+import numpy as np +import pandas as pd +import pytest + +from antares.api_conf.api_conf import APIconf +from antares.exceptions.exceptions import ( + LoadMatrixUploadError, + STStorageMatrixUploadError, + BindingConstraintCreationError, + ConstraintMatrixUpdateError, + AreaDeletionError, +) +from antares.model.settings.advanced_parameters import ( + UnitCommitmentMode, + AdvancedProperties, +) +from antares.model.study import create_study_api +from antares.model.binding_constraint import ( + BindingConstraintProperties, + LinkData, + ClusterData, + ConstraintTerm, +) +from antares.model.link import LinkUi, LinkProperties, LinkStyle +from antares.model.renewable import ( + RenewableClusterProperties, + RenewableClusterGroup, + TimeSeriesInterpretation, +) +from antares.model.settings import StudySettings, GeneralProperties, PlaylistData +from antares.model.settings.general import Mode +from antares.model.st_storage import ( + STStorageProperties, + STStorageGroup, + STStorageMatrixName, +) +from antares.model.thermal import ThermalClusterProperties, ThermalClusterGroup + +from antares.model.area import AreaUi, AreaProperties, AdequacyPatchMode, FilterOption +from tests.integration.antares_web_desktop import AntaresWebDesktop + + +# todo add integration tests for matrices +class TestWebClient: + def test_creation_lifecycle(self): + app = AntaresWebDesktop() + app.wait_for_server_to_start() + + api_config = APIconf(api_host=app.url, token="", verify=False) + + try: + study = create_study_api("antares-craft-test", "880", api_config) + + # tests area creation with default values + area_name = "FR" + area_fr = study.create_area(area_name) + assert area_fr.name == area_name + assert area_fr.id == area_name.lower() + + # test upload load matrix + # Case that fails + wrong_load_matrix = pd.DataFrame(data=[[0]]) + with pytest.raises( + LoadMatrixUploadError, + match=f"Could not upload load matrix for area {area_fr.id}: Expected 8760 rows and received 1", + ): + 
area_fr.upload_load_matrix(wrong_load_matrix) + + # Case that succeeds + load_matrix = pd.DataFrame(data=np.zeros((8760, 1))) + area_fr.upload_load_matrix(load_matrix) + + # tests get load matrix + assert area_fr.get_load_matrix().equals(load_matrix) + + # tests area creation with ui values + area_ui = AreaUi(x=100, color_rgb=[255, 0, 0]) + area_name = "BE?" + area_be = study.create_area(area_name, ui=area_ui) + assert area_be.name == area_name + assert area_be.id == "be" + area_ui = area_be.ui + assert area_ui.x == area_ui.x + assert area_ui.color_rgb == area_ui.color_rgb + + # tests area creation with properties + properties = AreaProperties() + properties.energy_cost_spilled = 100 + properties.adequacy_patch_mode = AdequacyPatchMode.INSIDE + properties.filter_synthesis = [ + FilterOption.HOURLY, + FilterOption.DAILY, + FilterOption.HOURLY, + ] + area_name = "DE" + area_de = study.create_area(area_name, properties=properties) + assert area_de.properties.energy_cost_spilled == 100 + assert area_de.properties.adequacy_patch_mode == AdequacyPatchMode.INSIDE + assert area_de.properties.filter_synthesis == { + FilterOption.HOURLY, + FilterOption.DAILY, + } + + # tests link creation with default values + link_de_fr = study.create_link(area_from=area_de, area_to=area_fr) + assert link_de_fr.area_from == area_de + assert link_de_fr.area_to == area_fr + assert link_de_fr.name == f"{area_de.id} / {area_fr.id}" + + # tests link creation with ui and properties + link_ui = LinkUi(colorr=44) + link_properties = LinkProperties(hurdles_cost=True) + link_properties.filter_year_by_year = [FilterOption.HOURLY] + link_be_fr = study.create_link( + area_from=area_be, + area_to=area_fr, + ui=link_ui, + properties=link_properties, + ) + assert link_be_fr.ui.colorr == 44 + assert link_be_fr.properties.hurdles_cost + assert link_be_fr.properties.filter_year_by_year == {FilterOption.HOURLY} + + # asserts study contains all links and areas + assert study.get_areas() == { + area_be.id: 
area_be, + area_fr.id: area_fr, + area_de.id: area_de, + } + assert study.get_links() == { + link_be_fr.name: link_be_fr, + link_de_fr.name: link_de_fr, + } + + # test thermal cluster creation with default values + thermal_name = "Cluster_test %?" + thermal_fr = area_fr.create_thermal_cluster(thermal_name) + assert thermal_fr.name == thermal_name.lower() + # AntaresWeb has id issues for thermal/renewable clusters, + # so we force the name in lowercase to avoid issues. + assert thermal_fr.id == "cluster_test" + + # test thermal cluster creation with properties + thermal_name = "gaz_be" + thermal_properties = ThermalClusterProperties(efficiency=55) + thermal_properties.group = ThermalClusterGroup.GAS + thermal_be = area_be.create_thermal_cluster(thermal_name, thermal_properties) + properties = thermal_be.properties + assert properties.efficiency == 55 + assert properties.group == ThermalClusterGroup.GAS + + # test thermal cluster creation with prepro_modulation matrices + thermal_name = "matrices_be" + prepro_modulation_matrix = pd.DataFrame(data=np.ones((8760, 4))) + modulation_matrix = pd.DataFrame(data=np.ones((8760, 4))) + series_matrix = pd.DataFrame(data=np.ones((8760, 4))) + CO2Cost_matrix = pd.DataFrame(data=np.ones((8760, 1))) + fuelCost_matrix = pd.DataFrame(data=np.ones((8760, 1))) + + # Case that succeeds + thermal_value_be = area_fr.create_thermal_cluster_with_matrices( + cluster_name=thermal_name, + parameters=thermal_properties, + prepro=prepro_modulation_matrix, + modulation=modulation_matrix, + series=series_matrix, + CO2Cost=CO2Cost_matrix, + fuelCost=fuelCost_matrix, + ) + + prepro = thermal_value_be.get_prepro_data_matrix() + modulation = thermal_value_be.get_prepro_modulation_matrix() + series = thermal_value_be.get_series_matrix() + CO2 = thermal_value_be.get_co2_cost_matrix() + fuel = thermal_value_be.get_fuel_cost_matrix() + + # tests get thermal matrix + assert prepro.equals(prepro_modulation_matrix) + assert 
modulation.equals(modulation_matrix) + assert series.equals(series_matrix) + assert CO2.equals(CO2Cost_matrix) + assert fuel.equals(fuelCost_matrix) + + # test renewable cluster creation with default values + renewable_name = "cluster_test %?" + renewable_fr = area_fr.create_renewable_cluster(renewable_name, None, None) + assert renewable_fr.name == renewable_name + assert renewable_fr.id == "cluster_test" + + # test renewable cluster creation with properties + renewable_name = "wind_onshore" + renewable_properties = RenewableClusterProperties(enabled=False) + renewable_properties.group = RenewableClusterGroup.WIND_ON_SHORE + renewable_onshore = area_fr.create_renewable_cluster(renewable_name, renewable_properties, None) + properties = renewable_onshore.properties + assert not properties.enabled + assert properties.group == RenewableClusterGroup.WIND_ON_SHORE + + # test short term storage creation with default values + st_storage_name = "cluster_test %?" + storage_fr = area_fr.create_st_storage(st_storage_name) + assert storage_fr.name == st_storage_name + assert storage_fr.id == "cluster_test" + + # test short term storage creation with properties + st_storage_name = "wind_onshore" + storage_properties = STStorageProperties(reservoir_capacity=0.5) + storage_properties.group = STStorageGroup.BATTERY + battery_fr = area_fr.create_st_storage(st_storage_name, storage_properties) + properties = battery_fr.properties + assert properties.reservoir_capacity == 0.5 + assert properties.group == STStorageGroup.BATTERY + + # tests upload matrix for short term storage. 
+ # Case that fails + wrong_matrix = pd.DataFrame(data=[[0]]) + with pytest.raises( + STStorageMatrixUploadError, + match=f"Could not upload {STStorageMatrixName.INFLOWS.value} matrix for storage {battery_fr.id}" + f" inside area {area_fr.id}", + ): + battery_fr.upload_storage_inflows(wrong_matrix) + + # Case that succeeds + injection_matrix = pd.DataFrame(data=np.zeros((8760, 1))) + battery_fr.upload_pmax_injection(injection_matrix) + + # tests get pmax_injection matrix + assert battery_fr.get_pmax_injection().equals(injection_matrix) + + # asserts areas contains the clusters + short term storages + assert area_be.get_thermals() == {thermal_be.id: thermal_be} + assert area_fr.get_thermals() == { + thermal_fr.id: thermal_fr, + thermal_value_be.id: thermal_value_be, + } + assert area_be.get_renewables() == {} + assert area_fr.get_renewables() == { + renewable_onshore.id: renewable_onshore, + renewable_fr.id: renewable_fr, + } + assert area_be.get_st_storages() == {} + assert area_fr.get_st_storages() == { + battery_fr.id: battery_fr, + storage_fr.id: storage_fr, + } + + # test binding constraint creation without terms + properties = BindingConstraintProperties(enabled=False) + properties.group = "group_1" + constraint_1 = study.create_binding_constraint(name="bc_1", properties=properties) + assert constraint_1.name == "bc_1" + assert not constraint_1.properties.enabled + assert constraint_1.properties.group == "group_1" + assert constraint_1.get_terms() == {} + + # test binding constraint creation with terms + link_data = LinkData(area1=area_be.id, area2=area_fr.id) + link_term_2 = ConstraintTerm(data=link_data, weight=2) + cluster_data = ClusterData(area=area_fr.id, cluster=thermal_fr.id) + cluster_term = ConstraintTerm(data=cluster_data, weight=4.5, offset=3) + terms = [link_term_2, cluster_term] + constraint_2 = study.create_binding_constraint(name="bc_2", terms=terms) + assert constraint_2.name == "bc_2" + assert constraint_2.get_terms() == { + link_term_2.id: 
link_term_2, + cluster_term.id: cluster_term, + } + + # test constraint creation with matrices + # Case that fails + wrong_matrix = pd.DataFrame(data=(np.ones((12, 1)))) + with pytest.raises( + BindingConstraintCreationError, + match="Could not create the binding constraint bc_3", + ): + study.create_binding_constraint(name="bc_3", less_term_matrix=wrong_matrix) + + # Other case with failure + with pytest.raises( + ConstraintMatrixUpdateError, + match=f"Could not update matrix eq for binding constraint {constraint_2.id}", + ): + constraint_2.update_equal_term_matrix(wrong_matrix) + + # Case that succeeds + properties = BindingConstraintProperties(operator="less") + matrix = pd.DataFrame(data=(np.ones((8784, 1)))) + constraint_3 = study.create_binding_constraint(name="bc_3", less_term_matrix=matrix, properties=properties) + assert constraint_3.get_less_term_matrix().equals(matrix) + + # test update constraint matrices + new_matrix = pd.DataFrame(data=(np.ones((8784, 1)))) + new_matrix.iloc[0, 0] = 72 + properties.operator = "equal" + constraint_3.update_properties(properties) + constraint_3.update_equal_term_matrix(new_matrix) + assert constraint_3.get_equal_term_matrix().equals(new_matrix) + + # test adding terms to a constraint + link_data = LinkData(area1=area_de.id, area2=area_fr.id) + link_term_1 = ConstraintTerm(data=link_data, weight=15) + cluster_data = ClusterData(area=area_be.id, cluster=thermal_be.id) + cluster_term = ConstraintTerm(data=cluster_data, weight=100) + terms = [link_term_1, cluster_term] + constraint_1.add_terms(terms) + assert constraint_1.get_terms() == { + link_term_1.id: link_term_1, + cluster_term.id: cluster_term, + } + + # asserts study contains the constraints + assert study.get_binding_constraints() == { + constraint_1.id: constraint_1, + constraint_2.id: constraint_2, + constraint_3.id: constraint_3, + } + + # test area property edition + new_props = AreaProperties() + new_props.adequacy_patch_mode = AdequacyPatchMode.VIRTUAL + 
area_fr.update_properties(new_props) + assert area_fr.properties.adequacy_patch_mode == AdequacyPatchMode.VIRTUAL + + # test area ui edition + new_ui = AreaUi() + new_ui.x = 100 + area_fr.update_ui(new_ui) + assert area_fr.ui.x == 100 + + # test link property edition + new_props = LinkProperties() + new_props.hurdles_cost = False + link_be_fr.update_properties(new_props) + assert not link_be_fr.properties.hurdles_cost + + # tests link ui edition + new_ui = LinkUi() + new_ui.link_style = LinkStyle.PLAIN + link_be_fr.update_ui(new_ui) + assert link_be_fr.ui.link_style == LinkStyle.PLAIN + + # tests thermal properties update + new_props = ThermalClusterProperties() + new_props.group = ThermalClusterGroup.NUCLEAR + thermal_fr.update_properties(new_props) + assert thermal_fr.properties.group == ThermalClusterGroup.NUCLEAR + + # tests renewable properties update + new_props = RenewableClusterProperties() + new_props.ts_interpretation = TimeSeriesInterpretation.POWER_GENERATION + renewable_onshore.update_properties(new_props) + assert renewable_onshore.properties.ts_interpretation == TimeSeriesInterpretation.POWER_GENERATION + + # tests short term storage properties update + new_props = STStorageProperties() + new_props.group = STStorageGroup.PONDAGE + battery_fr.update_properties(new_props) + assert battery_fr.properties.group == STStorageGroup.PONDAGE + + # tests constraint properties update + new_props = BindingConstraintProperties() + new_props.group = "another_group" + constraint_1.update_properties(new_props) + assert constraint_1.properties.group == "another_group" + + # tests constraint deletion + study.delete_binding_constraint(constraint_1) + assert constraint_1.id not in study.get_binding_constraints() + + # tests constraint term deletion + constraint_2.delete_term(link_term_2) + assert link_term_2.id not in constraint_2.get_terms() + + # tests link deletion + study.delete_link(link_de_fr) + assert link_de_fr.name not in study.get_links() + + # tests thermal 
cluster deletion + area_be.delete_thermal_cluster(thermal_be) + assert area_be.get_thermals() == {} + + # tests renewable cluster deletion + area_fr.delete_renewable_clusters([renewable_onshore, renewable_fr]) + assert area_fr.get_renewables() == {} + + # tests short term storage deletion + area_fr.delete_st_storage(battery_fr) + assert battery_fr.id not in study.get_areas().get(area_be.id).get_st_storages() + + # tests area deletion error + with pytest.raises( + AreaDeletionError, + match=f"Could not delete the area fr: Area '{area_fr.id}' is not allowed " + f"to be deleted, because it is referenced in " + f"the following binding constraints:\n1- 'bc_2'.", + ): + study.delete_area(area_fr) + + # tests area deletion success + study.delete_area(area_de) + assert area_de.id not in study.get_areas() + + # test study creation with settings + settings = StudySettings() + settings.general_properties = GeneralProperties(mode="Adequacy") + settings.general_properties.year_by_year = False + settings.playlist = {"1": {"status": False, "weight": 1}} + new_study = create_study_api("second_study", "880", api_config, settings) + settings = new_study.get_settings() + assert settings.general_properties.mode == Mode.ADEQUACY + assert not settings.general_properties.year_by_year + assert settings.playlist == {"1": PlaylistData(status=False, weight=1)} + + # tests update settings + new_settings = StudySettings() + # Really important note. To instance such object with value you must respect camel case. 
+ # Another way to do so is to instance the object and then fill its values + new_settings.general_properties = GeneralProperties(nbYears=4) + new_settings.advanced_properties = AdvancedProperties() + new_settings.advanced_properties.unit_commitment_mode = UnitCommitmentMode.MILP + new_study.update_settings(new_settings) + assert new_study.get_settings().general_properties.mode == Mode.ADEQUACY + assert new_study.get_settings().general_properties.nb_years == 4 + assert new_study.get_settings().advanced_properties.unit_commitment_mode == UnitCommitmentMode.MILP + + old_settings = new_study.get_settings() + empty_settings = StudySettings() + new_study.update_settings(empty_settings) + assert old_settings == new_study.get_settings() + + finally: + app.kill()