Commit

Merge remote-tracking branch 'origin/main' into read_thermals_local

killian-scalian committed Dec 5, 2024
2 parents 37a4d7f + b5bf3a2 commit 2812710
Showing 16 changed files with 285 additions and 98 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/ci.yml
@@ -29,7 +29,9 @@ jobs:
- name: Performs Ubuntu tests
if: matrix.os != 'windows-latest'
run: tox -p
run: |
tox -e lint-ci
tox -p
- name: Performs Windows tests
if: matrix.os == 'windows-latest'
51 changes: 44 additions & 7 deletions src/antares/model/study.py
@@ -171,6 +171,23 @@ def _directory_not_exists(local_path: Path) -> None:
)


def read_study_api(api_config: APIconf, study_id: str) -> "Study":
session = api_config.set_up_api_conf()
wrapper = RequestWrapper(session)
base_url = f"{api_config.get_host()}/api/v1"
json_study = wrapper.get(f"{base_url}/studies/{study_id}").json()

study_name = json_study.pop("name")
study_version = str(json_study.pop("version"))

study_settings = _returns_study_settings(base_url, study_id, wrapper, False, None)
study = Study(study_name, study_version, ServiceFactory(api_config, study_id, study_name), study_settings)

study.read_areas()

return study


class Study:
def __init__(
self,
@@ -188,16 +205,20 @@ def __init__(
self._settings = DefaultStudySettings.model_validate(settings if settings is not None else StudySettings())
self._areas: Dict[str, Area] = dict()
self._links: Dict[str, Link] = dict()
self._binding_constraints: Dict[str, BindingConstraint] = dict()

@property
def service(self) -> BaseStudyService:
return self._study_service

def read_areas(self) -> list[Area]:
return self._area_service.read_areas()

def read_links(self) -> list[Link]:
return self._link_service.read_links(self._area_service)
"""
Synchronize the internal study object with the areas actually written in the Antares study.
Returns: the synchronized list of areas.
"""
area_list = self._area_service.read_areas()
self._areas = {area.id: area for area in area_list}
return area_list

def get_areas(self) -> MappingProxyType[str, Area]:
return MappingProxyType(dict(sorted(self._areas.items())))
@@ -209,7 +230,7 @@ def get_settings(self) -> DefaultStudySettings:
return self._settings

def get_binding_constraints(self) -> MappingProxyType[str, BindingConstraint]:
return MappingProxyType(self._binding_constraints_service.binding_constraints)
return MappingProxyType(self._binding_constraints)

def create_area(
self, area_name: str, *, properties: Optional[AreaProperties] = None, ui: Optional[AreaUi] = None
@@ -249,9 +270,25 @@ def create_binding_constraint(
equal_term_matrix: Optional[pd.DataFrame] = None,
greater_term_matrix: Optional[pd.DataFrame] = None,
) -> BindingConstraint:
return self._binding_constraints_service.create_binding_constraint(
"""
Create a new binding constraint and store it.
Args:
name (str): The name of the binding constraint.
properties (Optional[BindingConstraintProperties]): Optional properties for the constraint.
terms (Optional[List[ConstraintTerm]]): Optional list of terms for the constraint.
less_term_matrix (Optional[pd.DataFrame]): Optional less-than term matrix.
equal_term_matrix (Optional[pd.DataFrame]): Optional equality term matrix.
greater_term_matrix (Optional[pd.DataFrame]): Optional greater-than term matrix.
Returns:
BindingConstraint: The created binding constraint.
"""
binding_constraint = self._binding_constraints_service.create_binding_constraint(
name, properties, terms, less_term_matrix, equal_term_matrix, greater_term_matrix
)
self._binding_constraints[binding_constraint.id] = binding_constraint
return binding_constraint

def update_settings(self, settings: StudySettings) -> None:
new_settings = self._study_service.update_study_settings(settings)
@@ -260,7 +297,7 @@ def update_settings(self, settings: StudySettings) -> None:

def delete_binding_constraint(self, constraint: BindingConstraint) -> None:
self._study_service.delete_binding_constraint(constraint)
self._binding_constraints_service.binding_constraints.pop(constraint.id)
self._binding_constraints.pop(constraint.id)

def delete(self, children: bool = False) -> None:
self._study_service.delete(children)
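
Taken together, the study.py changes mean that a study fetched over the API now comes back with its areas already read, and that binding constraints created through the `Study` are cached on the study object itself rather than on the binding-constraint service. A minimal usage sketch under those assumptions; the `APIconf` import path and constructor arguments below are guesses and not taken from this diff:

```python
from antares.api_conf.api_conf import APIconf  # import path assumed
from antares.model.study import read_study_api

# Hypothetical host, token and study id, for illustration only.
api_config = APIconf(api_host="https://antares.example.com", token="MY_TOKEN")
study = read_study_api(api_config, "a-study-uuid")

# read_study_api() already called read_areas(), so the area cache is populated.
print(sorted(study.get_areas()))

# Constraints created through the Study are now tracked on the Study itself,
# not on the binding-constraint service.
constraint = study.create_binding_constraint(name="bc_example")
assert constraint.id in study.get_binding_constraints()

# Deleting removes it from the same cache.
study.delete_binding_constraint(constraint)
assert constraint.id not in study.get_binding_constraints()
```
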
1 change: 0 additions & 1 deletion src/antares/service/api_services/area_api.py
@@ -560,7 +560,6 @@ def read_areas(self) -> List[Area]:
base_api_url = f"{self._base_url}/studies/{self.study_id}/areas"
ui_url = "ui=true"
url_properties_form = "properties/form"

json_resp = self._wrapper.get(base_api_url + "?" + ui_url).json()
for area in json_resp:
area_url = base_api_url + "/" + f"{area}/"
2 changes: 0 additions & 2 deletions src/antares/service/api_services/binding_constraint_api.py
@@ -43,7 +43,6 @@ def __init__(self, config: APIconf, study_id: str) -> None:
self.study_id = study_id
self._wrapper = RequestWrapper(self.api_config.set_up_api_conf())
self._base_url = f"{self.api_config.get_host()}/api/v1"
self.binding_constraints = {}

def create_binding_constraint(
self,
@@ -105,7 +104,6 @@ def create_binding_constraint(
raise BindingConstraintCreationError(name, e.message) from e

constraint = BindingConstraint(name, self, bc_properties, bc_terms)
self.binding_constraints[constraint.id] = constraint

return constraint

9 changes: 1 addition & 8 deletions src/antares/service/api_services/renewable_api.py
@@ -53,14 +53,7 @@ def update_renewable_properties(

def get_renewable_matrix(self, cluster_id: str, area_id: str) -> pd.DataFrame:
try:
path = (
PurePosixPath("input")
/ "renewables"
/ "series"
/ f"{area_id}"
/ f"{cluster_id}"
/ "series"
)
path = PurePosixPath("input") / "renewables" / "series" / f"{area_id}" / f"{cluster_id}" / "series"
return get_matrix(f"{self._base_url}/studies/{self.study_id}/raw?path={path}", self._wrapper)
except APIError as e:
raise RenewableMatrixDownloadError(area_id, cluster_id, e.message) from e
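
The reformatted one-liner builds the same raw-API path as the old multi-line version; a small sketch of the value it produces, with made-up area and cluster ids:

```python
from pathlib import PurePosixPath

area_id, cluster_id = "fr", "solar_pv"  # hypothetical ids
path = PurePosixPath("input") / "renewables" / "series" / f"{area_id}" / f"{cluster_id}" / "series"
print(path)  # input/renewables/series/fr/solar_pv/series
# get_renewable_matrix() then requests {base_url}/studies/{study_id}/raw?path=<that path>
```
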
4 changes: 1 addition & 3 deletions src/antares/service/base_services.py
@@ -525,9 +525,7 @@ def update_renewable_properties(
pass

@abstractmethod
def get_renewable_matrix(
self, cluster_id: str, area_id: str
) -> pd.DataFrame:
def get_renewable_matrix(self, cluster_id: str, area_id: str) -> pd.DataFrame:
"""
Args:
cluster_id: renewable cluster id to retrieve matrix
4 changes: 2 additions & 2 deletions src/antares/service/local_services/area_local.py
@@ -320,7 +320,7 @@ def get_load_matrix(self, area: Area) -> pd.DataFrame:
return read_timeseries(TimeSeriesFileType.LOAD, self.config.study_path, area_id=area.id)

def get_solar_matrix(self, area: Area) -> pd.DataFrame:
return read_timeseries(TimeSeriesFileType.SOLAR, self.config.study_path, area_id=area.id)

def get_wind_matrix(self, area: Area) -> pd.DataFrame:
return read_timeseries(TimeSeriesFileType.WIND, self.config.study_path, area_id=area.id)
@@ -330,7 +330,7 @@ def get_reserves_matrix(self, area: Area) -> pd.DataFrame:

def get_misc_gen_matrix(self, area: Area) -> pd.DataFrame:
return read_timeseries(TimeSeriesFileType.MISC_GEN, self.config.study_path, area_id=area.id)

def read_areas(self) -> List[Area]:
local_path = self.config.local_path
areas_path = local_path / self.study_name / "input" / "areas"
101 changes: 81 additions & 20 deletions src/antares/service/local_services/binding_constraint_local.py
@@ -9,7 +9,6 @@
# SPDX-License-Identifier: MPL-2.0
#
# This file is part of the Antares project.

from typing import Any, Optional

import numpy as np
@@ -22,6 +21,7 @@
BindingConstraintFrequency,
BindingConstraintOperator,
BindingConstraintProperties,
BindingConstraintPropertiesLocal,
ConstraintMatrixName,
ConstraintTerm,
)
@@ -37,7 +37,6 @@ def __init__(self, config: LocalConfiguration, study_name: str, **kwargs: Any) -
self.config = config
self.study_name = study_name
self.ini_file = IniFile(self.config.study_path, IniFileTypes.BINDING_CONSTRAINTS_INI)
self.binding_constraints = {}

def create_binding_constraint(
self,
Expand All @@ -54,17 +53,16 @@ def create_binding_constraint(
properties=properties,
terms=terms,
)
if constraint.id in self.binding_constraints:
constraint.properties = constraint.local_properties.yield_binding_constraint_properties()

current_ini_content = self.ini_file.ini_dict_binding_constraints or {}
if any(values.get("id") == constraint.id for values in current_ini_content.values()):
raise BindingConstraintCreationError(
constraint_name=name, message=f"A binding constraint with the name '{name}' already exists."
constraint_name=name, message=f"A binding constraint with the name {name} already exists."
)
constraint.properties = constraint.local_properties.yield_binding_constraint_properties()

# Add binding constraints
self.binding_constraints[constraint.id] = constraint
self._write_binding_constraint_ini()
self._write_binding_constraint_ini(constraint.properties, name, name, terms)

# Add constraint time series
self._store_time_series(constraint, less_term_matrix, equal_term_matrix, greater_term_matrix)

return constraint
@@ -103,21 +101,84 @@ def _check_if_empty_ts(time_step: BindingConstraintFrequency, time_series: Optio
time_series_length = (365 * 24 + 24) if time_step == BindingConstraintFrequency.HOURLY else 366
return time_series if time_series is not None else pd.DataFrame(np.zeros([time_series_length, 1]))

def _write_binding_constraint_ini(self) -> None:
binding_constraints_ini_content = {
idx: idx_constraint.local_properties.list_ini_fields
for idx, idx_constraint in enumerate(self.binding_constraints.values())
}
self.ini_file.ini_dict = binding_constraints_ini_content
def _write_binding_constraint_ini(
self,
properties: BindingConstraintProperties,
constraint_name: str,
constraint_id: str,
terms: Optional[list[ConstraintTerm]] = None,
) -> None:
"""
Write or update a binding constraint in the INI file.
"""

current_ini_content = self.ini_file.ini_dict_binding_constraints or {}

existing_section = next(
(section for section, values in current_ini_content.items() if values.get("name") == constraint_name),
None,
)

if existing_section:
existing_terms = current_ini_content[existing_section]

serialized_terms = {term.id: term.weight_offset() for term in terms} if terms else {}

existing_terms.update(serialized_terms) # type: ignore
current_ini_content[existing_section] = existing_terms

# Persist the updated INI content
self.ini_file.write_ini_file()
else:
terms_dict = {term.id: term for term in terms} if terms else {}

full_properties = BindingConstraintPropertiesLocal(
constraint_name=constraint_name,
constraint_id=constraint_id,
terms=terms_dict,
**properties.model_dump(),
)

section_index = len(current_ini_content)
current_ini_content[str(section_index)] = full_properties.list_ini_fields

self.ini_file.ini_dict_binding_constraints = current_ini_content
self.ini_file.write_ini_file()

def add_constraint_terms(self, constraint: BindingConstraint, terms: list[ConstraintTerm]) -> list[ConstraintTerm]:
new_terms = constraint.local_properties.terms | {
term.id: term for term in terms if term.id not in constraint.get_terms()
}
"""
Add terms to a binding constraint and update the INI file.
Args:
constraint (BindingConstraint): The binding constraint to update.
terms (list[ConstraintTerm]): A list of new terms to add.
Returns:
list[ConstraintTerm]: The updated list of terms.
"""

new_terms = constraint.local_properties.terms.copy()

for term in terms:
if term.id in constraint.get_terms():
raise BindingConstraintCreationError(
constraint_name=constraint.name, message=f"Duplicate term found: {term.id}"
)
new_terms[term.id] = term

constraint.local_properties.terms = new_terms
self._write_binding_constraint_ini()
return list(new_terms.values())

terms_values = list(new_terms.values())

self._write_binding_constraint_ini(
properties=constraint.properties,
constraint_name=constraint.name,
constraint_id=constraint.id,
terms=terms_values,
)

return terms_values

def delete_binding_constraint_term(self, constraint_id: str, term_id: str) -> None:
raise NotImplementedError
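
The local service now reads existing constraints from the INI file itself (via `ini_dict_binding_constraints`) rather than an in-memory dict, appends new constraints under the next numeric section index, and rejects duplicate names and duplicate terms. A self-contained sketch of the append logic only, using hypothetical dictionary content; it mirrors `_write_binding_constraint_ini` above but does not call the real service (the real code raises `BindingConstraintCreationError` and compares constraint ids):

```python
# Existing INI content as returned by ini_dict_binding_constraints (hypothetical values).
current_ini_content = {"0": {"name": "bc_a", "id": "bc_a"}}

new_name = "bc_b"

# Duplicate check, mirroring create_binding_constraint().
if any(values.get("id") == new_name for values in current_ini_content.values()):
    raise ValueError(f"A binding constraint with the name {new_name} already exists.")

# New constraints are appended under the next numeric section index,
# preserving the 0, 1, 2, ... numbering used by bindingconstraints.ini.
section_index = len(current_ini_content)
current_ini_content[str(section_index)] = {"name": new_name, "id": new_name}

print(current_ini_content)
# {'0': {'name': 'bc_a', 'id': 'bc_a'}, '1': {'name': 'bc_b', 'id': 'bc_b'}}
```
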
22 changes: 12 additions & 10 deletions src/antares/service/local_services/renewable_local.py
@@ -33,15 +33,11 @@ def update_renewable_properties(
self, renewable_cluster: RenewableCluster, properties: RenewableClusterProperties
) -> RenewableClusterProperties:
raise NotImplementedError


def get_renewable_matrix(
self,
cluster_id: str,
area_id: str
) -> pd.DataFrame:
return read_timeseries(TimeSeriesFileType.RENEWABLE_DATA_SERIES, self.config.study_path, area_id=area_id, cluster_id=cluster_id)

def get_renewable_matrix(self, cluster_id: str, area_id: str) -> pd.DataFrame:
return read_timeseries(
TimeSeriesFileType.RENEWABLE_DATA_SERIES, self.config.study_path, area_id=area_id, cluster_id=cluster_id
)

def read_renewables(self, area_id: str) -> List[RenewableCluster]:
renewable_dict = IniFile(self.config.study_path, IniFileTypes.RENEWABLES_LIST_INI, area_name=area_id).ini_dict
@@ -56,6 +52,12 @@ def read_renewables(self, area_id: str) -> List[RenewableCluster]:
nominal_capacity=renewable_dict[renewable_cluster]["nominalcapacity"],
ts_interpretation=renewable_dict[renewable_cluster]["ts-interpretation"],
)
renewables_clusters.append(RenewableCluster(renewable_service=self, area_id=area_id, name=renewable_dict[renewable_cluster]["name"], properties=renewable_properties.yield_renewable_cluster_properties()))
renewables_clusters.append(
RenewableCluster(
renewable_service=self,
area_id=area_id,
name=renewable_dict[renewable_cluster]["name"],
properties=renewable_properties.yield_renewable_cluster_properties(),
)
)
return renewables_clusters

13 changes: 13 additions & 0 deletions src/antares/tools/ini_tool.py
@@ -90,6 +90,19 @@ def ini_dict(self, new_ini_dict: dict[str, dict[str, str]]) -> None:
self._ini_contents = CustomRawConfigParser()
self._ini_contents.read_dict(new_ini_dict)

@property
def ini_dict_binding_constraints(self) -> dict[str, dict[str, str]]:
return {section: dict(self._ini_contents[section]) for section in self._ini_contents.sections()}

@ini_dict_binding_constraints.setter
def ini_dict_binding_constraints(self, new_ini_dict: dict[str, dict[str, str]]) -> None:
"""Set INI file contents for binding constraints."""
self._ini_contents = CustomRawConfigParser()
for index, values in enumerate(new_ini_dict.values()):
self._ini_contents.add_section(str(index))
for key, value in values.items():
self._ini_contents.set(str(index), key, value)

@property
def parsed_ini(self) -> CustomRawConfigParser:
"""Ini contents as a CustomRawConfigParser"""
11 changes: 9 additions & 2 deletions src/antares/tools/matrix_tool.py
@@ -42,7 +42,14 @@ def df_read(path: Path) -> pd.DataFrame:
return pd.read_csv(path, sep="\t", header=None)


def read_timeseries(ts_file_type: TimeSeriesFileType, study_path: Path, area_id: Optional[str] = None, constraint_id: Optional[str] = None, cluster_id: Optional[str] = None, second_area_id: Optional[str] = None) -> pd.DataFrame:
def read_timeseries(
ts_file_type: TimeSeriesFileType,
study_path: Path,
area_id: Optional[str] = None,
constraint_id: Optional[str] = None,
cluster_id: Optional[str] = None,
second_area_id: Optional[str] = None,
) -> pd.DataFrame:
file_path = study_path / (
ts_file_type.value
if not (area_id or constraint_id or cluster_id or second_area_id)
@@ -53,4 +60,4 @@ def read_timeseries(ts_file_type: TimeSeriesFileType, study_path: Path, area_id:
else:
_time_series = pd.DataFrame()

return _time_series
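
A usage sketch for the reformatted `read_timeseries` helper, assuming a local study on disk; the `TimeSeriesFileType` import path is a guess, while the enum members are the ones used elsewhere in this diff:

```python
from pathlib import Path

from antares.tools.matrix_tool import read_timeseries
from antares.tools.time_series_tool import TimeSeriesFileType  # import path assumed

study_path = Path("/tmp/my_study")  # hypothetical local study

# Area-level series: the file-type template only needs area_id.
load = read_timeseries(TimeSeriesFileType.LOAD, study_path, area_id="fr")

# Cluster-level series: both area_id and cluster_id are substituted into the path.
solar_pv = read_timeseries(
    TimeSeriesFileType.RENEWABLE_DATA_SERIES, study_path, area_id="fr", cluster_id="solar_pv"
)
print(load.shape, solar_pv.shape)
```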