diff --git a/.github/workflows/tests-coverage.yml b/.github/workflows/tests-coverage.yml
index 6bcc81f6d..58ce5870e 100644
--- a/.github/workflows/tests-coverage.yml
+++ b/.github/workflows/tests-coverage.yml
@@ -66,12 +66,16 @@ jobs:
         run: |
           python -m pip install pytest pytest-notebook
           python -m pytest --runslow --runonlinux --disable-warnings --color=yes -v
+        env:
+          TOEP_TOKEN_KH: ${{ secrets.TOEP_TOKEN_KH }}

       - name: Run tests Windows
         if: runner.os == 'Windows'
         run: |
           python -m pip install pytest pytest-notebook
           python -m pytest --runslow --disable-warnings --color=yes -v
+        env:
+          TOEP_TOKEN_KH: ${{ secrets.TOEP_TOKEN_KH }}

       - name: Run tests, coverage and send to coveralls
         if: runner.os == 'Linux' && matrix.python-version == 3.9 && matrix.name-suffix == 'coverage'
@@ -80,5 +84,6 @@ jobs:
         run: |
           coverage run --source=edisgo -m pytest --runslow --runonlinux --disable-warnings --color=yes -v
           coveralls
         env:
+          TOEP_TOKEN_KH: ${{ secrets.TOEP_TOKEN_KH }}
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           COVERALLS_SERVICE_NAME: github
diff --git a/.gitignore b/.gitignore
index e13855310..b02956662 100644
--- a/.gitignore
+++ b/.gitignore
@@ -25,3 +25,5 @@ eDisGo.egg-info/
 /edisgo/opf/opf_solutions/*.json
 /edisgo/opf/eDisGo_OPF.jl/.vscode
 .vscode/settings.json
+
+*TOEP_TOKEN.*
diff --git a/doc/whatsnew/v0-3-0.rst b/doc/whatsnew/v0-3-0.rst
index cc0cae1fa..fb1b0bb91 100644
--- a/doc/whatsnew/v0-3-0.rst
+++ b/doc/whatsnew/v0-3-0.rst
@@ -28,5 +28,7 @@ Changes
 * Move function to assign feeder to Topology class and add methods to the Grid class to get information on the feeders `#360 `_
 * Added a storage operation strategy where the storage is charged when PV feed-in is higher than electricity demand of the household and discharged when electricity demand exceeds PV generation `#386 `_
 * Added an estimation of the voltage deviation over a cable when selecting a suitable cable to connect a new component `#411 `_
-* Added clipping of heat pump electrical power at its maximum value #428
+* Added clipping of heat pump electrical power at its maximum value `#428 `_
 * Loading predefined time series now automatically sets the timeindex to the default year of the database if it is empty. `#457 `_
+* Made OEP database call optional in get_database_alias_dictionaries, allowing setup without OEP when using an alternative eGon-data database. `#451 `_
+* Fixed database import issues by addressing table naming assumptions and added support for external SSH tunneling in eGon-data configurations. `#451 `_
diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py
index 786095d47..264617629 100755
--- a/edisgo/edisgo.py
+++ b/edisgo/edisgo.py
@@ -72,6 +72,11 @@ class EDisGo:
     ----------
     ding0_grid : :obj:`str`
         Path to directory containing csv files of network to be loaded.
+    engine : :sqlalchemy:`sqlalchemy.Engine` or None
+        Database engine for connecting to the `OpenEnergy DataBase OEDB
+        `_ or other eGon-data
+        databases. Defaults to the OEDB engine. Can be set to None if no scenario is to
+        be loaded.
     generator_scenario : None or :obj:`str`, optional
         If None, the generator park of the imported grid is kept as is.
         Otherwise defines which scenario of future generator park to use
@@ -159,8 +164,10 @@ class EDisGo:
     """

     def __init__(self, **kwargs):
+        # Set database engine for future scenarios
+        self.engine: Engine | None = kwargs.pop("engine", toep_engine())
         # load configuration
-        self._config = Config(**kwargs)
+        self._config = Config(engine=self.engine, **kwargs)

         # instantiate topology object and load grid data
         self.topology = Topology(config=self.config)
@@ -419,12 +426,9 @@ def set_time_series_active_power_predefined(
                 Technology- and weather cell-specific hourly feed-in time series are
                 obtained from the `OpenEnergy DataBase
-                `_. See
-                :func:`edisgo.io.timeseries_import.feedin_oedb` for more information.
-
-                This option requires that the parameter `engine` is provided in case
-                new ding0 grids with geo-referenced LV grids are used. For further
-                settings, the parameter `timeindex` can also be provided.
+                `_ or other eGon-data
+                databases. See :func:`edisgo.io.timeseries_import.feedin_oedb` for more
+                information.

                 * :pandas:`pandas.DataFrame`
@@ -537,9 +541,6 @@ def set_time_series_active_power_predefined(

         Other Parameters
         ------------------
-        engine : :sqlalchemy:`sqlalchemy.Engine`
-            Database engine. This parameter is only required in case
-            `conventional_loads_ts` or `fluctuating_generators_ts` is 'oedb'.
         scenario : str
             Scenario for which to retrieve demand data. Possible options are
             'eGon2035' and 'eGon100RE'. This parameter is only required in case
@@ -600,7 +601,7 @@ def set_time_series_active_power_predefined(
                 self,
                 fluctuating_generators_ts,
                 fluctuating_generators_names,
-                engine=kwargs.get("engine", toep_engine()),
+                engine=self.engine,
                 timeindex=timeindex,
             )
         if dispatchable_generators_ts is not None:
@@ -615,7 +616,7 @@ def set_time_series_active_power_predefined(
                 loads_ts_df = timeseries_import.electricity_demand_oedb(
                     edisgo_obj=self,
                     scenario=kwargs.get("scenario"),
-                    engine=kwargs.get("engine", toep_engine()),
+                    engine=self.engine,
                     timeindex=timeindex,
                     load_names=conventional_loads_names,
                 )
@@ -1002,9 +1003,7 @@ def import_generators(self, generator_scenario=None, **kwargs):
         Other Parameters
         ----------------
         kwargs :
-            In case you are using new ding0 grids, where the LV is geo-referenced, a
-            database engine needs to be provided through keyword argument `engine`.
-            In case you are using old ding0 grids, where the LV is not geo-referenced,
+            If you are using old ding0 grids, where the LV is not geo-referenced,
             you can check :func:`edisgo.io.generators_import.oedb_legacy` for possible
             keyword arguments.
@@ -1016,7 +1015,7 @@ def import_generators(self, generator_scenario=None, **kwargs):
         else:
             generators_import.oedb(
                 edisgo_object=self,
-                engine=kwargs.get("engine", toep_engine()),
+                engine=self.engine,
                 scenario=generator_scenario,
             )
@@ -1950,9 +1949,8 @@ def _aggregate_time_series(attribute, groups, naming):

     def import_electromobility(
         self,
-        data_source: str,
+        data_source: str = "oedb",
         scenario: str = None,
-        engine: Engine = None,
         charging_processes_dir: PurePath | str = None,
         potential_charging_points_dir: PurePath | str = None,
         import_electromobility_data_kwds=None,
@@ -1994,10 +1992,8 @@ def import_electromobility(
             * "oedb"

                 Electromobility data is obtained from the `OpenEnergy DataBase
-                `_.
-
-                This option requires that the parameters `scenario` and `engine` are
-                provided.
+                `_ or other eGon-data
+                databases depending on the provided Engine.

             * "directory"
@@ -2007,9 +2003,6 @@ def import_electromobility(
         scenario : str
             Scenario for which to retrieve electromobility data in case `data_source`
             is set to "oedb". Possible options are "eGon2035" and "eGon100RE".
-        engine : :sqlalchemy:`sqlalchemy.Engine`
-            Database engine. Needs to be provided in case `data_source` is set to
-            "oedb".
         charging_processes_dir : str or pathlib.PurePath
             Directory holding data on charging processes (standing times, charging
             demand, etc. per vehicle), including metadata, from SimBEV.
@@ -2071,7 +2064,7 @@ def import_electromobility(
             import_electromobility_from_oedb(
                 self,
                 scenario=scenario,
-                engine=engine,
+                engine=self.engine,
                 **import_electromobility_data_kwds,
             )
         elif data_source == "directory":
@@ -2164,10 +2157,11 @@ def apply_charging_strategy(self, strategy="dumb", **kwargs):
         """
         charging_strategy(self, strategy=strategy, **kwargs)

-    def import_heat_pumps(self, scenario, engine, timeindex=None, import_types=None):
+    def import_heat_pumps(self, scenario, timeindex=None, import_types=None):
         """
-        Gets heat pump data for specified scenario from oedb and integrates the heat
-        pumps into the grid.
+        Gets heat pump data for specified scenario from the OEDB or other eGon-data
+        databases depending on the provided Engine and integrates the heat pumps into
+        the grid.

         Besides heat pump capacity the heat pump's COP and heat demand to be served
         are as well retrieved.
@@ -2222,8 +2216,6 @@ def import_heat_pumps(self, scenario, engine, timeindex=None, import_types=None)
         scenario : str
             Scenario for which to retrieve heat pump data. Possible options are
             'eGon2035' and 'eGon100RE'.
-        engine : :sqlalchemy:`sqlalchemy.Engine`
-            Database engine.
         timeindex : :pandas:`pandas.DatetimeIndex` or None
             Specifies time steps for which to set COP and heat demand data. Leap years
             can currently not be handled. In case the given
@@ -2264,7 +2256,7 @@ def import_heat_pumps(self, scenario, engine, timeindex=None, import_types=None)
             year = tools.get_year_based_on_scenario(scenario)
             return self.import_heat_pumps(
                 scenario,
-                engine,
+                self.engine,
                 timeindex=pd.date_range(f"1/1/{year}", periods=8760, freq="H"),
                 import_types=import_types,
             )
@@ -2272,7 +2264,7 @@ def import_heat_pumps(self, scenario, engine, timeindex=None, import_types=None)
         integrated_heat_pumps = import_heat_pumps_oedb(
             edisgo_object=self,
             scenario=scenario,
-            engine=engine,
+            engine=self.engine,
             import_types=import_types,
         )
         if len(integrated_heat_pumps) > 0:
@@ -2280,7 +2272,7 @@ def import_heat_pumps(self, scenario, engine, timeindex=None, import_types=None)
                 self,
                 "oedb",
                 heat_pump_names=integrated_heat_pumps,
-                engine=engine,
+                engine=self.engine,
                 scenario=scenario,
                 timeindex=timeindex,
             )
@@ -2288,7 +2280,7 @@ def import_heat_pumps(self, scenario, engine, timeindex=None, import_types=None)
                 self,
                 "oedb",
                 heat_pump_names=integrated_heat_pumps,
-                engine=engine,
+                engine=self.engine,
                 timeindex=timeindex,
             )
@@ -2336,7 +2328,7 @@ def apply_heat_pump_operating_strategy(
         """
         hp_operating_strategy(self, strategy=strategy, heat_pump_names=heat_pump_names)

-    def import_dsm(self, scenario: str, engine: Engine, timeindex=None):
+    def import_dsm(self, scenario: str, timeindex=None):
         """
         Gets industrial and CTS DSM profiles from the `OpenEnergy DataBase
         `_.
@@ -2355,8 +2347,6 @@ def import_dsm(self, scenario: str, engine: Engine, timeindex=None):
         scenario : str
             Scenario for which to retrieve DSM data. Possible options are 'eGon2035'
             and 'eGon100RE'.
-        engine : :sqlalchemy:`sqlalchemy.Engine`
-            Database engine.
         timeindex : :pandas:`pandas.DatetimeIndex` or None
             Specifies time steps for which to get data. Leap years can currently not
             be handled. In case the given timeindex contains a leap year, the data will be
@@ -2369,7 +2359,7 @@ def import_dsm(self, scenario: str, engine: Engine, timeindex=None):
         """
         dsm_profiles = dsm_import.oedb(
-            edisgo_obj=self, scenario=scenario, engine=engine, timeindex=timeindex
+            edisgo_obj=self, scenario=scenario, engine=self.engine, timeindex=timeindex
         )
         self.dsm.p_min = dsm_profiles["p_min"]
         self.dsm.p_max = dsm_profiles["p_max"]
@@ -2379,7 +2369,6 @@ def import_dsm(self, scenario: str, engine: Engine, timeindex=None):
     def import_home_batteries(
         self,
         scenario: str,
-        engine: Engine,
     ):
         """
         Gets home battery data for specified scenario and integrates the batteries into
@@ -2390,7 +2379,8 @@ def import_home_batteries(
         between two scenarios: 'eGon2035' and 'eGon100RE'.

         The data is retrieved from the
-        `open energy platform `_.
+        `open energy platform `_ or other eGon-data
+        databases depending on the given Engine.

         The batteries are integrated into the grid (added to
         :attr:`~.network.topology.Topology.storage_units_df`) based on their building
@@ -2407,14 +2397,12 @@ def import_home_batteries(
         scenario : str
             Scenario for which to retrieve home battery data. Possible options are
             'eGon2035' and 'eGon100RE'.
-        engine : :sqlalchemy:`sqlalchemy.Engine`
-            Database engine.

         """
         home_batteries_oedb(
             edisgo_obj=self,
             scenario=scenario,
-            engine=engine,
+            engine=self.engine,
         )

     def plot_mv_grid_topology(self, technologies=False, **kwargs):
diff --git a/edisgo/io/db.py b/edisgo/io/db.py
index a2fc1ac8c..5b46f2d9f 100644
--- a/edisgo/io/db.py
+++ b/edisgo/io/db.py
@@ -1,6 +1,9 @@
 from __future__ import annotations

+import importlib.util
 import logging
+import os
+import re

 from contextlib import contextmanager
 from pathlib import Path
@@ -149,17 +152,24 @@ def ssh_tunnel(cred: dict) -> str:
     return str(server.local_bind_port)


-def engine(path: Path | str = None, ssh: bool = False) -> Engine:
+def engine(
+    path: Path | str = None, ssh: bool = False, token: Path | str = None
+) -> Engine:
     """
     Engine for local or remote database.

     Parameters
     ----------
-    path : str
+    path : str or pathlib.Path, optional (default=None)
         Path to configuration YAML file of egon-data database.
-    ssh : bool
+    ssh : bool (default=False)
         If True try to establish ssh tunnel from given information within the
         configuration YAML. If False try to connect to local database.
+    token : str or pathlib.Path, optional (default=None)
+        Token for the database connection or path to a text file containing the token.
+        If None, the default token file TOEP_TOKEN.txt in the config folder
+        will be used. If the default token file is not found, no token
+        will be used and the connection will be established without a token.
     Returns
     -------
@@ -169,9 +179,50 @@ def engine(path: Path | str = None, ssh: bool = False) -> Engine:
     :sqlalchemy:`sqlalchemy.Engine`
         Database engine

     """
     if path is None:
+        # Github Actions KHs token
+        if "TOEP_TOKEN_KH" in os.environ:
+            token = os.environ["TOEP_TOKEN_KH"]
+
+            read = True
+        else:
+            read = False
+
+        if token is None:
+            spec = importlib.util.find_spec("edisgo")
+            token = Path(spec.origin).resolve().parent / "config" / "TOEP_TOKEN.txt"
+
+            if token.is_file():
+                logger.info(f"Getting OEP token from file {token}.")
+
+                with open(token) as file:
+                    token = file.read().strip()
+
+                read = True
+        database_url = "toep.iks.cs.ovgu.de"
+
+        msg = ""
+
+        if not read:
+            msg = f"Token file {token} not found"
+            token = ""
+        # Check if the token format is valid
+        elif not re.match(r"^[a-f0-9]{40}$", token):
+            msg = (
+                f"Invalid token format for token {token}. A 40 character "
+                f"hexadecimal string was expected"
+            )
+            token = ""
+
+        if msg:
+            logger.warning(
+                f"{msg}. Connecting to {database_url} without a user token. This may "
+                f"cause connection errors due to connection limitations. Consider "
+                f"setting up an OEP account and providing your user token."
+            )
+
         return create_engine(
-            "postgresql+oedialect://:@" f"{database_url}",
+            f"postgresql+oedialect://:{token}@{database_url}",
             echo=False,
         )
diff --git a/edisgo/tools/config.py b/edisgo/tools/config.py
index 7b42fdf5c..6cf74c77c 100644
--- a/edisgo/tools/config.py
+++ b/edisgo/tools/config.py
@@ -130,6 +130,8 @@ class Config:
     """

     def __init__(self, **kwargs):
+        self._engine = kwargs.get("engine", None)
+
         if not kwargs.get("from_json", False):
             self._data = self.from_cfg(kwargs.get("config_path", "default"))
         else:
@@ -164,13 +166,17 @@ def _set_db_mappings(self) -> None:
         """
         Sets the database table and schema mappings by retrieving alias dictionaries.
         """
-        name_mapping, schema_mapping = self.get_database_alias_dictionaries()
+        if self._engine is not None and "toep.iks.cs.ovgu.de" in self._engine.url.host:
+            name_mapping, schema_mapping = self.get_database_alias_dictionaries()
+        else:
+            name_mapping = schema_mapping = {}
+
         self.db_table_mapping = name_mapping
         self.db_schema_mapping = schema_mapping

     def get_database_alias_dictionaries(self) -> tuple[dict[str, str], dict[str, str]]:
         """
-        Retrieves the database alias dictionaries for table and schema mappings.
+        Retrieves the OEP database alias dictionaries for table and schema mappings.

         Returns
         -------
@@ -181,20 +187,16 @@ def get_database_alias_dictionaries(self) -> tuple[dict[str, str], dict[str, str
             - schema_mapping: A dictionary mapping source schema names to target
               schema names.
         """
-        OEP_CONNECTION = "postgresql+oedialect://:@{platform}"
-        platform = "toep.iks.cs.ovgu.de"
-        conn_str = OEP_CONNECTION.format(platform=platform)
-        engine = sa.create_engine(conn_str)
         dictionary_schema_name = (
             "model_draft"  # Replace with the actual schema name if needed
         )
         dictionary_module_name = f"saio.{dictionary_schema_name}"
-        register_schema(dictionary_schema_name, engine)
+        register_schema(dictionary_schema_name, self._engine)
         dictionary_table_name = "edut_00"
         dictionary_table = importlib.import_module(dictionary_module_name).__getattr__(
             dictionary_table_name
         )
-        with session_scope_egon_data(engine) as session:
+        with session_scope_egon_data(self._engine) as session:
             query = session.query(dictionary_table)
             dictionary_entries = query.all()
             name_mapping = {
@@ -228,22 +230,22 @@ def import_tables_from_oep(
         list of sqlalchemy.Table
             A list of SQLAlchemy Table objects corresponding to the imported tables.
         """
+        tables = []
+
         if "toep" in engine.url.host:
             schema = self.db_schema_mapping.get(schema_name)
             saio.register_schema(schema, engine)
-            tables = []
             for table in table_names:
                 table = self.db_table_mapping.get(table)
                 module_name = f"saio.{schema}"
                 tables.append(importlib.import_module(module_name).__getattr__(table))
-            return tables
         else:
             saio.register_schema(schema_name, engine)
-            tables = []
             for table in table_names:
                 module_name = f"saio.{schema_name}"
                 tables.append(importlib.import_module(module_name).__getattr__(table))
-            return tables
+
+        return tables

     def from_cfg(self, config_path=None):
         """
@@ -303,21 +305,28 @@ def from_cfg(self, config_path=None):
         config_dict["demandlib"]["day_start"] = datetime.datetime.strptime(
             config_dict["demandlib"]["day_start"], "%H:%M"
         )
+
         config_dict["demandlib"]["day_start"] = datetime.time(
             config_dict["demandlib"]["day_start"].hour,
             config_dict["demandlib"]["day_start"].minute,
         )
+
         config_dict["demandlib"]["day_end"] = datetime.datetime.strptime(
             config_dict["demandlib"]["day_end"], "%H:%M"
         )
+
         config_dict["demandlib"]["day_end"] = datetime.time(
             config_dict["demandlib"]["day_end"].hour,
             config_dict["demandlib"]["day_end"].minute,
         )
-        (
-            config_dict["db_tables_dict"],
-            config_dict["db_schema_dict"],
-        ) = self.get_database_alias_dictionaries()
+
+        if self._engine is not None and "toep.iks.cs.ovgu.de" in self._engine.url.host:
+            config_dict["db_tables_dict"], config_dict["db_schema_dict"] = (
+                self.get_database_alias_dictionaries()
+            )
+        else:
+            config_dict["db_tables_dict"] = config_dict["db_schema_dict"] = {}
+
         return config_dict

     def to_json(self, directory, filename=None):
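
Usage note (a minimal sketch, not part of the patch): the snippet below illustrates how the reworked engine handling introduced above might be used. It assumes `from edisgo import EDisGo` and `from edisgo.io.db import engine` are the correct import paths, that the grid and configuration file paths are placeholders, and that a valid 40-character OEP token is available via the TOEP_TOKEN_KH environment variable or edisgo/config/TOEP_TOKEN.txt.

    from edisgo import EDisGo
    from edisgo.io.db import engine as db_engine

    # Default behaviour: EDisGo builds the OEP engine itself (toep.iks.cs.ovgu.de),
    # resolving the user token from TOEP_TOKEN_KH or edisgo/config/TOEP_TOKEN.txt.
    edisgo_obj = EDisGo(ding0_grid="path/to/ding0_grid")  # placeholder path

    # Alternative: pass an explicit engine, e.g. for an eGon-data database described
    # by an egon-data configuration YAML, with the SSH tunnel set up externally.
    egon_engine = db_engine(path="path/to/egon-data-configuration.yaml", ssh=False)
    edisgo_obj = EDisGo(ding0_grid="path/to/ding0_grid", engine=egon_engine)

    # Scenario imports now reuse edisgo_obj.engine instead of a per-call engine kwarg.
    edisgo_obj.import_heat_pumps(scenario="eGon2035")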
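A second sketch, covering the token and alias-mapping behaviour added in edisgo/io/db.py and edisgo/tools/config.py. This is an assumption-laden illustration, not part of the patch: it presumes Config can be instantiated standalone with the new `engine` keyword and that network access to the OEP is available when an OEP engine is passed.

    from edisgo.io.db import engine as db_engine
    from edisgo.tools.config import Config

    # engine() resolves the user token from the TOEP_TOKEN_KH environment variable or
    # from edisgo/config/TOEP_TOKEN.txt; anything that is not a 40-character
    # hexadecimal string is discarded with a warning and an anonymous connection to
    # toep.iks.cs.ovgu.de is used instead.
    oep_engine = db_engine()

    # With an engine pointing at toep.iks.cs.ovgu.de, Config queries the alias table
    # (model_draft.edut_00) to fill the table and schema mappings used by
    # import_tables_from_oep.
    config_oep = Config(engine=oep_engine)

    # Without an engine, or with an engine for another eGon-data database, the OEP
    # call is skipped, the mappings stay empty, and the original schema and table
    # names are used as-is.
    config_other = Config(engine=None)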