
Commit

better example
kdziedzic68 committed Oct 31, 2024
1 parent 65fc3cf commit 12e900b
Showing 3 changed files with 11 additions and 8 deletions.
@@ -8,10 +8,13 @@ config:
optimize: true
range:
- 768
- 768
- 1536
encoding_format: float
- model: "text-embedding-3-large"
options:
dimensions:
768
optimize: true
range:
- 512
- 1024
encoding_format: float
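For readers following along, here is a minimal sketch of how an `optimize: true` entry with a `range: [low, high]` pair can be turned into a concrete value for each trial. The config shape and the `suggest_dimensions` helper are assumptions for illustration only, not code taken from ragbits.

```python
# A minimal sketch, assuming a config node of the form
# dimensions: {optimize: true, range: [low, high]}.
# The helper name below is hypothetical and not part of ragbits.
import optuna
from omegaconf import OmegaConf

embedder_cfg = OmegaConf.create(
    {"dimensions": {"optimize": True, "range": [512, 1024]}, "encoding_format": "float"}
)

def suggest_dimensions(trial: optuna.Trial) -> int:
    low, high = embedder_cfg.dimensions.range
    # Optuna picks a concrete dimension inside the configured range for this trial.
    return trial.suggest_int("dimensions", low, high)
```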
2 changes: 1 addition & 1 deletion examples/evaluation/document-search/optimize.py
@@ -24,7 +24,7 @@ def main(config: DictConfig) -> None:
     pipeline_class = get_cls_from_config(config.pipeline.type, module)
     metrics = metric_set_factory(config.metrics)
 
-    optimization_cfg = OmegaConf.create({"direction": "maximize", "n_trials": 3})
+    optimization_cfg = OmegaConf.create({"direction": "maximize", "n_trials": 10})
     optimizer = Optimizer(cfg=optimization_cfg)
     configs_with_scores = optimizer.optimize(
         pipeline_class=pipeline_class,
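The edit above raises the number of Optuna trials from 3 to 10. As a rough sketch of what a `{"direction": "maximize", "n_trials": 10}` config amounts to (the internals of `Optimizer` are not shown in this diff, so plain Optuna is used here as an assumed equivalent):

```python
# Assumed equivalent of the optimization config in plain Optuna; the real
# Optimizer wraps this, but its internals are outside this diff.
import optuna
from omegaconf import OmegaConf

optimization_cfg = OmegaConf.create({"direction": "maximize", "n_trials": 10})

def objective(trial: optuna.Trial) -> float:
    # Stand-in for evaluating a pipeline config and summing its metric values.
    return trial.suggest_float("score_proxy", 0.0, 1.0)

study = optuna.create_study(direction=optimization_cfg.direction)
study.optimize(objective, n_trials=optimization_cfg.n_trials)
print(study.best_value)
```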
10 changes: 5 additions & 5 deletions packages/ragbits-evaluate/src/ragbits/evaluate/optimizer.py
@@ -57,7 +57,7 @@ def _objective(
         metrics: MetricSet,
     ) -> float:
         config_for_trial = deepcopy(config_with_params)
-        self._get_pipeline_config_from_parametrized(cfg=config_for_trial, trial=trial, ancestors=[])
+        self._set_values_for_optimized_params(cfg=config_for_trial, trial=trial, ancestors=[])
         pipeline = pipeline_class(config_for_trial)
         metrics_values = self._score(pipeline=pipeline, dataloader=dataloader, metrics=metrics)
         score = sum(metrics_values.values())
@@ -73,7 +73,7 @@ def _score(self, pipeline: EvaluationPipeline, dataloader: DataLoader, metrics:
         results = event_loop.run_until_complete(evaluator.compute(pipeline=pipeline, dataloader=dataloader, metrics=metrics))
         return results["metrics"]
 
-    def _get_pipeline_config_from_parametrized(self, cfg: DictConfig, trial: optuna.Trial, ancestors: list[str]) -> None:
+    def _set_values_for_optimized_params(self, cfg: DictConfig, trial: optuna.Trial, ancestors: list[str]) -> None:
         """
         Modifies the original dictionary in place, replacing values for keys that contain
         'opt_params_range' with random numbers between the specified range [A, B] or for
@@ -99,14 +99,14 @@ def _get_pipeline_config_from_parametrized(self, cfg: DictConfig, trial: optuna.
                     choice = trial.suggest_categorical(name=param_id, choices=choices)
                     choice = OmegaConf.create(json.loads(choice)) if _is_json_string(choice) else choice
                     if isinstance(choice, DictConfig):
-                        self._get_pipeline_config_from_parametrized(choice, trial, ancestors + [key])
+                        self._set_values_for_optimized_params(choice, trial, ancestors + [key])
                     cfg[key] = choice
                 else:
-                    self._get_pipeline_config_from_parametrized(value, trial, ancestors + [key])
+                    self._set_values_for_optimized_params(value, trial, ancestors + [key])
             elif isinstance(value, ListConfig):
                 for param in value:
                     if isinstance(param, DictConfig):
-                        self._get_pipeline_config_from_parametrized(param, trial, ancestors + [key])
+                        self._set_values_for_optimized_params(param, trial, ancestors + [key])
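To make the renamed `_set_values_for_optimized_params` easier to follow: the method walks the config in place and, for nodes marked as optimizable, substitutes a value suggested by the Optuna trial (categorical choices may arrive as JSON strings and are parsed back into `DictConfig`s, as the diff shows). Below is a simplified, self-contained sketch of the same idea; the `optimize`/`range` convention and the helper itself are assumptions for illustration, not the actual ragbits implementation.

```python
# Simplified sketch of in-place substitution of optimized parameters.
# The {"optimize": true, "range": [low, high]} convention is assumed for
# illustration; the real method in ragbits handles more cases (e.g. choices).
import optuna
from omegaconf import DictConfig, ListConfig, OmegaConf

def set_optimized_values(cfg: DictConfig, trial: optuna.Trial, ancestors: list[str]) -> None:
    for key, value in cfg.items():
        param_id = ".".join(ancestors + [str(key)])
        if isinstance(value, DictConfig):
            if value.get("optimize") and "range" in value:
                low, high = value.range
                # Replace the marker node with a concrete value for this trial.
                cfg[key] = trial.suggest_int(param_id, int(low), int(high))
            else:
                set_optimized_values(value, trial, ancestors + [str(key)])
        elif isinstance(value, ListConfig):
            for item in value:
                if isinstance(item, DictConfig):
                    set_optimized_values(item, trial, ancestors + [str(key)])

def objective(trial: optuna.Trial) -> float:
    cfg = OmegaConf.create({"embedder": {"dimensions": {"optimize": True, "range": [512, 1024]}}})
    set_optimized_values(cfg, trial, [])
    return float(cfg.embedder.dimensions)  # stand-in for a real pipeline score

study = optuna.create_study(direction="maximize")
study.optimize(objective, n_trials=3)
```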


