forked from dagush/WholeBrain
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathTest_L2L_HelloWorld2.py
77 lines (65 loc) · 3.55 KB
/
Test_L2L_HelloWorld2.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# =======================================================================
# =======================================================================
# Test for L2L, copied and pasted from l2l-fun-ga.py
# =======================================================================
# =======================================================================
# import os
# import yaml
import numpy as np
from l2l.optimizees.functions import tools as function_tools
from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions
from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee
from l2l.optimizers.evolution import GeneticAlgorithmOptimizer, GeneticAlgorithmParameters
from l2l.optimizers.gridsearch import GridSearchOptimizer, GridSearchParameters
from l2l.utils.experiment import Experiment
def main():
    """Run an L2L grid-search optimization over the noisy Ackley-2d benchmark.

    Prepares an ``Experiment`` rooted at ``Data_Produced/L2L``, builds the
    Ackley-2d benchmark optimizee, sweeps its coordinate bounds with a
    ``GridSearchOptimizer`` (30 divisions per axis), and prints the best
    individual found.
    """
    experiment = Experiment(root_dir_path='Data_Produced/L2L')
    name = 'L2L-FUN-GS'
    traj, _ = experiment.prepare_experiment(name=name, log_stdout=True, multiprocessing=False)

    # ---------------------------------------------------------------------
    # Benchmark function.
    # The Ackley function has a large hole at the centre surrounded by small
    # hill-like regions; algorithms can get trapped in one of its many local
    # minima.  Reference: https://www.sfu.ca/~ssurjano/ackley.html
    # Uses the recommended parameter values a = 20, b = 0.2, c = 2*pi.
    # ---------------------------------------------------------------------
    function_id = 4  # index selecting Ackley2d in BenchmarkedFunctions
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    # ---------------------------------------------------------------------
    # Inner-loop simulator (the optimizee evaluated at each grid point).
    # ---------------------------------------------------------------------
    optimizee_seed = 100
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed)

    # ---------------------------------------------------------------------
    # Outer-loop optimizer: exhaustive grid search.
    # 'coords' sweeps each axis from the optimizee's lower bound to its
    # upper bound in n_grid_divs_per_axis steps.
    # ---------------------------------------------------------------------
    n_grid_divs_per_axis = 30
    parameters = GridSearchParameters(param_grid={
        'coords': (optimizee.bound[0], optimizee.bound[1], n_grid_divs_per_axis)
    })
    optimizer = GridSearchOptimizer(traj,
                                    optimizee_create_individual=optimizee.create_individual,
                                    optimizee_fitness_weights=(-0.1,),  # negative weight => minimize!
                                    parameters=parameters)

    # ---------------------------------------------------------------------
    # Run the optimization and report the best individual found.
    # ---------------------------------------------------------------------
    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizee_parameters=parameters)
    experiment.end_experiment(optimizer)
    print(f"best: {experiment.optimizer.best_individual['coords']}")


if __name__ == '__main__':
    main()