From 79a297b11f24d287f987b280f5b8e335bb388ae6 Mon Sep 17 00:00:00 2001
From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com>
Date: Fri, 20 Sep 2024 21:52:51 +0530
Subject: [PATCH] Update evolutionary.md

MOO docs update.
---
 .../src/optimization_packages/evolutionary.md | 33 +++++++++++++++++++
 1 file changed, 33 insertions(+)

diff --git a/docs/src/optimization_packages/evolutionary.md b/docs/src/optimization_packages/evolutionary.md
index 9fa582c74..9ed2a2645 100644
--- a/docs/src/optimization_packages/evolutionary.md
+++ b/docs/src/optimization_packages/evolutionary.md
@@ -41,3 +41,36 @@ f = OptimizationFunction(rosenbrock)
 prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0])
 sol = solve(prob, Evolutionary.CMAES(μ = 40, λ = 100))
 ```
+
+## Multi-objective optimization
+The Rosenbrock and Ackley functions can be optimized simultaneously using `Evolutionary.NSGA2()` as follows:
+
+```@example MOO-Evolutionary
+using Optimization, OptimizationEvolutionary, ForwardDiff
+function func(x, p = nothing)
+    f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2  # Rosenbrock function
+    f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0  # Ackley function
+    return [f1, f2]
+end
+initial_guess = [1.0, 1.0]
+# Jacobian of the objective vector, computed with ForwardDiff
+function gradient_multi_objective(x, p = nothing)
+    ForwardDiff.jacobian(func, x)
+end
+obj_func = MultiObjectiveOptimizationFunction(func, jac = gradient_multi_objective)
+algorithm = OptimizationEvolutionary.NSGA2()
+problem = OptimizationProblem(obj_func, initial_guess)
+result = solve(problem, algorithm)
+```
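+
+NSGA2 evolves a population toward a set of Pareto-optimal (non-dominated) trade-offs between the two objectives rather than a single minimizer. As a rough illustration of what "non-dominated" means, the sketch below (plain Julia, not part of the Optimization.jl API; `dominates` and `pareto_front` are hypothetical helpers) filters randomly sampled evaluations of `func` down to their Pareto front:
+
+```@example MOO-Evolutionary
+# a dominates b (for minimization) when it is no worse in every objective
+# and strictly better in at least one
+dominates(a, b) = all(a .<= b) && any(a .< b)
+# keep only the points that no other sampled point dominates
+pareto_front(points) = [p for p in points if !any(q -> dominates(q, p), points)]
+candidates = [func(2 .* rand(2) .- 1) for _ in 1:200]  # random points in [-1, 1]^2
+front = pareto_front(candidates)
+```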