From 44b4a861bd2a35c78cac8963f41d0a1d59a666d8 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Wed, 26 Jun 2024 17:35:57 +0530 Subject: [PATCH 01/17] Updated loss function in OptimizationEvolutionary.jl The _loss function accommodates the MultiObjectiveOptimizationFunction in the __solve method. --- .../src/OptimizationEvolutionary.jl | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl index f31cf348c..ee8e32ebc 100644 --- a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl +++ b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl @@ -125,8 +125,13 @@ function SciMLBase.__solve(cache::OptimizationCache{ f = cache.f _loss = function (θ) - x = f(θ, cache.p, cur...) - return first(x) + if isa(f, MultiObjectiveOptimizationFunction) + x = f(θ, cache.p, cur...) + return x + else + x = f(θ, cache.p, cur...) + return first(x) + end end opt_args = __map_optimizer_args(cache, cache.opt; callback = _cb, cache.solver_args..., From 49ba1c1518605afe66fdc0f0312c4f7b4b42d383 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Tue, 16 Jul 2024 18:26:55 +0530 Subject: [PATCH 02/17] Traits changed for NSGA2 in OptimizationEvolutionary.jl Set the traits for NSGA2 explicitly --- lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl index ee8e32ebc..5bd74ac01 100644 --- a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl +++ b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl @@ -8,6 +8,7 @@ SciMLBase.allowsbounds(opt::Evolutionary.AbstractOptimizer) = true SciMLBase.allowsconstraints(opt::Evolutionary.AbstractOptimizer) = true SciMLBase.supports_opt_cache_interface(opt::Evolutionary.AbstractOptimizer) = true SciMLBase.requiresgradient(opt::Evolutionary.AbstractOptimizer) = false +SciMLBase.requiresgradient(opt::Evolutionary.NSGA2) = false SciMLBase.requireshessian(opt::Evolutionary.AbstractOptimizer) = false SciMLBase.requiresconsjac(opt::Evolutionary.AbstractOptimizer) = false SciMLBase.requiresconshess(opt::Evolutionary.AbstractOptimizer) = false From 0bce2a80948dc368617c80a0a8264a1c0114a99e Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 26 Jul 2024 21:39:20 +0530 Subject: [PATCH 03/17] Update loss function in OptimizationBBO.jl Updated the __loss function to handle MOOFunction results. 
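
For context on what these loss-function changes dispatch on: a MultiObjectiveOptimizationFunction wraps an objective that returns a vector (or, in the BBO tests later in this series, a tuple) of fitness values rather than a scalar, which is why _loss must pass the whole value through instead of taking first(x). A minimal sketch of the calling pattern being enabled, assembled from the NSGA2 test problems added later in the series (no Jacobian is attached, since the NSGA2 trait set above marks it as not requiring gradients; the initial point is illustrative):

    using Optimization, OptimizationEvolutionary
    using Optimization.SciMLBase: MultiObjectiveOptimizationFunction

    # Two objectives returned as a vector; p is unused here.
    sphere_rastrigin(x, p = nothing) = [sum(x .^ 2),
                                        sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10)]

    mof  = MultiObjectiveOptimizationFunction(sphere_rastrigin)
    prob = OptimizationProblem(mof, [0.25, 0.25])
    sol  = solve(prob, OptimizationEvolutionary.NSGA2())
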
--- lib/OptimizationBBO/src/OptimizationBBO.jl | 35 +++++++++++++++------- 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index f4487c0ba..4b6c23431 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -142,17 +142,32 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ maxtime = Optimization._check_and_convert_maxtime(cache.solver_args.maxtime) _loss = function (θ) - if cache.callback === Optimization.DEFAULT_CALLBACK && - cache.data === Optimization.DEFAULT_DATA - return first(cache.f(θ, cache.p)) - elseif cache.callback === Optimization.DEFAULT_CALLBACK - return first(cache.f(θ, cache.p, cur...)) - elseif cache.data !== Optimization.DEFAULT_DATA - x = cache.f(θ, cache.p) - return first(x) + if isa(f, MultiObjectiveOptimizationFunction) + if cache.callback === Optimization.DEFAULT_CALLBACK && + cache.data === Optimization.DEFAULT_DATA + return cache.f(θ, cache.p) + elseif cache.callback === Optimization.DEFAULT_CALLBACK + return cache.f(θ, cache.p, cur...) + elseif cache.data !== Optimization.DEFAULT_DATA + x = cache.f(θ, cache.p) + return x + else + x = cache.f(θ, cache.p, cur...) + return first(x) + end else - x = cache.f(θ, cache.p, cur...) - return first(x) + if cache.callback === Optimization.DEFAULT_CALLBACK && + cache.data === Optimization.DEFAULT_DATA + return first(cache.f(θ, cache.p)) + elseif cache.callback === Optimization.DEFAULT_CALLBACK + return first(cache.f(θ, cache.p, cur...)) + elseif cache.data !== Optimization.DEFAULT_DATA + x = cache.f(θ, cache.p) + return first(x) + else + x = cache.f(θ, cache.p, cur...) + return first(x) + end end end From 4142699dc1a838ac6e565e022cf2c902e3d00b71 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 15 Aug 2024 03:43:13 +0530 Subject: [PATCH 04/17] Added struct for borg_moea OptimizationBBO.jl Added the struct to use BB0_borg_moea. --- lib/OptimizationBBO/src/OptimizationBBO.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index 4b6c23431..b7fd40dda 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -15,6 +15,10 @@ for j in string.(BlackBoxOptim.SingleObjectiveMethodNames) eval(Meta.parse("export BBO_" * j)) end +Base.@kwdef struct BBO_borg_moea <: BBO + method = :borg_moea +end + function decompose_trace(opt::BlackBoxOptim.OptRunController, progress) if progress maxiters = opt.max_steps From 770017efee8724e855706f61f60d249124df3f04 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 15 Aug 2024 04:05:03 +0530 Subject: [PATCH 05/17] Update OptimizationBBO.jl Exported the new borg_moea struct. 
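
With the wrapper defined and now exported, the Borg MOEA can be selected like any other BBO_* optimizer. A condensed usage sketch mirroring the BBO tests added later in the series (note that ParetoFitnessScheme and the NumDimensions/FitnessScheme keywords come from BlackBoxOptim itself, which is why those tests also import BlackBoxOptim directly; the objective pair here is illustrative):

    using Optimization, OptimizationBBO, BlackBoxOptim
    using Optimization.SciMLBase: MultiObjectiveOptimizationFunction

    # Two objectives returned as a tuple, as in the BBO tests later in the series.
    f(x, p) = (sum(x .^ 2), sum((x .- 2.0) .^ 2))

    mof  = MultiObjectiveOptimizationFunction(f)
    prob = Optimization.OptimizationProblem(mof, [0.25, 0.25]; lb = [0.0, 0.0], ub = [2.0, 2.0])
    sol  = solve(prob, OptimizationBBO.BBO_borg_moea();
                 NumDimensions = 2,
                 FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true))
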
--- lib/OptimizationBBO/src/OptimizationBBO.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index b7fd40dda..f3d12977c 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -18,6 +18,7 @@ end Base.@kwdef struct BBO_borg_moea <: BBO method = :borg_moea end +export BBO_borg_moea function decompose_trace(opt::BlackBoxOptim.OptRunController, progress) if progress From eadec858674e73ce1d85f31a0d7ae6d2b54de63d Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 15 Aug 2024 04:07:47 +0530 Subject: [PATCH 06/17] Loss function updated for BBO OptimizationBBO.jl Corrected the loss function check for MultiObjectiveOptimizationFunction. --- lib/OptimizationBBO/src/OptimizationBBO.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index f3d12977c..f7e0231e5 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -3,6 +3,7 @@ module OptimizationBBO using Reexport import Optimization import BlackBoxOptim, Optimization.SciMLBase +import Optimization.SciMLBase: MultiObjectiveOptimizationFunction abstract type BBO end @@ -147,7 +148,7 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ maxtime = Optimization._check_and_convert_maxtime(cache.solver_args.maxtime) _loss = function (θ) - if isa(f, MultiObjectiveOptimizationFunction) + if isa(cache.f, MultiObjectiveOptimizationFunction) if cache.callback === Optimization.DEFAULT_CALLBACK && cache.data === Optimization.DEFAULT_DATA return cache.f(θ, cache.p) From e5a67043c50e74eb4b56dd88e20f19dbd9f72ed0 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 16 Aug 2024 22:22:02 +0530 Subject: [PATCH 07/17] Added tests for MOO runtests.jl Added tests for the Multi-Objective Optimization tests to the test set. 
--- lib/OptimizationBBO/test/runtests.jl | 63 +++++++++++++++++++++++++++- 1 file changed, 62 insertions(+), 1 deletion(-) diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl index bd703f0f5..7e3bb2466 100644 --- a/lib/OptimizationBBO/test/runtests.jl +++ b/lib/OptimizationBBO/test/runtests.jl @@ -1,4 +1,5 @@ -using OptimizationBBO, Optimization +using OptimizationBBO, Optimization, BlackBoxOptim +using SciMLBase: MultiObjectiveOptimizationFunction using Test @testset "OptimizationBBO.jl" begin @@ -46,4 +47,64 @@ using Test progress = true, maxtime = 5) end + + # Define the initial guess and bounds +u0 = [0.25, 0.25] +lb = [0.0, 0.0] +ub = [2.0, 2.0] + +# Define the optimizer +opt = OptimizationBBO.BBO_borg_moea() + +@testset "Multi-Objective Optimization Tests" begin + + # Test 1: Sphere and Rastrigin Functions + @testset "Sphere and Rastrigin Functions" begin + function multi_obj_func_1(x, p) + f1 = sum(x .^ 2) # Sphere function + f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) # Rastrigin function + return (f1, f2) + end + + mof_1 = MultiObjectiveOptimizationFunction(multi_obj_func_1) + prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb=lb, ub=ub) + sol_1 = solve(prob_1, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) + + @test sol_1 ≠ nothing + println("Solution for Sphere and Rastrigin: ", sol_1) + end + + # Test 2: Rosenbrock and Ackley Functions + @testset "Rosenbrock and Ackley Functions" begin + function multi_obj_func_2(x, p) + f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function + f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function + return (f1, f2) + end + + mof_2 = MultiObjectiveOptimizationFunction(multi_obj_func_2) + prob_2 = Optimization.OptimizationProblem(mof_2, u0; lb=lb, ub=ub) + sol_2 = solve(prob_2, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) + + @test sol_2 ≠ nothing + println("Solution for Rosenbrock and Ackley: ", sol_2) + end + + # Test 3: ZDT1 Function + @testset "ZDT1 Function" begin + function multi_obj_func_3(x, p) + f1 = x[1] + g = 1 + 9 * sum(x[2:end]) / (length(x) - 1) + f2 = g * (1 - sqrt(f1 / g)) + return (f1, f2) + end + + mof_3 = MultiObjectiveOptimizationFunction(multi_obj_func_3) + prob_3 = Optimization.OptimizationProblem(mof_3, u0; lb=lb, ub=ub) + sol_3 = solve(prob_3, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) + + @test sol_3 ≠ nothing + println("Solution for ZDT1: ", sol_3) + end +end end From b5a045ce84d5643e93e19bdf2a86f4acba61f86f Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 16 Aug 2024 22:26:21 +0530 Subject: [PATCH 08/17] Added tests for MOO runtests.jl Added Multi-Objective Optimization tests to the test set. 
--- lib/OptimizationEvolutionary/test/runtests.jl | 90 +++++++++++++++++++ 1 file changed, 90 insertions(+) diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl index 135588a0a..6bfc00a29 100644 --- a/lib/OptimizationEvolutionary/test/runtests.jl +++ b/lib/OptimizationEvolutionary/test/runtests.jl @@ -1,4 +1,6 @@ using OptimizationEvolutionary, Optimization, Random +using SciMLBase: MultiObjectiveOptimizationFunction +using NaNMath using Test Random.seed!(1234) @@ -56,4 +58,92 @@ Random.seed!(1234) # Make sure that both the user's trace record value, as well as `curr_u` are stored in the trace. @test haskey(sol.original.trace[end].metadata, "TESTVAL") && haskey(sol.original.trace[end].metadata, "curr_u") + + # Test Suite for Different Multi-Objective Functions +function test_multi_objective(func, initial_guess) + # Define the gradient function using ForwardDiff + function gradient_multi_objective(x, p=nothing) + ForwardDiff.jacobian(func, x) + end + + # Create an instance of MultiObjectiveOptimizationFunction + obj_func = MultiObjectiveOptimizationFunction(func, jac=gradient_multi_objective) + + # Set up the evolutionary algorithm (e.g., NSGA2) + algorithm = OptimizationEvolutionary.NSGA2() + + # Define the optimization problem + problem = OptimizationProblem(obj_func, initial_guess) + + # Solve the optimization problem + result = solve(problem, algorithm) + + return result +end + +@testset "Multi-Objective Optimization Tests" begin + + # Test 1: Sphere and Rastrigin Functions + @testset "Sphere and Rastrigin Functions" begin + function multi_objective_1(x, p=nothing)::Vector{Float64} + f1 = sum(x .^ 2) # Sphere function + f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) # Rastrigin function + return [f1, f2] + end + result = test_multi_objective(multi_objective_1, [0.25, 0.25]) + @test result ≠ nothing + println("Solution for Sphere and Rastrigin: ", result) + end + + # Test 2: Rosenbrock and Ackley Functions + @testset "Rosenbrock and Ackley Functions" begin + function multi_objective_2(x, p=nothing)::Vector{Float64} + f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function + f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function + return [f1, f2] + end + result = test_multi_objective(multi_objective_2, [1.0, 1.0]) + @test result ≠ nothing + println("Solution for Rosenbrock and Ackley: ", result) + end + + # Test 3: ZDT1 Function + @testset "ZDT1 Function" begin + function multi_objective_3(x, p=nothing)::Vector{Float64} + f1 = x[1] + g = 1 + 9 * sum(x[2:end]) / (length(x) - 1) + sqrt_arg = f1 / g + f2 = g * (1 - (sqrt_arg >= 0 ? 
sqrt(sqrt_arg) : NaN)) + return [f1, f2] + end + result = test_multi_objective(multi_objective_3, [0.5, 0.5]) + @test result ≠ nothing + println("Solution for ZDT1: ", result) + end + + # Test 4: DTLZ2 Function + @testset "DTLZ2 Function" begin + function multi_objective_4(x, p=nothing)::Vector{Float64} + f1 = (1 + sum(x[2:end] .^ 2)) * cos(x[1] * π / 2) + f2 = (1 + sum(x[2:end] .^ 2)) * sin(x[1] * π / 2) + return [f1, f2] + end + result = test_multi_objective(multi_objective_4, [0.5, 0.5]) + @test result ≠ nothing + println("Solution for DTLZ2: ", result) + end + + # Test 5: Schaffer Function N.2 + @testset "Schaffer Function N.2" begin + function multi_objective_5(x, p=nothing)::Vector{Float64} + f1 = x[1]^2 + f2 = (x[1] - 2)^2 + return [f1, f2] + end + result = test_multi_objective(multi_objective_5, [2.0]) + @test result ≠ nothing + println("Solution for Schaffer N.2: ", result) + end + +end end From 0d1631e9fa9ade019b09429441d632746010e24c Mon Sep 17 00:00:00 2001 From: Vaibhav Kumar Dixit Date: Fri, 16 Aug 2024 16:18:34 -0400 Subject: [PATCH 09/17] Apply suggestions from code review --- lib/OptimizationBBO/test/runtests.jl | 2 +- lib/OptimizationEvolutionary/test/runtests.jl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl index 7e3bb2466..a2ac14a89 100644 --- a/lib/OptimizationBBO/test/runtests.jl +++ b/lib/OptimizationBBO/test/runtests.jl @@ -1,5 +1,5 @@ using OptimizationBBO, Optimization, BlackBoxOptim -using SciMLBase: MultiObjectiveOptimizationFunction +using Optimization.SciMLBase: MultiObjectiveOptimizationFunction using Test @testset "OptimizationBBO.jl" begin diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl index 6bfc00a29..0ddf1730f 100644 --- a/lib/OptimizationEvolutionary/test/runtests.jl +++ b/lib/OptimizationEvolutionary/test/runtests.jl @@ -1,5 +1,5 @@ using OptimizationEvolutionary, Optimization, Random -using SciMLBase: MultiObjectiveOptimizationFunction +using Optimization.SciMLBase: MultiObjectiveOptimizationFunction using NaNMath using Test From 7ce124b566fce2e753197252a73eb62d358ece91 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sat, 17 Aug 2024 11:30:12 +0530 Subject: [PATCH 10/17] Removed unecessary package runtests.jl Removed NaNMath, as it wasn't needed. --- lib/OptimizationEvolutionary/test/runtests.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl index 0ddf1730f..d749afe52 100644 --- a/lib/OptimizationEvolutionary/test/runtests.jl +++ b/lib/OptimizationEvolutionary/test/runtests.jl @@ -1,6 +1,5 @@ using OptimizationEvolutionary, Optimization, Random using Optimization.SciMLBase: MultiObjectiveOptimizationFunction -using NaNMath using Test Random.seed!(1234) From b02eb68f75ee44fcb7a073ae52a93d28145bab56 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 18 Aug 2024 02:20:28 +0530 Subject: [PATCH 11/17] Regression tests added runtests.jl Added regression tests to the tests for MOO. 
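
A note on the hard-coded values asserted in the diff below: the Evolutionary test file seeds the global RNG near the top (Random.seed!(1234), visible in the earlier test diff), so the NSGA2 runs are reproducible and pointwise checks such as

    @test result.u[1][1] ≈ 0.00908 atol=1e-3

act as regression pins for this seed rather than statements about the true optima.
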
--- lib/OptimizationEvolutionary/test/runtests.jl | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl index d749afe52..173e91669 100644 --- a/lib/OptimizationEvolutionary/test/runtests.jl +++ b/lib/OptimizationEvolutionary/test/runtests.jl @@ -92,6 +92,8 @@ end result = test_multi_objective(multi_objective_1, [0.25, 0.25]) @test result ≠ nothing println("Solution for Sphere and Rastrigin: ", result) + @test result.u[1][1] ≈ 0.00908 atol=1e-3 + @test result.u[1][2] ≈ 0.02312 atol=1e-3 end # Test 2: Rosenbrock and Ackley Functions @@ -104,6 +106,8 @@ end result = test_multi_objective(multi_objective_2, [1.0, 1.0]) @test result ≠ nothing println("Solution for Rosenbrock and Ackley: ", result) + @test result.u[2][1] ≈ 1.2428 atol=1e-3 + @test result.u[2][2] ≈ 0.7091 atol=1e-3 end # Test 3: ZDT1 Function @@ -118,6 +122,8 @@ end result = test_multi_objective(multi_objective_3, [0.5, 0.5]) @test result ≠ nothing println("Solution for ZDT1: ", result) + @test result.u[1][1] ≈ -1.1669 atol=1e-3 + @test result.u[1][2] ≈ 2.3492 atol=1e-3 end # Test 4: DTLZ2 Function @@ -130,6 +136,8 @@ end result = test_multi_objective(multi_objective_4, [0.5, 0.5]) @test result ≠ nothing println("Solution for DTLZ2: ", result) + @test result.u[1][1] ≈ -1.31011 atol=1e-3 + @test result.u[2][1] ≈ -1.38852 atol=1e-3 end # Test 5: Schaffer Function N.2 @@ -139,9 +147,11 @@ end f2 = (x[1] - 2)^2 return [f1, f2] end - result = test_multi_objective(multi_objective_5, [2.0]) + result = test_multi_objective(multi_objective_5, [1.0]) @test result ≠ nothing println("Solution for Schaffer N.2: ", result) + @test result.u[1][1] ≈ 1.00000 atol=1e-3 + @test result.u[9][1] ≈ 1.44114 atol=1e-3 end end From a8299ec5bbaf0db2f208acb7e2b8d72631ba5839 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 18 Aug 2024 02:22:28 +0530 Subject: [PATCH 12/17] Regression tests added runtests.jl Added regression tests for MOO. --- lib/OptimizationBBO/test/runtests.jl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl index a2ac14a89..2c8ce38f6 100644 --- a/lib/OptimizationBBO/test/runtests.jl +++ b/lib/OptimizationBBO/test/runtests.jl @@ -72,6 +72,8 @@ opt = OptimizationBBO.BBO_borg_moea() @test sol_1 ≠ nothing println("Solution for Sphere and Rastrigin: ", sol_1) + @test sol_1.objective[1] ≈ 6.9905986e-18 atol=1e-3 + @test sol_1.objective[2] ≈ 1.7763568e-15 atol=1e-3 end # Test 2: Rosenbrock and Ackley Functions @@ -88,6 +90,8 @@ opt = OptimizationBBO.BBO_borg_moea() @test sol_2 ≠ nothing println("Solution for Rosenbrock and Ackley: ", sol_2) + @test sol_2.objective[1] ≈ 0.97438 atol=1e-3 + @test sol_2.objective[2] ≈ 0.04088 atol=1e-3 end # Test 3: ZDT1 Function @@ -105,6 +109,8 @@ opt = OptimizationBBO.BBO_borg_moea() @test sol_3 ≠ nothing println("Solution for ZDT1: ", sol_3) + @test sol_3.objective[1] ≈ 0.273445 atol=1e-3 + @test sol_3.objective[2] ≈ 0.477079 atol=1e-3 end end end From 57780fd829903509ceab0678ac1cdc796e480ec7 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 18 Aug 2024 11:48:36 +0530 Subject: [PATCH 13/17] Updated regression tests runtests.jl The regression tests have been updated to solve errors. 
--- lib/OptimizationEvolutionary/test/runtests.jl | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl index 173e91669..8592f67e4 100644 --- a/lib/OptimizationEvolutionary/test/runtests.jl +++ b/lib/OptimizationEvolutionary/test/runtests.jl @@ -89,11 +89,11 @@ end f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) # Rastrigin function return [f1, f2] end - result = test_multi_objective(multi_objective_1, [0.25, 0.25]) + result = test_multi_objective(multi_objective_1, [0.0, 1.0]) @test result ≠ nothing println("Solution for Sphere and Rastrigin: ", result) - @test result.u[1][1] ≈ 0.00908 atol=1e-3 - @test result.u[1][2] ≈ 0.02312 atol=1e-3 + @test result.u[1][1] ≈ 7.88866e-5 atol=1e-3 + @test result.u[1][2] ≈ 4.96471e-5 atol=1e-3 end # Test 2: Rosenbrock and Ackley Functions @@ -106,8 +106,8 @@ end result = test_multi_objective(multi_objective_2, [1.0, 1.0]) @test result ≠ nothing println("Solution for Rosenbrock and Ackley: ", result) - @test result.u[2][1] ≈ 1.2428 atol=1e-3 - @test result.u[2][2] ≈ 0.7091 atol=1e-3 + @test result.u[10][1] ≈ 1.0 atol=1e-3 + @test result.u[10][2] ≈ 0.999739 atol=1e-3 end # Test 3: ZDT1 Function @@ -119,11 +119,11 @@ end f2 = g * (1 - (sqrt_arg >= 0 ? sqrt(sqrt_arg) : NaN)) return [f1, f2] end - result = test_multi_objective(multi_objective_3, [0.5, 0.5]) + result = test_multi_objective(multi_objective_3, [0.25, 1.5]) @test result ≠ nothing println("Solution for ZDT1: ", result) - @test result.u[1][1] ≈ -1.1669 atol=1e-3 - @test result.u[1][2] ≈ 2.3492 atol=1e-3 + @test result.u[1][1] ≈ -0.365434 atol=1e-3 + @test result.u[1][2] ≈ 1.22128 atol=1e-3 end # Test 4: DTLZ2 Function @@ -133,11 +133,11 @@ end f2 = (1 + sum(x[2:end] .^ 2)) * sin(x[1] * π / 2) return [f1, f2] end - result = test_multi_objective(multi_objective_4, [0.5, 0.5]) + result = test_multi_objective(multi_objective_4, [0.25, 0.75]) @test result ≠ nothing println("Solution for DTLZ2: ", result) - @test result.u[1][1] ≈ -1.31011 atol=1e-3 - @test result.u[2][1] ≈ -1.38852 atol=1e-3 + @test result.u[1][1] ≈ 0.899183 atol=1e-3 + @test result.u[2][1] ≈ 0.713992 atol=1e-3 end # Test 5: Schaffer Function N.2 @@ -150,8 +150,8 @@ end result = test_multi_objective(multi_objective_5, [1.0]) @test result ≠ nothing println("Solution for Schaffer N.2: ", result) - @test result.u[1][1] ≈ 1.00000 atol=1e-3 - @test result.u[9][1] ≈ 1.44114 atol=1e-3 + @test result.u[19][1] ≈ 0.252635 atol=1e-3 + @test result.u[9][1] ≈ 1.0 atol=1e-3 end end From d0eb465dafbdba5c1407a3638b67b88f8894a004 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 19 Aug 2024 02:26:25 +0530 Subject: [PATCH 14/17] Update the optimize function call OptimizationEvolutionary.jl The optimize function call updated to handle multi-objective problems. 
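
The substantive change in the diff below is the extra second argument in the multi-objective branches: passing a sample fitness value, _loss(cache.u0), appears to let Evolutionary.optimize know the type and length of the vector-valued fitness up front, while the single-objective call shape is left untouched. Schematically (names exactly as in the patch):

    # single-objective branch (unchanged)
    Evolutionary.optimize(_loss, cache.u0, cache.opt, opt_args)

    # multi-objective branch: seed optimize with an initial fitness value
    Evolutionary.optimize(_loss, _loss(cache.u0), cache.u0, cache.opt, opt_args)
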
--- .../src/OptimizationEvolutionary.jl | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl index 5bd74ac01..a8721d391 100644 --- a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl +++ b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl @@ -145,9 +145,17 @@ function SciMLBase.__solve(cache::OptimizationCache{ c = x -> (res = zeros(length(cache.lcons)); f.cons(res, x); res) cons = WorstFitnessConstraints(Float64[], Float64[], cache.lcons, cache.ucons, c) - opt_res = Evolutionary.optimize(_loss, cons, cache.u0, cache.opt, opt_args) + if isa(f, MultiObjectiveOptimizationFunction) + opt_res = Evolutionary.optimize(_loss, _loss(cache.u0), cons, cache.u0, cache.opt, opt_args) + else + opt_res = Evolutionary.optimize(_loss, cons, cache.u0, cache.opt, opt_args) + end else - opt_res = Evolutionary.optimize(_loss, cache.u0, cache.opt, opt_args) + if isa(f, MultiObjectiveOptimizationFunction) + opt_res = Evolutionary.optimize(_loss, _loss(cache.u0), cache.u0, cache.opt, opt_args) + else + opt_res = Evolutionary.optimize(_loss, cache.u0, cache.opt, opt_args) + end end else if !isnothing(f.cons) @@ -156,7 +164,11 @@ function SciMLBase.__solve(cache::OptimizationCache{ else cons = BoxConstraints(cache.lb, cache.ub) end - opt_res = Evolutionary.optimize(_loss, cons, cache.u0, cache.opt, opt_args) + if isa(f, MultiObjectiveOptimizationFunction) + opt_res = Evolutionary.optimize(_loss, _loss(cache.u0), cons, cache.u0, cache.opt, opt_args) + else + opt_res = Evolutionary.optimize(_loss, cons, cache.u0, cache.opt, opt_args) + end end t1 = time() opt_ret = Symbol(Evolutionary.converged(opt_res)) From 44d25b922dc7407b0cdc32156ae6bed32a22da17 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 19 Aug 2024 04:21:30 +0530 Subject: [PATCH 15/17] Update build_solution OptimizationEvolutionary.jl Added the build_solution alternative call for MOO tests. --- .../src/OptimizationEvolutionary.jl | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl index a8721d391..374006377 100644 --- a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl +++ b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl @@ -174,11 +174,20 @@ function SciMLBase.__solve(cache::OptimizationCache{ opt_ret = Symbol(Evolutionary.converged(opt_res)) stats = Optimization.OptimizationStats(; iterations = opt_res.iterations, time = t1 - t0, fevals = opt_res.f_calls) - SciMLBase.build_solution(cache, cache.opt, + if !isa(f, MultiObjectiveOptimizationFunction) + SciMLBase.build_solution(cache, cache.opt, Evolutionary.minimizer(opt_res), Evolutionary.minimum(opt_res); original = opt_res, retcode = opt_ret, stats = stats) + else + ans = Evolutionary.minimizer(opt_res) + SciMLBase.build_solution(cache, cache.opt, + ans, + _loss(ans[1]); original = opt_res, + retcode = opt_ret, + stats = stats) + end end end From ac0f630550623fa81a3f0fe07cdbc91cace04214 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 19 Aug 2024 04:24:14 +0530 Subject: [PATCH 16/17] Update objective function tests in runtests.jl Added objective tests for the testset. 
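
The assertions added below, combined with the build_solution change in the previous patch, pin down how a multi-objective Evolutionary result is surfaced: sol.u holds the set of candidate solutions returned by Evolutionary.minimizer(opt_res) (hence the double indexing result.u[i][j] in the tests), sol.objective is the fitness vector of the first candidate, i.e. _loss(sol.u[1]), and the raw Evolutionary result stays in sol.original. Reading such a result looks roughly like this sketch:

    sol = solve(problem, OptimizationEvolutionary.NSGA2())
    candidate = sol.u[1]       # one candidate point from the returned set
    f1, f2 = sol.objective     # objectives of sol.u[1], per the previous patch
    raw = sol.original         # underlying Evolutionary result (trace, counts, ...)
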
--- lib/OptimizationEvolutionary/test/runtests.jl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl index 8592f67e4..bde1de210 100644 --- a/lib/OptimizationEvolutionary/test/runtests.jl +++ b/lib/OptimizationEvolutionary/test/runtests.jl @@ -94,6 +94,7 @@ end println("Solution for Sphere and Rastrigin: ", result) @test result.u[1][1] ≈ 7.88866e-5 atol=1e-3 @test result.u[1][2] ≈ 4.96471e-5 atol=1e-3 + @test result.objective[1] ≈ 8.6879e-9 atol=1e-3 end # Test 2: Rosenbrock and Ackley Functions @@ -108,6 +109,7 @@ end println("Solution for Rosenbrock and Ackley: ", result) @test result.u[10][1] ≈ 1.0 atol=1e-3 @test result.u[10][2] ≈ 0.999739 atol=1e-3 + @test result.objective[2] ≈ 3.625384 atol=1e-3 end # Test 3: ZDT1 Function @@ -124,6 +126,7 @@ end println("Solution for ZDT1: ", result) @test result.u[1][1] ≈ -0.365434 atol=1e-3 @test result.u[1][2] ≈ 1.22128 atol=1e-3 + @test result.objective[1] ≈ -0.365434 atol=1e-3 end # Test 4: DTLZ2 Function @@ -138,6 +141,7 @@ end println("Solution for DTLZ2: ", result) @test result.u[1][1] ≈ 0.899183 atol=1e-3 @test result.u[2][1] ≈ 0.713992 atol=1e-3 + @test result.objective[1] ≈ 0.1599915 atol=1e-3 end # Test 5: Schaffer Function N.2 @@ -152,6 +156,7 @@ end println("Solution for Schaffer N.2: ", result) @test result.u[19][1] ≈ 0.252635 atol=1e-3 @test result.u[9][1] ≈ 1.0 atol=1e-3 + @test result.objective[1] ≈ 1.0 atol=1e-3 end end From 0cc91e86e34af07f75c50362514b93a620204de4 Mon Sep 17 00:00:00 2001 From: Vaibhav Dixit Date: Sun, 18 Aug 2024 19:37:42 -0400 Subject: [PATCH 17/17] Finish up tests --- lib/OptimizationEvolutionary/test/runtests.jl | 174 +++++++++--------- 1 file changed, 89 insertions(+), 85 deletions(-) diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl index bde1de210..4bc276c7b 100644 --- a/lib/OptimizationEvolutionary/test/runtests.jl +++ b/lib/OptimizationEvolutionary/test/runtests.jl @@ -59,105 +59,109 @@ Random.seed!(1234) haskey(sol.original.trace[end].metadata, "curr_u") # Test Suite for Different Multi-Objective Functions -function test_multi_objective(func, initial_guess) - # Define the gradient function using ForwardDiff - function gradient_multi_objective(x, p=nothing) - ForwardDiff.jacobian(func, x) - end + function test_multi_objective(func, initial_guess) + # Define the gradient function using ForwardDiff + function gradient_multi_objective(x, p=nothing) + ForwardDiff.jacobian(func, x) + end - # Create an instance of MultiObjectiveOptimizationFunction - obj_func = MultiObjectiveOptimizationFunction(func, jac=gradient_multi_objective) + # Create an instance of MultiObjectiveOptimizationFunction + obj_func = MultiObjectiveOptimizationFunction(func, jac=gradient_multi_objective) - # Set up the evolutionary algorithm (e.g., NSGA2) - algorithm = OptimizationEvolutionary.NSGA2() + # Set up the evolutionary algorithm (e.g., NSGA2) + algorithm = OptimizationEvolutionary.NSGA2() - # Define the optimization problem - problem = OptimizationProblem(obj_func, initial_guess) + # Define the optimization problem + problem = OptimizationProblem(obj_func, initial_guess) - # Solve the optimization problem - result = solve(problem, algorithm) - - return result -end + # Solve the optimization problem + result = solve(problem, algorithm) -@testset "Multi-Objective Optimization Tests" begin + return result + end - # Test 1: Sphere and Rastrigin Functions - @testset "Sphere and 
Rastrigin Functions" begin - function multi_objective_1(x, p=nothing)::Vector{Float64} - f1 = sum(x .^ 2) # Sphere function - f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) # Rastrigin function - return [f1, f2] + @testset "Multi-Objective Optimization Tests" begin + + # Test 1: Sphere and Rastrigin Functions + @testset "Sphere and Rastrigin Functions" begin + function multi_objective_1(x, p=nothing)::Vector{Float64} + f1 = sum(x .^ 2) # Sphere function + f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) # Rastrigin function + return [f1, f2] + end + result = test_multi_objective(multi_objective_1, [0.0, 1.0]) + @test result ≠ nothing + println("Solution for Sphere and Rastrigin: ", result) + @test result.u[1][1] ≈ 7.88866e-5 atol=1e-3 + @test result.u[1][2] ≈ 4.96471e-5 atol=1e-3 + @test result.objective[1] ≈ 8.6879e-9 atol=1e-3 + @test result.objective[2] ≈ 1.48875349381683e-6 atol=1e-3 end - result = test_multi_objective(multi_objective_1, [0.0, 1.0]) - @test result ≠ nothing - println("Solution for Sphere and Rastrigin: ", result) - @test result.u[1][1] ≈ 7.88866e-5 atol=1e-3 - @test result.u[1][2] ≈ 4.96471e-5 atol=1e-3 - @test result.objective[1] ≈ 8.6879e-9 atol=1e-3 - end - # Test 2: Rosenbrock and Ackley Functions - @testset "Rosenbrock and Ackley Functions" begin - function multi_objective_2(x, p=nothing)::Vector{Float64} - f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function - f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function - return [f1, f2] + # Test 2: Rosenbrock and Ackley Functions + @testset "Rosenbrock and Ackley Functions" begin + function multi_objective_2(x, p=nothing)::Vector{Float64} + f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function + f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function + return [f1, f2] + end + result = test_multi_objective(multi_objective_2, [0.1, 1.0]) + @test result ≠ nothing + println("Solution for Rosenbrock and Ackley: ", result) + @test result.u[1][1] ≈ 0.003993274873103834 atol=1e-3 + @test result.u[1][2] ≈ 0.001433311246712721 atol=1e-3 + @test result.objective[1] ≈ 0.9922302888530358 atol=1e-3 + @test result.objective[2] ≈ 0.012479470703588902 atol=1e-3 end - result = test_multi_objective(multi_objective_2, [1.0, 1.0]) - @test result ≠ nothing - println("Solution for Rosenbrock and Ackley: ", result) - @test result.u[10][1] ≈ 1.0 atol=1e-3 - @test result.u[10][2] ≈ 0.999739 atol=1e-3 - @test result.objective[2] ≈ 3.625384 atol=1e-3 - end - # Test 3: ZDT1 Function - @testset "ZDT1 Function" begin - function multi_objective_3(x, p=nothing)::Vector{Float64} - f1 = x[1] - g = 1 + 9 * sum(x[2:end]) / (length(x) - 1) - sqrt_arg = f1 / g - f2 = g * (1 - (sqrt_arg >= 0 ? sqrt(sqrt_arg) : NaN)) - return [f1, f2] + # Test 3: ZDT1 Function + @testset "ZDT1 Function" begin + function multi_objective_3(x, p=nothing)::Vector{Float64} + f1 = x[1] + g = 1 + 9 * sum(x[2:end]) / (length(x) - 1) + sqrt_arg = f1 / g + f2 = g * (1 - (sqrt_arg >= 0 ? 
sqrt(sqrt_arg) : NaN)) + return [f1, f2] + end + result = test_multi_objective(multi_objective_3, [0.25, 1.5]) + @test result ≠ nothing + println("Solution for ZDT1: ", result) + @test result.u[1][1] ≈ -0.365434 atol=1e-3 + @test result.u[1][2] ≈ 1.22128 atol=1e-3 + @test result.objective[1] ≈ -0.365434 atol=1e-3 + @test isnan(result.objective[2]) end - result = test_multi_objective(multi_objective_3, [0.25, 1.5]) - @test result ≠ nothing - println("Solution for ZDT1: ", result) - @test result.u[1][1] ≈ -0.365434 atol=1e-3 - @test result.u[1][2] ≈ 1.22128 atol=1e-3 - @test result.objective[1] ≈ -0.365434 atol=1e-3 - end - # Test 4: DTLZ2 Function - @testset "DTLZ2 Function" begin - function multi_objective_4(x, p=nothing)::Vector{Float64} - f1 = (1 + sum(x[2:end] .^ 2)) * cos(x[1] * π / 2) - f2 = (1 + sum(x[2:end] .^ 2)) * sin(x[1] * π / 2) - return [f1, f2] + # Test 4: DTLZ2 Function + @testset "DTLZ2 Function" begin + function multi_objective_4(x, p=nothing)::Vector{Float64} + f1 = (1 + sum(x[2:end] .^ 2)) * cos(x[1] * π / 2) + f2 = (1 + sum(x[2:end] .^ 2)) * sin(x[1] * π / 2) + return [f1, f2] + end + result = test_multi_objective(multi_objective_4, [0.25, 0.75]) + @test result ≠ nothing + println("Solution for DTLZ2: ", result) + @test result.u[1][1] ≈ 0.899183 atol=1e-3 + @test result.u[2][1] ≈ 0.713992 atol=1e-3 + @test result.objective[1] ≈ 0.1599915 atol=1e-3 + @test result.objective[2] ≈ 1.001824893932647 atol=1e-3 end - result = test_multi_objective(multi_objective_4, [0.25, 0.75]) - @test result ≠ nothing - println("Solution for DTLZ2: ", result) - @test result.u[1][1] ≈ 0.899183 atol=1e-3 - @test result.u[2][1] ≈ 0.713992 atol=1e-3 - @test result.objective[1] ≈ 0.1599915 atol=1e-3 - end - # Test 5: Schaffer Function N.2 - @testset "Schaffer Function N.2" begin - function multi_objective_5(x, p=nothing)::Vector{Float64} - f1 = x[1]^2 - f2 = (x[1] - 2)^2 - return [f1, f2] + # Test 5: Schaffer Function N.2 + @testset "Schaffer Function N.2" begin + function multi_objective_5(x, p=nothing)::Vector{Float64} + f1 = x[1]^2 + f2 = (x[1] - 2)^2 + return [f1, f2] + end + result = test_multi_objective(multi_objective_5, [1.0]) + @test result ≠ nothing + println("Solution for Schaffer N.2: ", result) + @test result.u[19][1] ≈ 0.252635 atol=1e-3 + @test result.u[9][1] ≈ 1.0 atol=1e-3 + @test result.objective[1] ≈ 1.0 atol=1e-3 + @test result.objective[2] ≈ 1.0 atol=1e-3 end - result = test_multi_objective(multi_objective_5, [1.0]) - @test result ≠ nothing - println("Solution for Schaffer N.2: ", result) - @test result.u[19][1] ≈ 0.252635 atol=1e-3 - @test result.u[9][1] ≈ 1.0 atol=1e-3 - @test result.objective[1] ≈ 1.0 atol=1e-3 end - -end end
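
One practical consequence of the solution layout tested above: sol.objective stores only the fitness of the first candidate, so recovering the whole approximate Pareto front from an NSGA2 run means re-evaluating the objective over every returned candidate. A small sketch using the ZDT1 objective defined in the tests (multi_objective_3 is local to its testset, so this is illustrative rather than copy-pasteable):

    # sol.u is the candidate set; multi_objective_3 is the ZDT1 function above.
    front = [multi_objective_3(u) for u in sol.u]   # Vector of [f1, f2] pairs
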