diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl
index 135588a0a..6bfc00a29 100644
--- a/lib/OptimizationEvolutionary/test/runtests.jl
+++ b/lib/OptimizationEvolutionary/test/runtests.jl
@@ -1,4 +1,7 @@
 using OptimizationEvolutionary, Optimization, Random
+using ForwardDiff
+using SciMLBase: MultiObjectiveOptimizationFunction
+using NaNMath
 using Test
 
 Random.seed!(1234)
@@ -56,4 +59,92 @@ Random.seed!(1234)
     # Make sure that both the user's trace record value, as well as `curr_u` are stored in the trace.
     @test haskey(sol.original.trace[end].metadata, "TESTVAL") &&
           haskey(sol.original.trace[end].metadata, "curr_u")
+
+    # Test Suite for Different Multi-Objective Functions
+    function test_multi_objective(func, initial_guess)
+        # Define the Jacobian of the objective vector using ForwardDiff
+        function gradient_multi_objective(x, p = nothing)
+            ForwardDiff.jacobian(func, x)
+        end
+
+        # Create an instance of MultiObjectiveOptimizationFunction
+        obj_func = MultiObjectiveOptimizationFunction(func, jac = gradient_multi_objective)
+
+        # Set up the evolutionary algorithm (e.g., NSGA2)
+        algorithm = OptimizationEvolutionary.NSGA2()
+
+        # Define the optimization problem
+        problem = OptimizationProblem(obj_func, initial_guess)
+
+        # Solve the optimization problem
+        result = solve(problem, algorithm)
+
+        return result
+    end
+
+    @testset "Multi-Objective Optimization Tests" begin
+
+        # Test 1: Sphere and Rastrigin Functions
+        @testset "Sphere and Rastrigin Functions" begin
+            function multi_objective_1(x, p = nothing)::Vector{Float64}
+                f1 = sum(x .^ 2)                              # Sphere function
+                f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) # Rastrigin function
+                return [f1, f2]
+            end
+            result = test_multi_objective(multi_objective_1, [0.25, 0.25])
+            @test result ≠ nothing
+            println("Solution for Sphere and Rastrigin: ", result)
+        end
+
+        # Test 2: Rosenbrock and Ackley Functions
+        @testset "Rosenbrock and Ackley Functions" begin
+            function multi_objective_2(x, p = nothing)::Vector{Float64}
+                f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function
+                f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function
+                return [f1, f2]
+            end
+            result = test_multi_objective(multi_objective_2, [1.0, 1.0])
+            @test result ≠ nothing
+            println("Solution for Rosenbrock and Ackley: ", result)
+        end
+
+        # Test 3: ZDT1 Function
+        @testset "ZDT1 Function" begin
+            function multi_objective_3(x, p = nothing)::Vector{Float64}
+                f1 = x[1]
+                g = 1 + 9 * sum(x[2:end]) / (length(x) - 1)
+                # NaNMath.sqrt returns NaN instead of throwing a DomainError when f1 / g < 0
+                f2 = g * (1 - NaNMath.sqrt(f1 / g))
+                return [f1, f2]
+            end
+            result = test_multi_objective(multi_objective_3, [0.5, 0.5])
+            @test result ≠ nothing
+            println("Solution for ZDT1: ", result)
+        end
+
+        # Test 4: DTLZ2 Function
+        @testset "DTLZ2 Function" begin
+            function multi_objective_4(x, p = nothing)::Vector{Float64}
+                f1 = (1 + sum(x[2:end] .^ 2)) * cos(x[1] * π / 2)
+                f2 = (1 + sum(x[2:end] .^ 2)) * sin(x[1] * π / 2)
+                return [f1, f2]
+            end
+            result = test_multi_objective(multi_objective_4, [0.5, 0.5])
+            @test result ≠ nothing
+            println("Solution for DTLZ2: ", result)
+        end
+
+        # Test 5: Schaffer Function N.2
+        @testset "Schaffer Function N.2" begin
+            function multi_objective_5(x, p = nothing)::Vector{Float64}
+                f1 = x[1]^2
+                f2 = (x[1] - 2)^2
+                return [f1, f2]
+            end
+            result = test_multi_objective(multi_objective_5, [2.0])
+            @test result ≠ nothing
+            println("Solution for Schaffer N.2: ", result)
+        end
+
+    end
 end