From 704701669177b2a5415d7b187c15b91ff4830d52 Mon Sep 17 00:00:00 2001
From: Daniel Ingraham
Date: Fri, 20 Sep 2024 15:46:27 -0400
Subject: [PATCH 1/2] Make `ksmax`, `ksmin`, `ksmax_adaptive`, `ksmin_adaptive` complex-step safe

---
 src/smooth.jl    |  6 +++---
 test/runtests.jl | 26 ++++++++++++++++++++++++++
 2 files changed, 29 insertions(+), 3 deletions(-)

diff --git a/src/smooth.jl b/src/smooth.jl
index d50af55..c204590 100644
--- a/src/smooth.jl
+++ b/src/smooth.jl
@@ -23,7 +23,7 @@ goes to infinity the maximum function is returned. Is mathematically guaranteed
 overestimate the maximum function, i.e. `maximum(x) <= ksmax(x, hardness)`.
 """
 function ksmax(x, hardness=50)
-    k = maximum(x)
+    k = maximum(real(x))
     return 1.0 / hardness * log(sum(exp.(hardness * (x .- k)))) .+ k
 end
 
@@ -83,7 +83,7 @@ Computes the derivative of the Kreisselmeier–Steinhauser constraint aggregatio
 function with respect to `hardness`.
 """
 function ksmax_h(x, hardness)
-    k = maximum(x)
+    k = maximum(real(x))
     tmp1 = exp.(hardness * (x .- k))
     tmp2 = sum((x .- k) .* tmp1)
     tmp3 = sum(tmp1)
@@ -98,7 +98,7 @@ Computes the second derivative of the Kreisselmeier–Steinhauser constraint agg
 function with respect to `hardness`.
 """
 function ksmax_hh(x, hardness)
-    k = maximum(x)
+    k = maximum(real(x))
     tmp1 = exp.(hardness * (x .- k))
     tmp2 = sum((x .- k) .* tmp1)
     tmp2_h = sum((x .- k) .^ 2 .* tmp1)
diff --git a/test/runtests.jl b/test/runtests.jl
index 7bdfe3a..fe487aa 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -260,6 +260,19 @@ hardness = 100.0
 x_max_smooth = ksmin(x, hardness)
 @test isapprox(x_max_smooth, -0.006931471805599453)
 
+# Test we can diff through ksmax and ksmin:
+ksmax_wrapper(x) = sin(ksmax(x.^2 .+ 2))
+x0 = [1.0, 1.5, 2.0, 2.5, 3.0]
+g1 = ForwardDiff.gradient(ksmax_wrapper, x0)
+g2 = FiniteDiff.finite_difference_gradient(ksmax_wrapper, x0, Val(:complex))
+@test maximum(abs.(g2 .- g1)) < 1e-12
+
+ksmin_wrapper(x) = sin(ksmin(x.^2 .+ 2))
+x0 = [1.0, 1.5, 2.0, 2.5, 3.0]
+g1 = ForwardDiff.gradient(ksmin_wrapper, x0)
+g2 = FiniteDiff.finite_difference_gradient(ksmin_wrapper, x0, Val(:complex))
+@test maximum(abs.(g2 .- g1)) < 1e-12
+
 # -------------------------
 
 # ------ ksmax_adaptive ---------
@@ -348,6 +361,19 @@ smoothing_fraction = 0.2
 x_max_smooth = ksmin_adaptive(x, smoothing_fraction=smoothing_fraction)
 @test isapprox(x_max_smooth, -5.2856933329025475e-6)
 
+# Test we can diff through ksmax_adaptive and ksmin_adaptive:
+ksmax_adaptive_wrapper(x) = sin(ksmax_adaptive(x.^2 .+ 2))
+x0 = [1.0, 1.5, 2.0, 2.5, 3.0]
+g1 = ForwardDiff.gradient(ksmax_adaptive_wrapper, x0)
+g2 = FiniteDiff.finite_difference_gradient(ksmax_adaptive_wrapper, x0, Val(:complex))
+@test maximum(abs.(g2 .- g1)) < 1e-12
+
+ksmin_adaptive_wrapper(x) = sin(ksmin_adaptive(x.^2 .+ 2))
+x0 = [1.0, 1.5, 2.0, 2.5, 3.0]
+g1 = ForwardDiff.gradient(ksmin_adaptive_wrapper, x0)
+g2 = FiniteDiff.finite_difference_gradient(ksmin_adaptive_wrapper, x0, Val(:complex))
+@test maximum(abs.(g2 .- g1)) < 1e-12
+
 # -------------------------
 
 # ------ sigmoid ---------

From 126b2c269d8cc26d25a06ab3528b93f8d0df7f95 Mon Sep 17 00:00:00 2001
From: Daniel Ingraham
Date: Fri, 4 Oct 2024 13:57:49 -0400
Subject: [PATCH 2/2] Bump version

---
 Project.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index cce2b2e..6aa9f66 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "FLOWMath"
 uuid = "6cb5d3fb-0fe8-4cc2-bd89-9fe0b19a99d3"
 authors = ["Andrew Ning "]
-version = "0.4.1"
+version = "0.4.2"
 
 [deps]
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
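
For context, a minimal sketch (not part of the patch itself) of what the change enables: because `ksmax` and the related functions now take the shift constant from `real(x)`, a complex perturbation passes through the aggregation intact, so a complex-step gradient agrees with a forward-mode AD gradient. The snippet below mirrors the new tests in the patch and assumes FLOWMath, ForwardDiff, and FiniteDiff are installed in the active environment.

using FLOWMath: ksmax
using ForwardDiff, FiniteDiff

f(x) = sin(ksmax(x .^ 2 .+ 2))     # smooth scalar function built on the KS max
x0 = [1.0, 1.5, 2.0, 2.5, 3.0]
g_ad = ForwardDiff.gradient(f, x0)                                  # forward-mode AD
g_cs = FiniteDiff.finite_difference_gradient(f, x0, Val(:complex))  # complex step
@assert maximum(abs.(g_cs .- g_ad)) < 1e-12   # the two gradients should agree closely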