
Commit a74a454

Update runtests.jl
Added tests for constrained optimization via DAE solvers.
1 parent 19913b7 · commit a74a454
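(Context for review: the new DAE optimizers treat equality-constrained minimization, min f(x) subject to c(x) = 0, as a gradient flow with an algebraic constraint,

    ẋ = -∇f(x) - Jc(x)ᵀλ,    0 = c(x),

where the Lagrange multiplier λ rides along as an algebraic state; this is why the tests below pass differential_vars = [true, true, false]. This formulation is inferred from the tests, not shown in this diff.)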

1 file changed: +255 −29 lines

lib/OptimizationODE/test/runtests.jl (255 additions, 29 deletions)
@@ -1,44 +1,270 @@
 using Test
-using OptimizationODE, SciMLBase, ADTypes
+using OptimizationODE
+using Optimization
+using LinearAlgebra, ForwardDiff
+using OrdinaryDiffEq, DifferentialEquations, SteadyStateDiffEq, Sundials
 
-@testset "OptimizationODE Tests" begin
+function rosenbrock(x, p, args...)
+    return (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
+end
 
-    function f(x, p)
-        return sum(abs2, x)
-    end
+function rosenbrock_grad!(grad, x, p, args...)
+    grad[1] = -2.0 * (p[1] - x[1]) - 4.0 * p[2] * (x[2] - x[1]^2) * x[1]
+    grad[2] = 2.0 * p[2] * (x[2] - x[1]^2)
+end
 
-    function g!(g, x, p)
-        @. g = 2 * x
-    end
+function quadratic(x, p, args...)
+    return (x[1] - p[1])^2 + (x[2] - p[2])^2
+end
+
+function quadratic_grad!(grad, x, p, args...)
+    grad[1] = 2.0 * (x[1] - p[1])
+    grad[2] = 2.0 * (x[2] - p[2])
+end
 
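Note: the hand-written gradients above are easy to cross-check against ForwardDiff, which this file already imports. A quick, hypothetical verification (not part of the commit):

    using ForwardDiff
    x, p = [1.5, 2.0], [1.0, 100.0]
    g = similar(x)
    rosenbrock_grad!(g, x, p)
    # Analytic and AD gradients should agree to machine precision
    @assert isapprox(g, ForwardDiff.gradient(z -> rosenbrock(z, p), x); rtol = 1e-10)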
-    x0 = [2.0, -3.0]
-    p = [5.0]
+# Constrained optimization problem
+function constrained_objective(x, p, args...)
+    return x[1]^2 + x[2]^2
+end
 
-    f_autodiff = OptimizationFunction(f, ADTypes.AutoForwardDiff())
-    prob_auto = OptimizationProblem(f_autodiff, x0, p)
+function constrained_objective_grad!(g, x, p, args...)
+    g .= 2 .* x  # ∇(x₁² + x₂²); must not index into p, which may be empty
+    return nothing
+end
 
-    for opt in (ODEGradientDescent(dt=0.01), RKChebyshevDescent(), RKAccelerated(), HighOrderDescent())
-        sol = solve(prob_auto, opt; maxiters=50_000)
-        @test sol.u ≈ [0.0, 0.0] atol=1e-2
-        @test sol.objective ≈ 0.0 atol=1e-2
-        @test sol.retcode == ReturnCode.Success
-    end
+# Constraint: x₁ - x₂ - p[1] = 0 (with p[1] = 1, this enforces x₁ - x₂ = 1)
+function constraint_func(x, p, args...)
+    return x[1] - x[2] - p[1]
+end
 
-    f_manual = OptimizationFunction(f, SciMLBase.NoAD(); grad=g!)
-    prob_manual = OptimizationProblem(f_manual, x0)
+function constraint_jac!(J, x, args...)
+    J[1, 1] = 1.0
+    J[1, 2] = -1.0
+    return nothing
+end
 
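For reference, the optimum these constrained tests assert: minimizing x₁² + x₂² subject to x₁ - x₂ = 1 has Lagrangian L(x, λ) = x₁² + x₂² + λ(x₁ - x₂ - 1), with stationarity conditions

    2x₁ + λ = 0,    2x₂ - λ = 0,    x₁ - x₂ = 1.

The first two give x₁ = -x₂; substituting into the constraint yields x* = (0.5, -0.5) with λ* = -1, the values asserted in the DAE testsets below.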
-    for opt in (ODEGradientDescent(dt=0.01), RKChebyshevDescent(), RKAccelerated(), HighOrderDescent())
-        sol = solve(prob_manual, opt; maxiters=50_000)
-        @test sol.u ≈ [0.0, 0.0] atol=1e-2
-        @test sol.objective ≈ 0.0 atol=1e-2
-        @test sol.retcode == ReturnCode.Success
+@testset "OptimizationODE.jl Tests" begin
+
+    @testset "Basic Unconstrained Optimization" begin
+        @testset "Quadratic Function - ODE Optimizers" begin
+            x0 = [2.0, 2.0]
+            p = [1.0, 1.0]  # minimum at (1, 1)
+
+            optf = OptimizationFunction(quadratic, grad=quadratic_grad!)
+            prob = OptimizationProblem(optf, x0, p)
+
+            optimizers = [
+                ("ODEGradientDescent", ODEGradientDescent()),
+                ("RKChebyshevDescent", RKChebyshevDescent()),
+                ("RKAccelerated", RKAccelerated()),
+                ("HighOrderDescent", HighOrderDescent())
+            ]
+
+            for (name, opt) in optimizers
+                @testset "$name" begin
+                    sol = solve(prob, opt, dt=0.001, maxiters=1000000)
+                    @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default
+                    @test isapprox(sol.u, p, atol=1e-1)
+                    @test sol.objective < 1e-2
+                end
+            end
+        end
+
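These ODE optimizers integrate the gradient flow ẋ(t) = -∇f(x(t)) until it settles; ODEGradientDescent() with a fixed step dt is explicit Euler on that flow, which is exactly vanilla gradient descent. A self-contained sketch of the equivalence (illustrative only, using the public OrdinaryDiffEq API rather than the package internals):

    using OrdinaryDiffEq
    # dx/dt = -∇f(x) for the quadratic test function, minimum at p = (1, 1)
    grad_flow!(dx, x, p, t) = (quadratic_grad!(dx, x, p); dx .*= -1)
    flow = ODEProblem(grad_flow!, [2.0, 2.0], (0.0, 10.0), [1.0, 1.0])
    traj = solve(flow, Euler(); dt = 0.001)
    traj.u[end]  # ≈ [1.0, 1.0], the minimizer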
@testset "Rosenbrock Function - Selected Optimizers" begin
75+
x0 = [1.5, 2.0]
76+
p = [1.0, 100.0] # Classic Rosenbrock parameters
77+
78+
optf = OptimizationFunction(rosenbrock, grad=rosenbrock_grad!)
79+
prob = OptimizationProblem(optf, x0, p)
80+
81+
# Test with more robust optimizers for Rosenbrock
82+
optimizers = [
83+
("RKAccelerated", RKAccelerated()),
84+
("HighOrderDescent", HighOrderDescent())
85+
]
86+
87+
for (name, opt) in optimizers
88+
@testset "$name" begin
89+
sol = solve(prob, opt, dt=0.001, maxiters=1000000)
90+
@test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default
91+
# Rosenbrock is harder, so we use looser tolerances
92+
@test isapprox(sol.u[1], 1.0, atol=1e-1)
93+
@test isapprox(sol.u[2], 1.0, atol=1e-1)
94+
@test sol.objective < 1.0
95+
end
96+
end
97+
end
3598
end
99+
100+
@testset "Constrained Optimization - DAE Optimizers" begin
101+
@testset "Equality Constrained Optimization" begin
102+
# Minimize f(x) = x₁² + x₂²
103+
# Subject to x₁ - x₂ = 1
36104

37-
f_fail = OptimizationFunction(f, SciMLBase.NoAD())
38-
prob_fail = OptimizationProblem(f_fail, x0)
105+
x0 = [1.0, 0.0] # reasonable initial guess
106+
p = [1.0] # enforce x₁ - x₂ = 1
39107

40-
for opt in (ODEGradientDescent(dt=0.001), RKChebyshevDescent(), RKAccelerated(), HighOrderDescent())
41-
@test_throws ErrorException solve(prob_fail, opt; maxiters=20_000)
108+
optf = OptimizationFunction(constrained_objective;
109+
grad = constrained_objective_grad!,
110+
cons = constraint_func,
111+
cons_j = constraint_jac!)
112+
113+
@testset "Equality Constrained - Mass Matrix Method" begin
114+
prob = OptimizationProblem(optf, x0, p)
115+
opt = DAEMassMatrix()
116+
sol = solve(prob, opt; dt=0.01, maxiters=1_000_000)
117+
118+
@test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default
119+
@test isapprox(sol.u[1] - sol.u[2], 1.0; atol = 1e-2)
120+
@test isapprox(sol.u, [0.5, -0.5]; atol = 1e-2)
42121
end
43122

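A hypothetical, self-contained version of what the mass-matrix method presumably assembles: the state is (x₁, x₂, λ), and a singular mass matrix zeroes out the time derivative of the λ-row, turning it into the algebraic equation 0 = c(x):

    using OrdinaryDiffEq, LinearAlgebra
    function kkt_rhs!(du, u, p, t)
        x1, x2, λ = u
        du[1] = -(2x1 + λ)      # -∂L/∂x₁ (constraint Jacobian row is [1, -1])
        du[2] = -(2x2 - λ)      # -∂L/∂x₂
        du[3] = x1 - x2 - p[1]  # algebraic residual: 0 = c(x)
    end
    M = Diagonal([1.0, 1.0, 0.0])
    dae = ODEProblem(ODEFunction(kkt_rhs!; mass_matrix = M),
                     [1.0, 0.0, -1.0], (0.0, 100.0), [1.0])
    sol = solve(dae, RadauIIA5())  # Radau methods tolerate the higher-index λ variable
    sol.u[end][1:2]                # ≈ [0.5, -0.5]

Starting from a consistent state (c(x₀) = 0, λ₀ = -1) matters here, since the algebraic equation must already hold at t = 0.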
+            @testset "Equality Constrained - Index Method" begin
+                prob = OptimizationProblem(optf, x0, p)
+                opt = DAEIndexing()
+                differential_vars = [true, true, false]  # x vars are differential; λ is algebraic
+                sol = solve(prob, opt; dt=0.01, maxiters=1_000_000,
+                            differential_vars = differential_vars)
+
+                @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default
+                @test isapprox(sol.u[1] - sol.u[2], 1.0; atol = 1e-2)
+                @test isapprox(sol.u, [0.5, -0.5]; atol = 1e-2)
+            end
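The index-method variant presumably hands the same KKT system to a fully implicit DAE solver, with differential_vars flagging which unknowns carry time derivatives; a hypothetical direct equivalent using Sundials' IDA:

    using Sundials
    function kkt_residual!(res, du, u, p, t)
        res[1] = du[1] + 2u[1] + u[3]  # ẋ₁ + ∂L/∂x₁ = 0
        res[2] = du[2] + 2u[2] - u[3]  # ẋ₂ + ∂L/∂x₂ = 0
        res[3] = u[1] - u[2] - p[1]    # 0 = c(x)
    end
    u0  = [1.0, 0.0, -1.0]   # consistent: c(x₀) = 0 and λ₀ = -1
    du0 = [-1.0, -1.0, 0.0]  # chosen so the residual vanishes at t = 0
    dae = DAEProblem(kkt_residual!, du0, u0, (0.0, 100.0), [1.0];
                     differential_vars = [true, true, false])
    sol = solve(dae, IDA())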
+        end
+    end
+
+    @testset "Parameter Handling" begin
+        @testset "NullParameters Handling" begin
+            x0 = [0.0, 0.0]
+            p = Float64[]  # an empty parameter vector stands in for NullParameters
+            optf = OptimizationFunction((x, p, args...) -> sum(x.^2),
+                                        grad=(grad, x, p, args...) -> (grad .= 2.0 .* x))
+            prob = OptimizationProblem(optf, x0, p)
+
+            opt = ODEGradientDescent()
+            sol = solve(prob, opt, dt=0.01, maxiters=100000)
+
+            @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default
+            @test isapprox(sol.u, [0.0, 0.0], atol=1e-2)
+        end
+
+        @testset "Regular Parameters" begin
+            x0 = [0.5, 1.5]
+            p = [1.0, 1.0]
+
+            optf = OptimizationFunction(quadratic, grad=quadratic_grad!)
+            prob = OptimizationProblem(optf, x0, p)
+
+            opt = RKAccelerated()
+            sol = solve(prob, opt; dt=0.001, maxiters=1000000)
+
+            @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default
+            @test isapprox(sol.u, p, atol=1e-1)
+        end
+    end
+
+    @testset "Solver Options and Keywords" begin
+        @testset "Custom dt and maxiters" begin
+            x0 = [0.0, 0.0]
+            p = [1.0, 1.0]
+
+            optf = OptimizationFunction(quadratic, grad=quadratic_grad!)
+            prob = OptimizationProblem(optf, x0, p)
+
+            opt = RKAccelerated()
+
+            # Solve with a baseline fixed step
+            sol1 = solve(prob, opt; dt=0.001, maxiters=100000)
+            @test sol1.retcode == ReturnCode.Success || sol1.retcode == ReturnCode.Default
+
+            # Solving again with identical settings should reproduce the result
+            sol2 = solve(prob, opt; dt=0.001, maxiters=100000)
+            @test sol2.retcode == ReturnCode.Success || sol2.retcode == ReturnCode.Default
+            @test sol2.objective <= sol1.objective  # deterministic: at least as good
+        end
+    end
+
+    @testset "Callback Functionality" begin
+        @testset "Progress Callback" begin
+            x0 = [0.0, 0.0]
+            p = [1.0, 1.0]
+
+            callback_called = Ref(false)
+            callback_values = Vector{Vector{Float64}}()
+
+            # Returning false tells the solver to keep iterating
+            function test_callback(x, p, t)
+                return false
+            end
+
+            optf = OptimizationFunction(quadratic; grad=quadratic_grad!)
+            prob = OptimizationProblem(optf, x0, p)
+
+            opt = RKAccelerated()
+            sol = solve(prob, opt, dt=0.1, maxiters=100000, callback=test_callback, progress=true)
+
+            @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default
+        end
+    end
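The callback_called / callback_values scaffolding above is never exercised; under the SciML convention a callback returns false to keep iterating, so a version that actually records the trajectory would presumably look like:

    test_callback(x, p, t) = (callback_called[] = true; push!(callback_values, copy(x)); false)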
+
+    @testset "Finite Difference Jacobian" begin
+        @testset "Jacobian Computation" begin
+            x = [1.0, 2.0]
+            f(x) = [x[1]^2 + x[2], x[1] * x[2]]
+
+            J = OptimizationODE.finite_difference_jacobian(f, x)
+
+            # Analytic Jacobian [2x₁ 1; x₂ x₁] evaluates to [2 1; 2 1] at x = (1, 2)
+            expected_J = [2.0 1.0; 2.0 1.0]
+
+            @test isapprox(J, expected_J, atol=1e-6)
+        end
+    end
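finite_difference_jacobian is internal to the package and not shown in this diff; a minimal forward-difference sketch of what it presumably computes:

    # Hypothetical stand-in, not the package's implementation.
    function fd_jacobian(f, x; ε = sqrt(eps(eltype(x))))
        fx = f(x)
        J = zeros(eltype(x), length(fx), length(x))
        for j in eachindex(x)
            xp = copy(x)
            xp[j] += ε                    # perturb one coordinate at a time
            J[:, j] .= (f(xp) .- fx) ./ ε  # forward-difference column
        end
        return J
    end

With f and x as in the test, fd_jacobian(f, x) matches [2.0 1.0; 2.0 1.0] to roughly 1e-8, comfortably inside the 1e-6 tolerance.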
+
+    @testset "Solver Type Detection" begin
+        @testset "Mass Matrix Solvers" begin
+            opt = DAEMassMatrix()
+            @test OptimizationODE.get_solver_type(opt) == :mass_matrix
+        end
+
+        @testset "Index Method Solvers" begin
+            opt = DAEIndexing()
+            @test OptimizationODE.get_solver_type(opt) == :indexing
+        end
+    end
+
+    @testset "Error Handling and Edge Cases" begin
+        @testset "Empty Constraints" begin
+            x0 = [1.5, 0.5]
+            p = Float64[]
+
+            # A problem without constraints should fall back to the ODE method
+            optf = OptimizationFunction(constrained_objective,
+                                        grad=constrained_objective_grad!)
+            prob = OptimizationProblem(optf, x0, p)
+
+            opt = DAEMassMatrix()
+            sol = solve(prob, opt; dt=0.001, maxiters=50000)
+
+            @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default
+            @test isapprox(sol.u, [0.0, 0.0], atol=1e-1)
+        end
+
+        @testset "Single Variable Optimization" begin
+            x0 = [0.5]
+            p = [1.0]
+
+            single_var_func(x, p, args...) = (x[1] - p[1])^2
+            single_var_grad!(grad, x, p, args...) = (grad[1] = 2.0 * (x[1] - p[1]))
+
+            optf = OptimizationFunction(single_var_func; grad=single_var_grad!)
+            prob = OptimizationProblem(optf, x0, p)
+
+            opt = RKAccelerated()
+            sol = solve(prob, opt; dt=0.001, maxiters=10000)
+
+            @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default
+            @test isapprox(sol.u[1], p[1], atol=1e-1)
+        end
+    end
 end
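To run this suite locally, something like the following from the lib/OptimizationODE directory should work (standard Julia tooling, not specific to this commit):

    julia --project=. -e 'using Pkg; Pkg.instantiate(); Pkg.test()'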
