# https://github.com/JuliaSmoothOptimizers/NLPModelsTest.jl/blob/src/dercheck.jl#L43
using Test
using NLPModels, BundleAdjustmentModels

function jacobian_residual_check(
  nlp::AbstractNLSModel;
  x::AbstractVector = nlp.meta.x0,
  atol::Float64 = 1.0e-6,
  rtol::Float64 = 1.0e-4,
)

  # Fast exit if there are no residual equations.
  J_errs = Dict{Tuple{Int, Int}, Float64}()
  nlp.nls_meta.nequ > 0 || return J_errs

  # Step size that roughly balances truncation and roundoff error for
  # second-order centered finite differences.
  step = (eps(Float64) / 3)^(1 / 3)
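  # Sketch of that step choice, assuming residual third derivatives of order
  # one: the centered-difference error behaves like eps(Float64) / h
  # (roundoff) plus h^2 / 6 (truncation), so the minimizer is of order
  # eps(Float64)^(1/3); any constant factor of that order works.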

  # Check the Jacobian of the residual.
  J = jac_residual(nlp, x)
  h = zeros(nlp.meta.nvar)
  cxph = zeros(nlp.nls_meta.nequ)
  cxmh = zeros(nlp.nls_meta.nequ)
  # Differentiate the residual with respect to each variable in turn.
  for i = 1:(nlp.meta.nvar)
    h[i] = step
    residual!(nlp, x + h, cxph)
    residual!(nlp, x - h, cxmh)
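    # Second-order centered difference: approximates column i of the Jacobian.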
    dcdxi = (cxph - cxmh) / 2 / step
    for j = 1:(nlp.nls_meta.nequ)
      err = abs(dcdxi[j] - J[j, i])
      # Record entries whose error exceeds the mixed absolute/relative tolerance.
      if err > atol + rtol * abs(dcdxi[j])
        J_errs[(j, i)] = err
      end
    end
    h[i] = 0
  end
  return J_errs
end
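
# A minimal self-test of jacobian_residual_check, as a sketch assuming
# ADNLPModels is available: ADNLSModel builds an NLS model from a residual
# function and obtains its Jacobian by automatic differentiation, so the
# Jacobian is exact and the check should flag no entries.
using ADNLPModels
@testset "jacobian_residual_check on an exact-Jacobian model" begin
  F(x) = [x[1] - 1; 10 * (x[2] - x[1]^2)]  # Rosenbrock-style residual
  nls = ADNLSModel(F, [-1.2; 1.0], 2)
  @test length(jacobian_residual_check(nls)) == 0
end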

@testset "Test Jacobian of residual against finite differences" begin
  nls = BundleAdjustmentModel("problem-49-7776-pre")
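  # Probe point with alternating +10 / -10 entries, away from the default x0.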
  x = 10 * [-(-1.0)^i for i = 1:nls.meta.nvar]
  # The check is known to fail on this model at present, hence @test_broken.
  @test_broken length(jacobian_residual_check(nls, x = x)) == 0
end