Commit

[Nonlinear.ReverseAD] fix NLPBlock and bridges (#2524)
odow authored Jul 14, 2024
1 parent 2ae5939 commit a15b67f
Showing 2 changed files with 35 additions and 0 deletions.
21 changes: 21 additions & 0 deletions src/Nonlinear/ReverseAD/reverse_mode.jl
@@ -54,6 +54,27 @@ function _reverse_mode(d::NLPEvaluator, x)
for con in d.constraints
_reverse_eval(con)
end
# If a JuMP model uses the legacy nonlinear interface, then JuMP constructs
# an NLPEvaluator at the start of a call to `JuMP.optimize!` and it passes in
# the list of variables in the JuMP model to `.ordered_variables`.
#
# During `MOI.initialize`, `.last_x` gets filled with `NaN` to match the
# length of `ordered_variables`, that is, the number of variables in the
# JuMP model.
#
# However, if the model includes a bridge that adds new decision variables
# then the total number of variables in the optimizer (in `x`) will be
# larger than the cache in `last_x`.
#
# It is safe to resize `last_x` because only the variables in
# `ordered_variables` can appear in the NLPBlock.
#
# I don't think we need any other fixes because callers to things like
# `eval_objective` can pass in a longer input `x` vector without fear
# because the excess elements won't be used.
if length(d.last_x) < length(x)
resize!(d.last_x, length(x))
end
copyto!(d.last_x, x)
return
end
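A minimal standalone sketch (not part of the commit) of the resize-then-copy step added above: `last_x` is sized from `ordered_variables`, so when bridges make the optimizer-level `x` longer, the cache is grown before the copy, and the extra tail is written but never read because only the original variables appear in the expression graphs. The names mirror the `NLPEvaluator` fields but are plain local variables here.

last_x = fill(NaN, 1)          # sized from `ordered_variables` (one model variable)
x = [1.0, 2.0]                 # optimizer-level vector, longer because a bridge added a variable
if length(last_x) < length(x)
    resize!(last_x, length(x)) # grow the cache; existing entries are preserved
end
copyto!(last_x, x)             # safe now that the destination is at least as long as `x`
@assert last_x == x

The new test `test_varying_length_x` below exercises the same situation end-to-end through `MOI.eval_objective_gradient`.
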
14 changes: 14 additions & 0 deletions test/Nonlinear/ReverseAD.jl
@@ -1121,6 +1121,20 @@ function test_timers()
return
end

function test_varying_length_x()
model = MOI.Nonlinear.Model()
x = MOI.VariableIndex(1)
MOI.Nonlinear.set_objective(model, :(sin($x)))
evaluator =
MOI.Nonlinear.Evaluator(model, MOI.Nonlinear.SparseReverseMode(), [x])
MOI.initialize(evaluator, Symbol[:Grad])
∇f = [NaN]
MOI.eval_objective_gradient(evaluator, ∇f, [1.0, 2.0])
@test length(∇f) == 1
@test ∇f[1] ≈ cos(1.0)
return
end

end # module

TestReverseAD.runtests()
