diff --git a/src/Nonlinear/ReverseAD/reverse_mode.jl b/src/Nonlinear/ReverseAD/reverse_mode.jl
index ca7fa1e0f8..5487da2237 100644
--- a/src/Nonlinear/ReverseAD/reverse_mode.jl
+++ b/src/Nonlinear/ReverseAD/reverse_mode.jl
@@ -54,6 +54,27 @@ function _reverse_mode(d::NLPEvaluator, x)
     for con in d.constraints
         _reverse_eval(con)
     end
+    # If a JuMP model uses the legacy nonlinear interface, then JuMP constructs
+    # an `NLPEvaluator` at the start of a call to `JuMP.optimize!`, passing the
+    # list of variables in the JuMP model to `.ordered_variables`.
+    #
+    # During `MOI.initialize`, `.last_x` is filled with `NaN` to match the
+    # length of `ordered_variables`, that is, the number of variables in the
+    # JuMP model.
+    #
+    # However, if the model includes a bridge that adds new decision variables,
+    # then the total number of variables in the optimizer (the length of `x`)
+    # will be larger than the cache in `last_x`.
+    #
+    # It is safe to resize `last_x` because only the variables in
+    # `ordered_variables` can appear in the NLPBlock.
+    #
+    # I don't think we need any other fixes, because callers of functions like
+    # `eval_objective` can pass in a longer input `x` vector without fear: the
+    # excess elements won't be used.
+    if length(d.last_x) < length(x)
+        resize!(d.last_x, length(x))
+    end
     copyto!(d.last_x, x)
     return
 end
diff --git a/test/Nonlinear/ReverseAD.jl b/test/Nonlinear/ReverseAD.jl
index 9f3cc632fd..8eb597f873 100644
--- a/test/Nonlinear/ReverseAD.jl
+++ b/test/Nonlinear/ReverseAD.jl
@@ -1121,6 +1121,20 @@ function test_timers()
     return
 end

+function test_varying_length_x()
+    model = MOI.Nonlinear.Model()
+    x = MOI.VariableIndex(1)
+    MOI.Nonlinear.set_objective(model, :(sin($x)))
+    evaluator =
+        MOI.Nonlinear.Evaluator(model, MOI.Nonlinear.SparseReverseMode(), [x])
+    MOI.initialize(evaluator, Symbol[:Grad])
+    ∇f = [NaN]
+    MOI.eval_objective_gradient(evaluator, ∇f, [1.0, 2.0])
+    @test length(∇f) == 1
+    @test ∇f[1] ≈ cos(1.0)
+    return
+end
+
 end # module

TestReverseAD.runtests()
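
The comment in the patch explains the failure mode; the sketch below (not part of the patch, and assuming only `MathOptInterface` itself) walks through it end to end. It reuses the setup from `test_varying_length_x`: the evaluator is built with a single variable and then handed a two-element `x`, as if a bridge had added a decision variable.

```julia
import MathOptInterface as MOI

model = MOI.Nonlinear.Model()
x = MOI.VariableIndex(1)
MOI.Nonlinear.set_objective(model, :(sin($x)))
evaluator =
    MOI.Nonlinear.Evaluator(model, MOI.Nonlinear.SparseReverseMode(), [x])
MOI.initialize(evaluator, Symbol[:Grad])
# `last_x` was sized for one variable during `MOI.initialize`. Before this
# patch, passing a two-element `x` threw a `BoundsError` from
# `copyto!(d.last_x, x)`; after it, `last_x` is resized and the trailing
# element is stored but never read.
@assert MOI.eval_objective(evaluator, [1.0, 2.0]) ≈ sin(1.0)
∇f = [NaN]
MOI.eval_objective_gradient(evaluator, ∇f, [1.0, 2.0])
@assert ∇f[1] ≈ cos(1.0)
```

Resizing rather than erroring appears safe because, as the comment notes, only the variables in `ordered_variables` can appear in the NLPBlock, so the excess entries of `x` can never influence an evaluation.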