Cache results between function calls in Optimization.jl

I want to use a cost function and constraints that depend on the solution of an ODE. This is costly to evaluate, so I’d like to cache the result for function calls with the same parameters. For that, I wanted to use the memoize function from the JuMP documentation.

function memoize(foo::Function, n_outputs::Int)
    last_x, last_f = nothing, nothing        # cache for Float64 calls
    last_dx, last_dfdx = nothing, nothing    # cache for ForwardDiff.Dual calls

    function foo_i(i, x::T...) where {T<:Real}
        display(T)                           # debug output: element type of the current call
        if T == Float64
            if x != last_x
                last_x, last_f = x, foo(x...)
            end
            return last_f[i]::T
        else
            if x != last_dx
                last_dx, last_dfdx = x, foo(x...)
            end
            return last_dfdx[i]::T
        end
    end
    return [(x...) -> foo_i(i, x...) for i in 1:n_outputs]
end

With this function, something goes wrong with ForwardDiff, but I can’t figure out where the problem is. Here is my MWE

using Optimization
using OptimizationMOI, Ipopt


fcn(x,p) = (1.0 - x[1])^2 + (2.0 - x[2])^2

x0 = [10.0; 20.0]

residual(x1,x2) = x1-x2

mem_res = memoize(residual,1)

cons1(res, x, p) = (res .= [residual(x[1],x[2])])
cons2(res, x, p) = (res .= [mem_res[1](x[1],x[2])])

optf = OptimizationFunction(fcn, Optimization.AutoForwardDiff();cons = cons1)
optprob = Optimization.OptimizationProblem(optf, x0,lcons = [-Inf], ucons = [0.0] )
sol1 = solve(optprob,  Ipopt.Optimizer(), abstol = 1.0E-5, maxiters = 20000)

optf = OptimizationFunction(fcn, Optimization.AutoForwardDiff();cons = cons2)
optprob = Optimization.OptimizationProblem(optf, x0,lcons = [-Inf], ucons = [0.0] )
sol2 = solve(optprob,  Ipopt.Optimizer(), abstol = 1.0E-5, maxiters = 20000)

The first call to solve works fine (no memoization); the second call fails with the following stacktrace:

ERROR: TypeError: in typeassert, expected ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}, got a value of type ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#98#115"{Int64}, Float64}, Float64, 2}
Stacktrace:
  [1] (::var"#foo_i#71"{typeof(residual)})(::Int64, ::ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}, ::Vararg{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}})
    @ Main .\optMWE.jl:31
  [2] (::var"#70#73"{Int64, var"#foo_i#71"{typeof(residual)}})(::ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}, ::Vararg{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}})
    @ Main .\optMWE.jl:34
  [3] cons2(res::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}}, x::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}}, p::SciMLBase.NullParameters)
    @ Main .\optMWE.jl:46
  [4] (::Optimization.var"#97#114"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters})(res::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}}, θ::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}})
    @ Optimization C:\Users\user\.julia\packages\Optimization\aPPOg\src\function\forwarddiff.jl:78
  [5] (::Optimization.var"#98#115"{Int64})(x::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}})
    @ Optimization C:\Users\user\.julia\packages\Optimization\aPPOg\src\function\forwarddiff.jl:79
  [6] (::Optimization.var"#102#119"{Int64})(x::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}})
    @ Optimization C:\Users\user\.julia\packages\Optimization\aPPOg\src\function\forwarddiff.jl:92
  [7] vector_mode_dual_eval!
    @ C:\Users\user\.julia\packages\ForwardDiff\QdStj\src\apiutils.jl:37 [inlined]
  [8] vector_mode_gradient(f::Optimization.var"#102#119"{Int64}, x::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}}, cfg::ForwardDiff.GradientConfig{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}}})
    @ ForwardDiff C:\Users\user\.julia\packages\ForwardDiff\QdStj\src\gradient.jl:106
  [9] gradient
    @ C:\Users\user\.julia\packages\ForwardDiff\QdStj\src\gradient.jl:19 [inlined]
 [10] #116
    @ C:\Users\user\.julia\packages\ForwardDiff\QdStj\src\hessian.jl:32 [inlined]
 [11] vector_mode_dual_eval!
    @ C:\Users\user\.julia\packages\ForwardDiff\QdStj\src\apiutils.jl:37 [inlined]
 [12] vector_mode_jacobian!(result::Matrix{Float64}, f::ForwardDiff.var"#116#117"{Optimization.var"#102#119"{Int64}, ForwardDiff.HessianConfig{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}}, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}}}}, x::Vector{Float64}, cfg::ForwardDiff.JacobianConfig{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}}})
    @ ForwardDiff C:\Users\user\.julia\packages\ForwardDiff\QdStj\src\jacobian.jl:168
 [13] jacobian!
    @ C:\Users\user\.julia\packages\ForwardDiff\QdStj\src\jacobian.jl:58 [inlined]
 [14] hessian!
    @ C:\Users\user\.julia\packages\ForwardDiff\QdStj\src\hessian.jl:33 [inlined]
 [15] (::Optimization.var"#104#121"{Int64, Vector{ForwardDiff.HessianConfig{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}}, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}}}}, Vector{Optimization.var"#102#119"{Int64}}})(res::Vector{Matrix{Float64}}, θ::Vector{Float64})
    @ Optimization C:\Users\user\.julia\packages\Optimization\aPPOg\src\function\forwarddiff.jl:98
 [16] eval_hessian_lagrangian(moiproblem::OptimizationMOI.MOIOptimizationProblem{Float64, OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Optimization.var"#90#107"{ForwardDiff.GradientConfig{ForwardDiff.Tag{Optimization.var"#89#106"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#89#106"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}, Float64}, Float64, 2}}}, Optimization.var"#89#106"{OptimizationFunction{true, 
Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}}, Optimization.var"#93#110"{ForwardDiff.HessianConfig{ForwardDiff.Tag{Optimization.var"#89#106"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#89#106"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#89#106"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}, Float64}, Float64, 2}, 2}}, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#89#106"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}, Float64}, Float64, 2}}}, Optimization.var"#89#106"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}}, Optimization.var"#96#113", Optimization.var"#97#114"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, 
Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}, Optimization.var"#99#116"{ForwardDiff.JacobianConfig{ForwardDiff.Tag{Optimization.var"#98#115"{Int64}, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#98#115"{Int64}, Float64}, Float64, 2}}}}, Optimization.var"#104#121"{Int64, Vector{ForwardDiff.HessianConfig{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}}, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}}}}, Vector{Optimization.var"#102#119"{Int64}}}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, SciMLBase.NullParameters, Matrix{Float64}, Matrix{Float64}, Matrix{Float64}}, h::SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}, x::Vector{Float64}, σ::Float64, μ::SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true})
    @ OptimizationMOI C:\Users\user\.julia\packages\OptimizationMOI\cHl7S\src\OptimizationMOI.jl:189
 [17] eval_hessian_lagrangian(model::Ipopt.Optimizer, H::Vector{Float64}, x::Vector{Float64}, σ::Float64, μ::Vector{Float64})
    @ Ipopt C:\Users\user\.julia\packages\Ipopt\rQctM\src\MOI_wrapper.jl:573
 [18] (::Ipopt.var"#eval_h_cb#5"{Ipopt.Optimizer, Vector{Tuple{Int64, Int64}}})(x::Vector{Float64}, rows::Vector{Int32}, cols::Vector{Int32}, obj_factor::Float64, lambda::Vector{Float64}, values::Vector{Float64})
    @ Ipopt C:\Users\user\.julia\packages\Ipopt\rQctM\src\MOI_wrapper.jl:616
 [19] _Eval_H_CB(n::Int32, x_ptr::Ptr{Float64}, #unused#::Int32, obj_factor::Float64, m::Int32, lambda_ptr::Ptr{Float64}, #unused#::Int32, nele_hess::Int32, iRow::Ptr{Int32}, jCol::Ptr{Int32}, values_ptr::Ptr{Float64}, user_data::Ptr{Nothing})
    @ Ipopt C:\Users\user\.julia\packages\Ipopt\rQctM\src\C_wrapper.jl:127
 [20] IpoptSolve(prob::IpoptProblem)
    @ Ipopt C:\Users\user\.julia\packages\Ipopt\rQctM\src\C_wrapper.jl:442
 [21] optimize!(model::Ipopt.Optimizer)
    @ Ipopt C:\Users\user\.julia\packages\Ipopt\rQctM\src\MOI_wrapper.jl:727
 [22] __solve(prob::SciMLBase.OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Vector{Float64}, Vector{Float64}, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, opt::Ipopt.Optimizer; maxiters::Int64, maxtime::Nothing, abstol::Float64, reltol::Nothing, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ OptimizationMOI C:\Users\user\.julia\packages\OptimizationMOI\cHl7S\src\OptimizationMOI.jl:381
 [23] solve(::SciMLBase.OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, typeof(fcn), Nothing, Nothing, Nothing, typeof(cons2), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Vector{Float64}, Vector{Float64}, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, ::Ipopt.Optimizer; kwargs::Base.Pairs{Symbol, Real, Tuple{Symbol, Symbol}, NamedTuple{(:abstol, :maxiters), Tuple{Float64, Int64}}})
    @ SciMLBase C:\Users\user\.julia\packages\SciMLBase\hLrpl\src\solve.jl:85
 [24] top-level scope
    @ .\optMWE.jl:54

What’s the error message?

I’ve added the stacktrace directly now (without the “[details]” block…)

That’s the stacktrace, but what about the error message?

ahh sorry

ERROR: TypeError: in typeassert, expected ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#102#119"{Int64}, Float64}, Float64, 2}, 2}, got a value of type ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#98#115"{Int64}, Float64}, Float64, 2}

That memoization doesn’t seem to correctly handle duals. You may want to do it via PreallocationTools.jl instead.
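For context, the typical pattern with PreallocationTools looks roughly like this (a minimal, hypothetical sketch; cache and expensive! are made-up names): a DiffCache keeps a Float64 buffer alongside a dual-compatible one, and get_tmp returns whichever matches the element type of the input, so ForwardDiff can differentiate straight through it.

using PreallocationTools, ForwardDiff

# Hypothetical sketch: a dual-aware scratch buffer for a costly computation.
const cache = DiffCache(zeros(2))

function expensive!(x)
    buf = get_tmp(cache, x)   # Float64 buffer for Float64 input, Dual buffer for Dual input
    buf .= x .^ 2             # stand-in for the expensive computation
    return buf
end

expensive!([1.0, 2.0])                        # plain evaluation
ForwardDiff.jacobian(expensive!, [1.0, 2.0])  # works under ForwardDiff as well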

Thank you very much for the hint! My new memoize function seems to work now. I use LazyBufferCache() for that.

function memoize(foo::Function, n_outputs::Int)
    # lazily allocated buffers holding the last input and the last result
    # (LazyBufferCache comes from PreallocationTools)
    lbc_x = LazyBufferCache()
    lbc_foo = LazyBufferCache()

    function foo_mem(x)
        tmp_x = lbc_x[x]                                    # buffer with the eltype and length of x
        tmp_foo = lbc_foo[repeat([first(x)], n_outputs)]    # buffer with the eltype of x and length n_outputs

        if x != tmp_x
            res = foo(x)

            # overwrite the cached buffers with the new result and input
            lbc_foo.bufs[(typeof(tmp_foo), (n_outputs,))] = res
            lbc_x.bufs[(typeof(x), (length(x),))] = x
        else
            res = tmp_foo
        end

        return res
    end

    return (x) -> foo_mem(x)
end

At the moment, this function is not very general: it assumes a single input and a single output, both vectors with the same element type but possibly different lengths.
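(As an aside, a more general, dual-aware variant could simply key the cache on the element type and keep a copy of the last input per type; the following is only a hypothetical sketch, and memoize_general is a made-up name.)

function memoize_general(foo::Function)
    # one (input copy, output) pair per element type, so Float64 calls and
    # ForwardDiff.Dual calls (with any tag) each get their own cache entry
    cache = Dict{DataType, Tuple{Any, Any}}()
    return function (x::AbstractVector)
        entry = get(cache, eltype(x), nothing)
        if entry === nothing || entry[1] != x
            fx = foo(x)
            cache[eltype(x)] = (copy(x), fx)   # store a copy so in-place updates of x cannot alias the cache
            return fx
        end
        return entry[2]
    end
end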

Do you know a better way to update the cache than

    lbc_foo.bufs[(typeof(tmp_foo), (n_outputs,))] = res
    lbc_x.bufs[(typeof(x), (length(x),))] = x

No, that looks reasonable to me.

There is still some issue with this memoization when I use it in Optimization.jl. It seems that tmp_x and x already have the same value whenever the memoized function is called during the optimization, but I can’t figure out how this happens.

This effect does not occur in JuMP: both the memoized and the non-memoized constraint functions give the same optimization result. Here is a MWE:

using JuMP, Ipopt
using PreallocationTools

x0 = [10.0; 20.0]

function residual(x)
    return [x[1]-x[2]]
end

mem_res = memoize(residual,1)

consfun1(x1,x2) = residual([x1;x2])[1]
consfun2(x1,x2) = mem_res([x1;x2])[1]

# solve with JuMP, without memoization
m1 = direct_model(Ipopt.Optimizer())
@variable(m1, x[1:2])

register(m1,:consfun,2,consfun1; autodiff = true)

@NLobjective(m1, Min,  (1.0 - x[1])^2 + (2.0 - x[2])^2)
@NLconstraint(m1, consfun(x[1],x[2]) <= 0)

set_start_value(x[1],x0[1])
set_start_value(x[2],x0[2])

optimize!(m1)


# solve with JuMP, with memoization
m2 = direct_model(Ipopt.Optimizer())
@variable(m2, x[1:2])

register(m2,:consfun,2,consfun2; autodiff = true)

@NLobjective(m2, Min,  (1.0 - x[1])^2 + (2.0 - x[2])^2)
@NLconstraint(m2, consfun(x[1],x[2]) <= 0)

set_start_value(x[1],x0[1])
set_start_value(x[2],x0[2])

optimize!(m2)

Both optimizations here give the same result.

Oh oops, you’re treating the buffer as a memoization. You’d need to recompute the buffer value. Instead, just use the internals directly, i.e. apply the reinterpret method in the same way on the tag type. Maybe this would be a nice primitive to have in the library.
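To illustrate the kind of pitfall (a rough sketch with made-up variable names, not a statement about the LazyBufferCache internals): if the cache ends up storing the very array that the solver mutates in place, the stored input always compares equal to the current one and nothing is ever recomputed.

x = [10.0, 20.0]
last_x = x          # the cache keeps a reference to the caller's array, not a copy
x[1] = 5.0          # the solver updates the iterate in place
x == last_x         # true, even though the input changed, so no recomputation is triggered
last_x = copy(x)    # storing a copy keeps the comparison meaningful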

Ohh I see :sweat_smile: thank you for the tips! I think this is trickier than I thought. When using reinterpret with the memoization function from the original post, I ran into issues similar to those in Issue with PDMP and Forwardiff - DifferentialEquation - #27 by vettert.
I will have a closer look…

It’s an old post, but I ran into a similar problem today. I also use the memoize function from the JuMP docs. I solved the issue by replacing the value-equality comparison (!=) with object identity (!==): with !=, a dual input whose value matches the cached one skips the recomputation and returns a result that was stored under a different ForwardDiff tag type, which is exactly what trips the typeassert above; with !==, inputs of a different dual type are never considered identical, so the result is recomputed with the correct tag.

function memoize(foo::Function, n_outputs::Int)
    last_x, last_f = nothing, nothing
    last_dx, last_dfdx = nothing, nothing
    
    function foo_i(i, x::T...) where {T<:Real}
        display(T)
        if T == Float64
            if x !== last_x         # !== instead of !=
                last_x, last_f = x, foo(x...)
            end
            return last_f[i]::T
        else
            if x !== last_dx       # !== instead of !=
                last_dx, last_dfdx = x, foo(x...)
            end
            return last_dfdx[i]::T
        end
    end
    return [(x...) -> foo_i(i, x...) for i in 1:n_outputs]
end

Now this code:

using Optimization
using OptimizationMOI, Ipopt

fcn(x,p) = (1.0 - x[1])^2 + (2.0 - x[2])^2

x0 = [10.0; 20.0]

residual(x1,x2) = x1-x2

mem_res = memoize(residual,1)

cons1(res, x, p) = (res .= [residual(x[1],x[2])])
cons2(res, x, p) = (res .= [mem_res[1](x[1],x[2])])

optf = OptimizationFunction(fcn, Optimization.AutoForwardDiff();cons = cons2)
optprob = Optimization.OptimizationProblem(optf, x0,lcons = [-Inf], ucons = [0.0] )
sol2 = solve(optprob,  Ipopt.Optimizer(), abstol = 1.0E-5, maxiters = 20000)

works as intended:

EXIT: Optimal Solution Found.
u: 2-element Vector{Float64}:
 0.9999999987452975
 2.0000000012547026