Thanks @odow. Here’s a now-working example, cribbing the `memoize` function from https://jump.dev/JuMP.jl/stable/examples/nlp_tricks/#User-defined-functions-with-vector-outputs:
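Quick roadmap, since the script is long: it first simulates data from a two-option dynamic discrete choice model (good 1 has mean utility u_t = x_t * β + ξ_t and is obtained with probability q_t; good 2 is the outside option with q = 1, u = 0), then recovers β by MPEC: minimize the sum of squared ξ_t subject to the model's share equations holding as nonlinear constraints.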
using JuMP, Ipopt, LinearAlgebra, Random # LinearAlgebra for norm()
# generate fake data
N = 100 # number of periods
β0 = 1.0 # regression parameter
x = randn(MersenneTwister(1234), N) # exog. regressor
ξ0 = 0.25 * randn(MersenneTwister(2345), N) # good-specific iid shock, with E[ξ] = 0 by construction
u0 = x * β0 .+ ξ0 # mean utility of good 1
q = rand(MersenneTwister(3456), N) # probability of winning when choosing good 1 (good 2 always has q = 1, u = 0)
v, V = zeros(N), zeros(N) # initialize cvf(1), emax(1)
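# Backward recursion for good 1's choice-specific value function:
#   v[t] = q[t] * u0[t] + (1 - q[t]) * V[t+1],  V[t] = log(1 + exp(v[t])),  V[N+1] = 0.
# Sweeping the update N times propagates the terminal condition back through every period.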
for _ = 1:N
    global v = q .* u0 + (1 .- q) .* vcat(V[2:end], 0.0) # solve cvf(1) for all t
    global V = log.(1 .+ exp.(v)) # solve emax for all t
end
s0 = exp.(v) ./(1 .+ exp.(v)) # generate "observed" market shares
# code up MPEC
# least-squares objective in the structural errors ξ
function obj(ξ...)
    return sum(ξ[i]^2 for i = 1:N)
end
function getMeanUtilities(β, ξ::Vararg{TT, NN} where NN) where {TT <: Union{Real, VariableRef}}
    # recover mean utilities: u[i] = x[i] * β + ξ[i]
    u = Vector{TT}(undef, N)
    for i = 1:N
        u[i] = x[i] * β + ξ[i]
    end
    return u
end
function getShares(u::Vararg{TT, NN} where NN) where {TT <: Union{Real, VariableRef}}
    # initialize cvf and emax at the terminal condition V = 0
    v = Vector{TT}(undef, N)
    V = Vector{TT}(undef, N)
    s = Vector{TT}(undef, N)
    for i = 1:N
        v[i] = q[i] * u[i]
        V[i] = log(1 + exp(v[i]))
    end
    # iterate the recursion to its fixed point
    tol = 1e-14
    dist = 1e3
    v_ = similar(v)
    while dist > tol
        v_ = copy(v)
        for i = 1:N
            v[i] = q[i] * u[i] + (1 - q[i]) * (i == N ? 0.0 : V[i+1])
            V[i] = log(1 + exp(v[i]))
        end
        dist = norm(v_ - v) # requires LinearAlgebra
    end
    # implied logit shares
    for i = 1:N
        s[i] = exp(v[i]) / (1 + exp(v[i]))
    end
    return s
end
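# Optional sanity check: evaluated at the true mean utilities, the fixed point should
# reproduce the simulated shares, i.e. maximum(abs.(getShares(u0...) .- s0)) should be
# ≈ 0 up to the 1e-14 tolerance.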
# now define model
"""
memoize(foo::Function, n_outputs::Int)
Take a function `foo` and return a vector of length `n_outputs`, where each
element is a function that returns the `i`'th output of `foo`.
To avoid duplication of work, cache the most-recent evaluations of `foo`.
Because `foo_i` is auto-differentiated with ForwardDiff, our cache needs to
work when `x` is a `Float64` and a `ForwardDiff.Dual`.
"""
function memoize(foo::Function, n_outputs::Int)
    last_x, last_f = nothing, nothing
    last_dx, last_dfdx = nothing, nothing
    function foo_i(i, x::T...) where {T <: Real}
        if T == Float64
            if x !== last_x
                last_x, last_f = x, foo(x...)
            end
            return last_f[i]::T
        else
            if x !== last_dx
                last_dx, last_dfdx = x, foo(x...)
            end
            return last_dfdx[i]::T
        end
    end
    return [(x...) -> foo_i(i, x...) for i = 1:n_outputs]
end
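# e.g. memoize(getShares, N)[3] is a scalar function of (u_1, ..., u_N) returning s_3;
# all N wrappers share one cache, so getShares itself runs once per new input point.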
# memoize mean utility and share functions
memoized_u = memoize(getMeanUtilities, N)
memoized_s = memoize(getShares, N)
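# memoized_u[i] takes the N+1 arguments (β, ξ...); memoized_s[i] takes the N arguments (u...)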
# run model
function runModel(u_functions, s_functions)
    model = Model(Ipopt.Optimizer)
    @variables(model, begin
        β
        ξ[1:N]
        u[1:N]
    end)
    # one registered scalar function and one NL constraint per mean utility ...
    for (i, f) in enumerate(u_functions)
        f_sym = Symbol("f_$(i)")
        register(model, f_sym, N + 1, f; autodiff = true)
        add_NL_constraint(model, :($(u[i]) == $(f_sym)($(β), $(ξ...))))
    end
    # ... and per observed share
    for (i, s) in enumerate(s_functions)
        s_sym = Symbol("s_$(i)")
        register(model, s_sym, N, s; autodiff = true)
        add_NL_constraint(model, :($(s0[i]) == $(s_sym)($(u...))))
    end
    @constraint(model, β <= 3.0)
    @constraint(model, β >= -3.0)
    register(model, :obj, N, obj; autodiff = true)
    @NLobjective(model, Min, obj(ξ...))
    set_optimizer_attribute(model, "max_cpu_time", 420.0)
    optimize!(model)
    @show value(β)
    return value(β)
end
b = runModel(memoized_u, memoized_s) # estimate of β0
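Two design notes: `register` only accepts scalar-valued user functions, which is why every output of `getMeanUtilities` and `getShares` gets its own registered wrapper; and since all the wrappers share one cache through the `memoize` closure, each underlying vector function is evaluated only once per point instead of N times.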