Hi @SSGD, welcome to the forum.
See Computing Hessians · JuMP. You need to adapt it slightly to compute the constraint Jacobian:
julia> using JuMP, Ipopt, SparseArrays

julia> function build_nlp_evaluator(model)
           rows = Any[]
           nlp = MOI.Nonlinear.Model()
           for (F, S) in list_of_constraint_types(model)
               # Skip variable bounds; they don't add rows to the Jacobian
               if !(F <: VariableRef)
                   for ci in all_constraints(model, F, S)
                       push!(rows, ci)
                       object = constraint_object(ci)
                       MOI.Nonlinear.add_constraint(nlp, object.func, object.set)
                   end
               end
           end
           MOI.Nonlinear.set_objective(nlp, objective_function(model))
           # Columns follow the order of `all_variables(model)`
           evaluator = MOI.Nonlinear.Evaluator(
               nlp,
               MOI.Nonlinear.SparseReverseMode(),
               index.(all_variables(model)),
           )
           return evaluator, rows
       end
build_nlp_evaluator (generic function with 1 method)
julia> begin
           model = Model(Ipopt.Optimizer)
           set_silent(model)
           @variable(model, x[i = 1:2], start = -i)
           @constraint(model, g_1, x[1]^2 <= 1)
           @constraint(model, g_2, (x[1] + x[2])^2 <= 2)
           @objective(model, Min, (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2)
           optimize!(model)
       end
julia> x_star = value.(x)
2-element Vector{Float64}:
0.7903587565231842
0.6238546272155127
julia> evaluator, rows = build_nlp_evaluator(model);
julia> MOI.initialize(evaluator, [:Jac]);
julia> sparsity = MOI.jacobian_structure(evaluator)
3-element Vector{Tuple{Int64, Int64}}:
(1, 1)
(2, 1)
(2, 2)
julia> I, J, V = first.(sparsity), last.(sparsity), zeros(length(sparsity));
julia> MOI.eval_constraint_jacobian(evaluator, V, x_star)
julia> Jac = SparseArrays.sparse(I, J, V, length(rows), num_variables(model))
2×2 SparseMatrixCSC{Float64, Int64} with 3 stored entries:
 1.58072   ⋅
 2.82843  2.82843
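
Row i of Jac corresponds to rows[i] (here row 1 is g_1 and row 2 is g_2), and the columns follow all_variables(model). If you want to double-check the values, here is a quick sanity check against the analytic derivatives, 2x[1] for g_1 and 2(x[1] + x[2]) for both partials of g_2. This is just a sketch that reuses x_star and Jac from above:

# Hypothetical sanity check (not part of the original example)
d1 = 2 * x_star[1]                # ∂g_1/∂x[1]
d2 = 2 * (x_star[1] + x_star[2])  # ∂g_2/∂x[1] = ∂g_2/∂x[2]
@assert Jac ≈ [d1 0.0; d2 d2]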
I should add a section on Jacobians to the documentation.