I think you actually want to do something like this:
using JuMP
import ForwardDiff
import Ipopt
# The function whose partial derivatives we want to use in constraints.
f(x) = x[1]^2 + 2*x[1]*x[2] + x[2]
# Return a function that evaluates the i-th partial derivative of f.
# It must accept scalar arguments (x::Real...) because that is how JuMP
# calls user-defined nonlinear operators.
function partial(f::Function, i::Int)
    return function (x::Real...)
        # Collect into a Vector{Real} so that element i can be overwritten
        # with a ForwardDiff.Dual without a type conversion error.
        y = collect(Real, x)
        return ForwardDiff.derivative(z -> (y[i] = z; f(y)), x[i])
    end
end
model = Model(Ipopt.Optimizer)
@variable(model, y[1:2])
rhs = @expression(model, [0.5 * y[1] * y[2], 0.2 * y[2]^2])
for i in 1:2
    # Operator names must be unique within the model.
    name_i = Symbol("op_my_gradient_$i")
    op = add_nonlinear_operator(model, 2, partial(f, i); name = name_i)
    @constraint(model, op(y...) == rhs[i])
end
@objective(model, Max, y[1])
optimize!(model)
@assert is_solved_and_feasible(model)
value.(y)
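
For this f, the two operators evaluate the partial derivatives 2*x[1] + 2*x[2] and 2*x[1] + 1, so the constraints enforce the gradient of f at y to equal rhs elementwise. As a quick sanity check before solving (a sketch; the test point [1.0, 2.0] is arbitrary), you can compare partial against ForwardDiff.gradient:

x_test = [1.0, 2.0]
g = ForwardDiff.gradient(f, x_test)
@assert partial(f, 1)(x_test...) ≈ g[1]  # 2*1 + 2*2 = 6
@assert partial(f, 2)(x_test...) ≈ g[2]  # 2*1 + 1 = 3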