Here are the docs: https://julianlsolvers.github.io/Optim.jl/stable/#examples/generated/ipnewton_basics/#multiple-constraints
I guess you want something like this, assuming I haven’t made a mistake.
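To be explicit, the problem the code below encodes (reading the bounds straight off the arguments) is:

$$
\begin{aligned}
\min_{x \in \mathbb{R}^4}\ & x_1 + x_2 + x_3 + x_4 \\
\text{subject to}\ & 0 \le \frac{x_1}{3} - x_2 \le \frac{1}{3}, \\
& 5 \le \frac{1}{x_3} + \frac{1}{x_4} \le 6, \\
& x_i \ge 0 \quad (i = 1, \dots, 4).
\end{aligned}
$$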
```julia
using Optim

f(x) = sum(x)

function f_grad!(g, x)
    fill!(g, 1.0)
    return g
end

function f_hess!(h, x)
    fill!(h, 0.0)
    return h
end

function c(y, x)
    y[1] = x[1] / 3 - x[2]
    y[2] = 1 / x[3] + 1 / x[4]
    return y
end

function c_jac!(J, x)
    fill!(J, 0.0)
    # c₁'(x) = [1 / 3, -1, 0, 0]
    J[1, 1] = 1 / 3
    J[1, 2] = -1
    # c₂'(x) = [0, 0, -1 / x[3]^2, -1 / x[4]^2]
    J[2, 3] = -1 / x[3]^2
    J[2, 4] = -1 / x[4]^2
    return J
end
function c_hess!(H, x, λ)
    # c₁''(x) = 0, so only c₂ contributes.
    # c₂''(x) = Diag([0, 0, 2 / x[3]^3, 2 / x[4]^3])
    # The λ-weighted constraint Hessians are added into H in place.
    H[3, 3] += λ[2] * 2 / x[3]^3
    H[4, 4] += λ[2] * 2 / x[4]^3
    return H
end
x0 = [0.5, 0.1, 1.2, 0.2]

df = TwiceDifferentiable(f, f_grad!, f_hess!, x0)

dfc = TwiceDifferentiableConstraints(
    c,
    c_jac!,
    c_hess!,
    [0.0, 0.0, 0.0, 0.0],  # lx: lower bounds on x
    Float64[],             # ux: upper bounds on x (empty ⇒ unbounded above)
    [0.0, 5.0],            # lc: lower bounds on c(x)
    [1 / 3, 6.0],          # uc: upper bounds on c(x)
)

optimize(df, dfc, x0, IPNewton())
```
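If you want to double-check the hand-written derivatives, a quick sanity check against ForwardDiff (assuming you have it installed; the out-of-place `c_vec` helper below is only for the check and is not part of the Optim setup) would be something like:

```julia
using ForwardDiff

xtest = [0.5, 0.1, 1.2, 0.2]

# Out-of-place constraint function, only used for this check.
c_vec(x) = [x[1] / 3 - x[2], 1 / x[3] + 1 / x[4]]

@assert ForwardDiff.gradient(f, xtest) ≈ f_grad!(similar(xtest), xtest)
@assert ForwardDiff.jacobian(c_vec, xtest) ≈ c_jac!(zeros(2, 4), xtest)
# λ = [0, 1] isolates the c₂ block that c_hess! adds into H.
@assert ForwardDiff.hessian(x -> 1 / x[3] + 1 / x[4], xtest) ≈
        c_hess!(zeros(4, 4), xtest, [0.0, 1.0])
```

You can also capture the result, e.g. `res = optimize(df, dfc, x0, IPNewton())`, and then look at `Optim.minimizer(res)` and `Optim.minimum(res)`.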
You could also write this in JuMP (https://jump.dev/JuMP.jl/stable/), which may be easier:
```julia
using JuMP, Ipopt

model = Model(Ipopt.Optimizer)
# Small positive lower bound keeps 1 / x[3] and 1 / x[4] well defined.
@variable(model, x[1:4] >= 0.00001)
@NLconstraint(model, 0 <= x[1] / 3 - x[2] <= 1 / 3)
@NLconstraint(model, 5 <= 1 / x[3] + 1 / x[4] <= 6)
@objective(model, Min, sum(x))
```
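To actually run the solve and pull out the solution, the usual JuMP calls apply:

```julia
optimize!(model)

@show termination_status(model)  # expect LOCALLY_SOLVED if Ipopt converges
@show value.(x)                  # solution point
@show objective_value(model)     # objective at the solution
```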