Non-convex optimization

I recently revamped the NLP documentation. Here’s a sneak preview of what will be included in the next release: Nonlinear Modeling · JuMP.

I didn’t test this, so there may be typos, but you want something like:

using JuMP, Ipopt

function nlp_model(p, c1, c2, b, k, n)
    model = Model(Ipopt.Optimizer)

    # Decision variables: the allocation vector x and three scalar rates.
    @variable(model, 0 <= x[i = 1:n] <= 1)
    @variable(model, 0 <= var1 <= 1)
    @variable(model, 0 <= var2 <= 1)
    @variable(model, 0 <= var3 <= 1)
    @objective(model, Max, var1 - var2 + var3)

    # Nonlinear parameters holding the two coefficient matrices.
    @NLparameter(model, p1[j = 1:k, i = 1:n] == c1[j, i])
    @NLparameter(model, p2[j = 1:k, i = 1:n] == c2[j, i])

    # Reusable nonlinear expressions.
    @NLexpression(model, xp, sum(x[i] * p[i] for i in 1:n))
    @NLexpression(model, c1_expr[j = 1:k], sum(p1[j, i] * x[i] for i in 1:n))
    @NLexpression(model, c2_expr[j = 1:k], sum(p2[j, i] * x[i] for i in 1:n))

    # Each rate is defined implicitly by a discounted-sum equation in xp.
    @NLconstraint(model, con3, xp - sum(b[j]       / (1 + var1)^j for j in 1:k) == 0)
    @NLconstraint(model, con5, xp - sum(c1_expr[j] / (1 + var2)^j for j in 1:k) == 0)
    @NLconstraint(model, con6, xp - sum(c2_expr[j] / (1 + var3)^j for j in 1:k) == 0)

    # Linear side constraints (vectorized).
    @constraint(model, con1, c1 * x .>= b)
    return model
end
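
For completeness, here is a rough sketch of how you might call it, assuming the function above is already defined. The dimensions and the random data below are placeholders I made up for illustration, not values from your problem:

# Placeholder dimensions and data, purely for illustration.
n, k = 4, 3
p  = rand(n)       # weights used in the xp expression
c1 = rand(k, n)    # coefficient matrix behind p1 / c1_expr
c2 = rand(k, n)    # coefficient matrix behind p2 / c2_expr
b  = rand(k)       # right-hand side for con1 and con3

model = nlp_model(p, c1, c2, b, k, n)
optimize!(model)
println(termination_status(model))
println(value.(model[:x]))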