# We illustrate the issue in the title via the following convex minimization problem.
# Objective f(x) = x⋅log(x): convex on (0, ∞). ⚠️ NOT defined at x = 0 (log(0) = -Inf).
convex_f(x) = x * log(x)
# Derivative of f: ∇f(x) = log(x) + 1 (zero at x = 1/e, the unconstrained minimizer).
f_gradient(x) = 1 + log(x)
import JuMP
import Ipopt
"""
    optimise(lb_of_x)

Solve the convex program  min x⋅log(x)  s.t.  x ≥ `lb_of_x`  with Ipopt and
return the reported optimal value of `x`.

Throws an error if Ipopt does not terminate with `LOCALLY_SOLVED`.
"""
function optimise(lb_of_x)
    # Idiomatic JuMP: pass the optimizer constructor directly — the anonymous
    # closure `() -> Ipopt.Optimizer()` is an unnecessary indirection.
    CP = JuMP.Model(Ipopt.Optimizer) # CP stands for Convex Program
    JuMP.@variable(CP, x >= lb_of_x)
    JuMP.@objective(CP, Min, convex_f(x))
    JuMP.optimize!(CP)
    # Validate the solver outcome explicitly: `@assert` is for internal
    # invariants and may be disabled at higher optimization levels, so it must
    # not guard run-time solver status.
    status = JuMP.termination_status(CP)
    status == JuMP.LOCALLY_SOLVED ||
        error("Ipopt did not report a locally optimal solution; termination status = $status")
    return JuMP.value(x)
end
"""
    assess(x)

Log the candidate point `x` together with the objective value and gradient
there, so optimality (∇f ≈ 0 at an interior minimizer) can be eyeballed.
"""
function assess(x)
    report = "x = $x, f = $(convex_f(x)), ∇f = $(f_gradient(x))"
    @info report
end
# ❌ The following solution returned by Ipopt is unexpected, since the domain of `x` is improperly set
# With lb = 0 the feasible set includes x = 0, where f is undefined and ∇f blows up;
# Ipopt then stops at a point that is not the true minimizer (presumably an artifact of
# its interior-point handling of the singular boundary — NOTE(review): confirm against Ipopt docs).
x = optimise(0)
assess(x) # [ Info: x = 0.2627101793829332, f = -0.35116570400222863, ∇f = -0.3367038339628263
# ✅ The following solution is the expected optimal solution.
# A tiny positive lower bound keeps the model inside f's domain; the returned x ≈ 1/e,
# where ∇f(x) = log(x) + 1 ≈ 0, i.e. the unconstrained minimizer of x⋅log(x).
x = optimise(1e-6)
assess(x) # [ Info: x = 0.36787944368298275, f = -0.36787944117144233, ∇f = 6.827074683357637e-9