Hello everyone,
I’m trying to build an optimization problem in Julia but keep getting an error.
My objective function is:
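f(λ) = 0.15 · λ · ((1 - λ)^4 - λ^4) · ∫₀¹ [ B_x(n - nλ, nλ + 1) / B_x(n - nλ - 1, nλ + 1) ] dx,   with n = 8,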
where B_x is the incomplete beta function and λ is the decision variable whose optimal value I’m looking for.
Here is my code and the errors I got:
using JuMP, Ipopt, SpecialFunctions, QuadGK
n = 8
g(x) = quadgk(x -> (beta_inc(n - n*λ, n*λ + 1, x)[1] * beta(n - n*λ, n*λ + 1)) / (beta_inc(n - n*λ - 1, n*λ + 1, x)[1] * beta(n - n*λ - 1, n*λ + 1)), 0, 1, rtol=1e-8)[1]
f(λ) = 0.15 * λ * ((1-λ)^4 - λ^4) * g
m = Model(Ipopt.Optimizer)
register(m, :f, 1, f, autodiff = true)
@variable(m, 0 <= λ)
@NLobjective(m, Max, f(λ))
@NLconstraint(m, λ <= 1)
JuMP.optimize!(m)
println("** Optimal objective function value = ", JuMP.objective_value(m))
println("** Optimal solution = ", JuMP.value.(λ))
ERROR: Unable to register the function :f.
Common reasons for this include:
 * The function takes `f(x::Vector)` as input, instead of the splatted
   `f(x...)`.
 * The function assumes `Float64` will be passed as input, it must work for any
   generic `Real` type.
 * The function allocates temporary storage using `zeros(3)` or similar. This
   defaults to `Float64`, so use `zeros(T, 3)` instead.
Stacktrace:
 [1] error(s::String)
   @ Base .\error.jl:33
 [2] _validate_register_assumptions(f::typeof(f), name::Symbol, dimension::Int64)
   @ MathOptInterface.Nonlinear C:\Users\thvoe\.julia\packages\MathOptInterface\pgWRA\src\Nonlinear\operators.jl:327
 [3] MathOptInterface.Nonlinear._UnivariateOperator(op::Symbol, f::Function)
   @ MathOptInterface.Nonlinear C:\Users\thvoe\.julia\packages\MathOptInterface\pgWRA\src\Nonlinear\operators.jl:340
 [4] register_operator(registry::MathOptInterface.Nonlinear.OperatorRegistry, op::Symbol, nargs::Int64, f::Function)
   @ MathOptInterface.Nonlinear C:\Users\thvoe\.julia\packages\MathOptInterface\pgWRA\src\Nonlinear\operators.jl:399
 [5] register_operator
   @ C:\Users\thvoe\.julia\packages\MathOptInterface\pgWRA\src\Nonlinear\model.jl:217 [inlined]
 [6] register(model::Model, op::Symbol, dimension::Int64, f::Function; autodiff::Bool)
   @ JuMP C:\Users\thvoe\.julia\packages\JuMP\ToPd2\src\nlp.jl:720
 [7] top-level scope
   @ g:\My Drive\Study Abroad\Career\myPP\Optimization Model\Models\beta bid function.jl:8
caused by: MethodError: no method matching *(::ForwardDiff.Dual{ForwardDiff.Tag{typeof(f), Float64}, Float64, 1}, ::typeof(g))
Closest candidates are:
  *(::Any, ::Any, ::Any, ::Any...) at operators.jl:560
  *(::SpecialFunctions.SimplePoly, ::Any) at C:\Users\thvoe\.julia\packages\SpecialFunctions\QH8rV\src\expint.jl:8
  *(::ChainRulesCore.AbstractThunk, ::Any) at C:\Users\thvoe\.julia\packages\ChainRulesCore\0t04l\src\tangent_arithmetic.jl:125        
Stacktrace:
  [1] afoldl(op::typeof(*), a::ForwardDiff.Dual{ForwardDiff.Tag{typeof(f), Float64}, Float64, 1}, bs::Function)
    @ Base .\operators.jl:533
  [2] *(a::Float64, b::ForwardDiff.Dual{ForwardDiff.Tag{typeof(f), Float64}, Float64, 1}, c::ForwardDiff.Dual{ForwardDiff.Tag{typeof(f), Float64}, Float64, 1}, xs::Function)
    @ Base .\operators.jl:560
  [3] f(λ::ForwardDiff.Dual{ForwardDiff.Tag{typeof(f), Float64}, Float64, 1})
    @ Main g:\My Drive\Study Abroad\Career\myPP\Optimization Model\Models\beta bid function.jl:5
  [4] derivative
    @ C:\Users\thvoe\.julia\packages\ForwardDiff\PcZ48\src\derivative.jl:14 [inlined]
  [5] _validate_register_assumptions(f::typeof(f), name::Symbol, dimension::Int64)
    @ MathOptInterface.Nonlinear C:\Users\thvoe\.julia\packages\MathOptInterface\pgWRA\src\Nonlinear\operators.jl:321
  [6] MathOptInterface.Nonlinear._UnivariateOperator(op::Symbol, f::Function)
    @ MathOptInterface.Nonlinear C:\Users\thvoe\.julia\packages\MathOptInterface\pgWRA\src\Nonlinear\operators.jl:340
  [7] register_operator(registry::MathOptInterface.Nonlinear.OperatorRegistry, op::Symbol, nargs::Int64, f::Function)
    @ MathOptInterface.Nonlinear C:\Users\thvoe\.julia\packages\MathOptInterface\pgWRA\src\Nonlinear\operators.jl:399
  [8] register_operator
    @ C:\Users\thvoe\.julia\packages\MathOptInterface\pgWRA\src\Nonlinear\model.jl:217 [inlined]
  [9] register(model::Model, op::Symbol, dimension::Int64, f::Function; autodiff::Bool)
    @ JuMP C:\Users\thvoe\.julia\packages\JuMP\ToPd2\src\nlp.jl:720
 [10] top-level scope
    @ g:\My Drive\Study Abroad\Career\myPP\Optimization Model\Models\beta bid function.jl:8
I followed some examples online for how to register a user-defined function, but it doesn’t seem to work here. I would be very grateful for any help or suggestions on how to fix this.
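In case it clarifies what I’m trying to do, here is a rough sketch of how I thought g and f fit together (this is only my guess at the right structure: g takes λ explicitly and is called as g(λ) inside f, rather than the bare g in the code above, and x only appears as the integration variable; it may still not be the right way to set things up for register):

using SpecialFunctions, QuadGK

n = 8

# g takes λ explicitly; the integration variable x only lives inside the integrand
function g(λ)
    integrand(x) = (beta_inc(n - n*λ, n*λ + 1, x)[1] * beta(n - n*λ, n*λ + 1)) /
                   (beta_inc(n - n*λ - 1, n*λ + 1, x)[1] * beta(n - n*λ - 1, n*λ + 1))
    return quadgk(integrand, 0, 1, rtol = 1e-8)[1]
end

# f calls g(λ) instead of multiplying by the function object g
f(λ) = 0.15 * λ * ((1 - λ)^4 - λ^4) * g(λ)

f(0.3)   # plain Float64 evaluation works; registering it with autodiff is where I'm stuck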
Thanks