Read the tutorial: https://github.com/JuliaOpt/NLopt.jl#tutorial. You need to provide the gradient:
using ForwardDiff
using NLopt

# Use a gradient-based algorithm (prefix `LD_`) so the gradient we supply is
# actually used. Derivative-free algorithms (prefix `LN_`, e.g. :LN_SBPLX)
# always call the objective with an empty gradient vector, so providing a
# gradient to them is dead code.
opt = NLopt.Opt(:LD_LBFGS, 3)

# Objective: sum of squares, minimized at the origin.
# `sum(abs2, x)` avoids allocating the temporary array that `sum(x.^2)` would.
f(x) = sum(abs2, x)

# NLopt objective callback contract: `g` is an in-place gradient buffer.
# It has length 0 when the algorithm does not want derivatives, so only
# fill it when asked.
function my_obj(x, g)
    if length(g) > 0
        # Writes ∇f(x) into `g` in place — no extra allocation per call.
        ForwardDiff.gradient!(g, f, x)
    end
    return f(x)
end

opt.min_objective = my_obj
# Returns (minf, minx, ret): optimal value, minimizer, and a status Symbol
# such as :SUCCESS or :XTOL_REACHED.
sol = NLopt.optimize(opt, [1.0, 25.0, 0.1])
You may also be interested in https://github.com/JuliaNLSolvers/Optim.jl, which can compute gradients for you automatically (via automatic differentiation) so you don't have to wire up ForwardDiff yourself.