I got around to actually trying this and I am getting an error. This is on Julia v1.0, Optim v0.17.0.
"""
    f(x)

Rosenbrock objective `(1 - x₁)² + 100(x₂ - x₁²)²`, minimized at `(1, 1)`.
"""
function f(x)
    a = 1.0 - x[1]
    b = x[2] - x[1]^2
    return a^2 + 100.0 * b^2
end
"""
    g!(G, x)

Fill `G` in place with the gradient of the Rosenbrock objective at `x`.

Returns `G`, following the convention that mutating functions return the
mutated argument (like `sort!`). Optim ignores the return value, so this
is backward-compatible.
"""
function g!(G, x)
    # ∂f/∂x₁ and ∂f/∂x₂ of (1 - x₁)² + 100(x₂ - x₁²)².
    G[1] = -2.0 * (1.0 - x[1]) - 400.0 * (x[2] - x[1]^2) * x[1]
    G[2] = 200.0 * (x[2] - x[1]^2)
    return G
end
"""
    h!(H, x)

Fill `H` in place with the Hessian of the Rosenbrock objective at `x`.

Returns `H`, following the convention that mutating functions return the
mutated argument (like `sort!`). Optim ignores the return value, so this
is backward-compatible.
"""
function h!(H, x)
    H[1, 1] = 2.0 - 400.0 * x[2] + 1200.0 * x[1]^2
    # Symmetric off-diagonal entries: ∂²f/∂x₁∂x₂ = ∂²f/∂x₂∂x₁.
    H[1, 2] = -400.0 * x[1]
    H[2, 1] = -400.0 * x[1]
    H[2, 2] = 200.0
    return H
end
"""
    fg!(F, G, x)

Combined objective/gradient callback in the `NLSolversBase` `only_fg!`
convention: `G` is either `nothing` (gradient not requested) or a
preallocated array to fill in place via `g!`; a non-`nothing` `F`
requests the objective value. Returns `f(x)` when the objective is
requested, `nothing` otherwise.
"""
function fg!(F, G, x)
    # Use `===` to test for `nothing`: it is an identity check, whereas
    # `==` may dispatch to user-defined equality for array types.
    G === nothing || g!(G, x)
    F === nothing || return f(x)
    return nothing
end
"""
    fgh!(F, G, H, x)

Combined objective/gradient/Hessian callback in the `NLSolversBase`
`only_fgh!` convention: each of `F`, `G`, `H` is either `nothing`
(not requested this call) or a preallocated output to fill in place
via `g!`/`h!`. Returns `f(x)` when the objective is requested
(`F` non-`nothing`), `nothing` otherwise.
"""
function fgh!(F, G, H, x)
    # Use `===` to test for `nothing`: it is an identity check, whereas
    # `==` may dispatch to user-defined equality for array types.
    G === nothing || g!(G, x)
    H === nothing || h!(H, x)
    F === nothing || return f(x)
    return nothing
end
import Optim
# LBFGS needs only value + gradient, so the only_fg! objective succeeds.
Optim.optimize(Optim.only_fg!(fg!), [0., 0.], Optim.LBFGS()) # works fine
# Newton additionally needs the Hessian via only_fgh!; this call raises the
# MethodError shown below — NOTE(review): looks like a bug in how these
# package versions handle only_fgh! objectives, not in the callbacks above.
Optim.optimize(Optim.only_fgh!(fgh!), [0., 0.], Optim.Newton())
# ERROR: MethodError: objects of type NLSolversBase.InplaceObjective{Nothing,Nothing,typeof(fgh!)} are not callable
Here is the stack trace
Stacktrace:
[1] finite_difference_gradient!(::Array{Float64,1}, ::NLSolversBase.InplaceObjective{Nothing,Nothing,typeof(fgh!)}, ::Array{Float64,1}, ::DiffEqDiffTools.GradientCache{Nothing,Nothing,Nothing,Val{:central},Float64,Val{true}}) at /opt/julia-depot/packages/DiffEqDiffTools/jv7Il/src/gradients.jl:282
[2] (::getfield(NLSolversBase, Symbol("#g!#42")){NLSolversBase.InplaceObjective{Nothing,Nothing,typeof(fgh!)},DiffEqDiffTools.GradientCache{Nothing,Nothing,Nothing,Val{:central},Float64,Val{true}}})(::Array{Float64,1}, ::Array{Float64,1}) at /opt/julia-depot/packages/NLSolversBase/Cvvki/src/objective_types/twicedifferentiable.jl:103
[3] (::getfield(NLSolversBase, Symbol("#fg!#43")){NLSolversBase.InplaceObjective{Nothing,Nothing,typeof(fgh!)}})(::Array{Float64,1}, ::Array{Float64,1}) at /opt/julia-depot/packages/NLSolversBase/Cvvki/src/objective_types/twicedifferentiable.jl:107
[4] value_gradient!!(::NLSolversBase.TwiceDifferentiable{Float64,Array{Float64,1},Array{Float64,2},Array{Float64,1}}, ::Array{Float64,1}) at /opt/julia-depot/packages/NLSolversBase/Cvvki/src/interface.jl:88
[5] initial_state(::Optim.Newton{LineSearches.InitialStatic{Float64},LineSearches.HagerZhang{Float64,Base.RefValue{Bool}}}, ::Optim.Options{Float64,Nothing}, ::NLSolversBase.TwiceDifferentiable{Float64,Array{Float64,1},Array{Float64,2},Array{Float64,1}}, ::Array{Float64,1}) at /opt/julia-depot/packages/Optim/fabGe/src/multivariate/solvers/second_order/newton.jl:45
[6] #optimize#87 at /opt/julia-depot/packages/Optim/fabGe/src/multivariate/optimize/optimize.jl:33 [inlined]
[7] optimize(::NLSolversBase.InplaceObjective{Nothing,Nothing,typeof(fgh!)}, ::Array{Float64,1}, ::Optim.Newton{LineSearches.InitialStatic{Float64},LineSearches.HagerZhang{Float64,Base.RefValue{Bool}}}, ::Optim.Options{Float64,Nothing}) at /opt/julia-depot/packages/Optim/fabGe/src/multivariate/optimize/interface.jl:113 (repeats 2 times)
[8] top-level scope at none:0
A similar error is produced even when the Hessian is not required, as in `Optim.optimize(Optim.only_fgh!(fgh!), [0., 0.], Optim.LBFGS())`. So the problem seems to be inside `only_fgh!` itself.
On Julia v0.6.4 with Optim v0.15.3 I get similar errors with `only_fgh!` (`only_fg!` works fine).