I am trying to optimize a likelihood function for the multivariate case. This is the code I am using
using Optim, LinearAlgebra, ForwardDiff
#observed covariance
cov_obs = [1.3629 0.408729 0.581832
0.408729 1.38639 0.452567
0.581832 0.452567 1.27911]
# define SEM in RAM notation
function f(x)
S = [x[1] 0 0 0
0 x[2] 0 0
0 0 x[3] 0
0 0 0 x[4]]
F = [1 0 0 0
0 1 0 0
0 0 1 0]
A = [0 0 0 1
0 0 0 x[5]
0 0 0 x[6]
0 0 0 0]
Cov_Exp = F*inv(I-A)*S*transpose(inv(I-A))*transpose(F)
return log(det(Cov_Exp)) + tr(cov_obs*inv(Cov_Exp)) - log(det(cov_obs)) - 3
end
###############################################################
### works without Gradient
#initial values
x0 = ones(6)
result = optimize(f, x0)
###############################################################
### does not work with gradient
func = TwiceDifferentiable(x -> f(x),
ones(6), autodiff=:forward)
# does not work
optimize(f, x0, LBFGS(), autodiff = :forward)
optimize(func, x0)
# works
ForwardDiff.gradient(f, x0)
I am not able to solve the problem using an algorithm that requires a gradient. This is the error I get:
DomainError with -0.004445959499469189:
log will only return a complex result if called with a complex argument. Try log(Complex(x)).
throw_complex_domainerror(::Symbol, ::Float64) at math.jl:32
log(::Float64) at log.jl:285
log at dual.jl:203 [inlined]
f(::Array{ForwardDiff.Dual{ForwardDiff.Tag{typeof(f),Float64},Float64,6},1}) at SEM.jl:40
vector_mode_gradient!(::DiffResults.MutableDiffResult{1,Float64,Tuple{Array{Float64,1}}}, ::typeof(f), ::Array{Float64,1}, ::ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(f),Float64},Float64,6,Array{ForwardDiff.Dual{ForwardDiff.Tag{typeof(f),Float64},Float64,6},1}}) at apiutils.jl:37
gradient! at gradient.jl:35 [inlined]
gradient! at gradient.jl:33 [inlined]
(::NLSolversBase.var"#14#18"{Float64,typeof(f),ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(f),Float64},Float64,6,Array{ForwardDiff.Dual{ForwardDiff.Tag{typeof(f),Float64},Float64,6},1}}})(::Array{Float64,1}, ::Array{Float64,1}) at oncedifferentiable.jl:69
value_gradient!!(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}) at interface.jl:82
value_gradient!(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}) at interface.jl:69
value_gradient!(::Optim.ManifoldObjective{OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}}, ::Array{Float64,1}) at Manifolds.jl:50
(::LineSearches.var"#ϕdϕ#6"{Optim.ManifoldObjective{OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}},Array{Float64,1},Array{Float64,1},Array{Float64,1}})(::Float64) at LineSearches.jl:84
(::LineSearches.HagerZhang{Float64,Base.RefValue{Bool}})(::Function, ::LineSearches.var"#ϕdϕ#6"{Optim.ManifoldObjective{OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}},Array{Float64,1},Array{Float64,1},Array{Float64,1}}, ::Float64, ::Float64, ::Float64) at hagerzhang.jl:227
HagerZhang at hagerzhang.jl:101 [inlined]
perform_linesearch!(::Optim.LBFGSState{Array{Float64,1},Array{Array{Float64,1},1},Array{Array{Float64,1},1},Float64,Array{Float64,1}}, ::LBFGS{Nothing,LineSearches.InitialStatic{Float64},LineSearches.HagerZhang{Float64,Base.RefValue{Bool}},Optim.var"#19#21"}, ::Optim.ManifoldObjective{OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}}) at perform_linesearch.jl:53
update_state!(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Optim.LBFGSState{Array{Float64,1},Array{Array{Float64,1},1},Array{Array{Float64,1},1},Float64,Array{Float64,1}}, ::LBFGS{Nothing,LineSearches.InitialStatic{Float64},LineSearches.HagerZhang{Float64,Base.RefValue{Bool}},Optim.var"#19#21"}) at l_bfgs.jl:198
optimize(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}, ::LBFGS{Nothing,LineSearches.InitialStatic{Float64},LineSearches.HagerZhang{Float64,Base.RefValue{Bool}},Optim.var"#19#21"}, ::Optim.Options{Float64,Nothing}, ::Optim.LBFGSState{Array{Float64,1},Array{Array{Float64,1},1},Array{Array{Float64,1},1},Float64,Array{Float64,1}}) at optimize.jl:57
optimize(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}, ::LBFGS{Nothing,LineSearches.InitialStatic{Float64},LineSearches.HagerZhang{Float64,Base.RefValue{Bool}},Optim.var"#19#21"}, ::Optim.Options{Float64,Nothing}) at optimize.jl:33
#optimize#93 at interface.jl:116 [inlined]
(::Optim.var"#kw##optimize")(::NamedTuple{(:autodiff,),Tuple{Symbol}}, ::typeof(optimize), ::Function, ::Array{Float64,1}, ::LBFGS{Nothing,LineSearches.InitialStatic{Float64},LineSearches.HagerZhang{Float64,Base.RefValue{Bool}},Optim.var"#19#21"}, ::Optim.Options{Float64,Nothing}) at none:0
(::Optim.var"#kw##optimize")(::NamedTuple{(:autodiff,),Tuple{Symbol}}, ::typeof(optimize), ::Function, ::Array{Float64,1}, ::LBFGS{Nothing,LineSearches.InitialStatic{Float64},LineSearches.HagerZhang{Float64,Base.RefValue{Bool}},Optim.var"#19#21"}) at none:0
top-level scope at SEM.jl:62
Thanks!
Maximilian