Good afternoon,
I cannot figure out the right syntax for a box-constrained optimization in which I also need to change the line search. With the default settings I get the error
ArgumentError: Value and slope at step length = 0 must be finite.
when using
res = optimize(bk, lower, upper, x0, Fminbox(inner_optimizer))
in the code below. Because of this initial error, I tried to change the line search and tried out many different syntaxes, but I always get a MethodError.
I tried, for example:
res = optimize(bk, x0, lower, upper, Fminbox(), optimizer = GradientDescent, linesearch = LineSearches.Static)
or
res = optimize(bk, lower, upper, x0, Fminbox(GradientDescent()), linesearch = LineSearches.Static)
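From the docs, my best guess is that the line search has to be set on the inner optimizer itself rather than passed to optimize, roughly as below, but I am not sure this is the intended way to combine it with Fminbox:
inner_optimizer = GradientDescent(linesearch = LineSearches.Static())   # line search chosen on the inner solver
res = optimize(bk, lower, upper, x0, Fminbox(inner_optimizer))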
Would anyone know the correct syntax, or how to solve the initial error if changing the line search is not the right path?
Many thanks!
Here is the full (initial) code:
using Optim
using QuadGK
using QuantEcon
using Interpolations
using LineSearches
module Para
    Re = 1.3
    Rl = 1.1
    E = 2.0
    points = 3
    lowerB = 0.01
end
import .Para
function solve(Gamma::AbstractFloat, centralized::Bool=false)   # `centralized` is used in bk_obj below, so it has to be an argument
    u(c) = c^Gamma

    function bk_obj(Sb::AbstractFloat, c_bar::AbstractFloat, D::AbstractFloat)
        Lf = Para.Rl / (Para.Re - Para.Rl) * D * c_bar
        Sf = Para.E - D - Lf
        if centralized
            P_star = Lf / (1 - Sb)
        else
            P_star = Para.Re / Para.Rl
        end
        θhigh = (Para.Re * (1 - Sb) + Para.Re * (1 - Sb) * P_star - c_bar * P_star * D) /
                (c_bar * D * (Para.Re - Lf / Sb))
        c1_ND = D * c_bar
        c1_D  = (1 - Sb) + Lf
        c2ND(θ) = (((1 - Sb) - θ * c_bar * D) * Para.Rl + Sb * Para.Re + Lf * Para.Rl + Sf * Para.Re) / (1 - θ)
        c2D(θ)  = (1 - Sb) + Lf + ((Sb + Sf) * Para.Re) / (1 - θ)
        val1(θ) = u(θ * c1_ND + (1 - θ) * c2ND(θ))
        val2(θ) = u(θ * c1_D + (1 - θ) * c2D(θ))
        return -(quadgk(val1, 0.1, θhigh)[1] + quadgk(val2, θhigh, 0.999)[1])
    end
    grid = collect(range(Para.lowerB, length=Para.points, stop=Para.E))
    inner_optimizer = GradientDescent()
    D_vec = zeros(Para.points)          # storage for the grid values
    for (i, D) in enumerate(grid)
        bk(x) = bk_obj(x[1], x[2], D)   # objective in the vector form Optim expects
        D_vec[i] = D
        lower = [0.05, 0.1]
        upper = [D, D]
        x0 = [0.05, 0.1]
        res = optimize(bk, lower, upper, x0, Fminbox(inner_optimizer))
    end
end
solve(2.3)
and the initial error that made me try to change the line search:
ArgumentError: Value and slope at step length = 0 must be finite.
Stacktrace:
[1] macro expansion at C:\Users\arquie\.julia\packages\Parameters\35RKe\src\Parameters.jl:734 [inlined]
[2] (::HagerZhang{Float64,Base.RefValue{Bool}})(::getfield(LineSearches, Symbol("#ϕ#1")){Optim.ManifoldObjective{OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}},Array{Float64,1},Array{Float64,1},Array{Float64,1}}, ::getfield(LineSearches, Symbol("#ϕdϕ#6")){Optim.ManifoldObjective{OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}},Array{Float64,1},Array{Float64,1},Array{Float64,1}}, ::Float64, ::Float64, ::Float64) at C:\Users\arquie\.julia\packages\LineSearches\SrdrY\src\hagerzhang.jl:111
[3] HagerZhang at C:\Users\arquie\.julia\packages\LineSearches\SrdrY\src\hagerzhang.jl:101 [inlined]
[4] perform_linesearch!(::Optim.GradientDescentState{Array{Float64,1},Float64}, ::GradientDescent{InitialPrevious{Float64},HagerZhang{Float64,Base.RefValue{Bool}},Optim.InverseDiagonal,getfield(Optim, Symbol("##56#62")){Array{Float64,1},Array{Float64,1},Fminbox{GradientDescent{InitialPrevious{Float64},HagerZhang{Float64,Base.RefValue{Bool}},Nothing,getfield(Optim, Symbol("##12#14"))},Float64,getfield(Optim, Symbol("##46#48"))}}}, ::Optim.ManifoldObjective{OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}}) at C:\Users\arquie\.julia\packages\Optim\wi7Vp\src\utilities\perform_linesearch.jl:40
[5] update_state!(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Optim.GradientDescentState{Array{Float64,1},Float64}, ::GradientDescent{InitialPrevious{Float64},HagerZhang{Float64,Base.RefValue{Bool}},Optim.InverseDiagonal,getfield(Optim, Symbol("##56#62")){Array{Float64,1},Array{Float64,1},Fminbox{GradientDescent{InitialPrevious{Float64},HagerZhang{Float64,Base.RefValue{Bool}},Nothing,getfield(Optim, Symbol("##12#14"))},Float64,getfield(Optim, Symbol("##46#48"))}}}) at C:\Users\arquie\.julia\packages\Optim\wi7Vp\src\multivariate\solvers\first_order\gradient_descent.jl:74
[6] optimize(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}, ::GradientDescent{InitialPrevious{Float64},HagerZhang{Float64,Base.RefValue{Bool}},Optim.InverseDiagonal,getfield(Optim, Symbol("##56#62")){Array{Float64,1},Array{Float64,1},Fminbox{GradientDescent{InitialPrevious{Float64},HagerZhang{Float64,Base.RefValue{Bool}},Nothing,getfield(Optim, Symbol("##12#14"))},Float64,getfield(Optim, Symbol("##46#48"))}}}, ::Optim.Options{Float64,Nothing}, ::Optim.GradientDescentState{Array{Float64,1},Float64}) at C:\Users\arquie\.julia\packages\Optim\wi7Vp\src\multivariate\optimize\optimize.jl:57
[7] optimize at C:\Users\arquie\.julia\packages\Optim\wi7Vp\src\multivariate\optimize\optimize.jl:33 [inlined]
[8] optimize(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}, ::Array{Float64,1}, ::Array{Float64,1}, ::Fminbox{GradientDescent{InitialPrevious{Float64},HagerZhang{Float64,Base.RefValue{Bool}},Nothing,getfield(Optim, Symbol("##12#14"))},Float64,getfield(Optim, Symbol("##46#48"))}, ::Optim.Options{Float64,Nothing}) at C:\Users\arquie\.julia\packages\Optim\wi7Vp\src\multivariate\solvers\constrained\fminbox.jl:229
[9] #optimize#53(::Bool, ::Symbol, ::Function, ::Function, ::Array{Float64,1}, ::Array{Float64,1}, ::Array{Float64,1}, ::Fminbox{GradientDescent{InitialPrevious{Float64},HagerZhang{Float64,Base.RefValue{Bool}},Nothing,getfield(Optim, Symbol("##12#14"))},Float64,getfield(Optim, Symbol("##46#48"))}, ::Optim.Options{Float64,Nothing}) at C:\Users\arquie\.julia\packages\Optim\wi7Vp\src\multivariate\solvers\constrained\fminbox.jl:164
[10] optimize at C:\Users\arquie\.julia\packages\Optim\wi7Vp\src\multivariate\solvers\constrained\fminbox.jl:163 [inlined] (repeats 2 times)
[11] solve(::Float64, ::Bool) at .\In[19]:66
[12] solve(::Float64) at .\In[19]:19
[13] top-level scope at In[20]:1
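In case it matters: x0 = [0.05, 0.1] sits exactly on the lower bound, and for the first grid value D = 0.01 the upper bound is even below the lower bound, so maybe the Fminbox barrier is already not finite at the starting point? Here is a quick standalone check of the box for each grid value (it reuses the Para module from above and is only a sketch of the check, not a fix):
for D in range(Para.lowerB, length=Para.points, stop=Para.E)
    lower = [0.05, 0.1]
    upper = [D, D]
    x0 = [0.05, 0.1]
    inside = all(lower .< x0) && all(x0 .< upper)   # x0 strictly inside the box?
    println("D = $D, x0 strictly inside box: $inside")
end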