Hi, I have a conceptual question:
I have 10 non-linear constraints, one for each grid point k = k_1, \dots, k_{10} (a discrete grid of a continuous variable), which pin down 10 coefficients \theta = (\theta_1, \dots, \theta_{10}):
u'\Big(\Psi(k) \cdot \theta,\ \gamma\Big) = \beta\, u'\Big(\Psi\big(f(k, \alpha, A) + (1-\delta)\,k - \Psi(k)\cdot\theta\big)\cdot\theta,\ \gamma\Big) \cdot \Big(f'\big(f(k, \alpha, A) + (1-\delta)\,k - \Psi(k)\cdot\theta,\ \alpha, A\big) + (1-\delta)\Big)
Here \Psi evaluates the 10 basis functions (Chebyshev polynomials in my case) used to approximate a univariate function.
While \Psi(k) can be evaluated before assembling the JuMP model, \Psi\big(f(k, \alpha, A) + (1-\delta)k - \Psi(k)\cdot\theta\big) depends on the JuMP decision variables \alpha, A, \delta and \theta[1:10].
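To keep the notation below compact, it may help to write next-period capital (the argument of the outer \Psi, and what Kprime denotes in the code further down) as k'; the constraint above is then just

k' = f(k, \alpha, A) + (1-\delta)\,k - \Psi(k)\cdot\theta, \qquad u'\big(\Psi(k)\cdot\theta,\ \gamma\big) = \beta\, u'\big(\Psi(k')\cdot\theta,\ \gamma\big)\,\Big(f'(k', \alpha, A) + (1-\delta)\Big).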
I see two possible approaches:
- Register \Psi in JuMP and hope AutoDiff can handle it:
  Chebyshev polynomials are defined by a recurrence, so I believe AutoDiff should be able to handle this in theory. But calling the BasisMatrices routines on JuMP expressions throws an error:
  ```
  MethodError: no method matching evalbasex!(::Array{Any,2}, ::Array{JuMP.NonlinearExpression,1}, ::BasisMatrices.ChebParams{Float64}, ::Array{JuMP.NonlinearExpression,1})
  Closest candidates are:
    evalbasex!(::AbstractArray{T,2} where T, ::AbstractArray{T<:Number,1}, ::BasisMatrices.ChebParams, ::AbstractArray{T<:Number,1}) where T<:Number at /home/juser/.julia/v0.6/BasisMatrices/src/cheb.jl:168
    evalbasex!(::AbstractArray{T,2} where T, ::BasisMatrices.ChebParams, ::Any) at /home/juser/.julia/v0.6/BasisMatrices/src/cheb.jl:194
  ```
- Update the constraints at every iteration of the optimization:
  It is easy to calculate f(k, \alpha, A) + (1-\delta)k - \Psi(k)\cdot\theta at every iteration; if I could extract it from the JuMP model and evaluate \Psi on it there, I could update the constraint. This is discussed here and here, but it is not yet supported by JuMP. (A rough sketch of this extraction step is at the end of this post.)
Should I code up \Psi as “pure” as possible myself, use a different package, or wait for the updating functionality?
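For reference, here is a minimal sketch of what a "pure" \Psi(k)\cdot\theta could look like: it only uses the three-term recurrence and basic arithmetic, so ForwardDiff should be able to propagate dual numbers through it. The name cheb_dot and its signature are mine, and I have not checked that it matches BasisMatrices' conventions exactly:

```julia
# A minimal, hand-rolled Ψ(k)⋅θ for Chebyshev polynomials of the first kind.
# cheb_dot, a and b are my own names; a and b are the grid bounds
# (0.2*k_stst and 2*k_stst below). Only +, -, * and / are used.
function cheb_dot(k, θ, a, b)
    z = 2 * (k - a) / (b - a) - 1                  # map [a, b] onto [-1, 1]
    Tprev = one(z)                                 # T_0(z) = 1
    Tcur  = z                                      # T_1(z) = z
    s = θ[1] * Tprev + θ[2] * Tcur
    for j in 3:length(θ)
        Tprev, Tcur = Tcur, 2 * z * Tcur - Tprev   # T_j = 2z*T_{j-1} - T_{j-2}
        s += θ[j] * Tcur
    end
    return s                                       # the scalar Ψ(k) ⋅ θ
end
```

Since JuMP's registered user-defined functions take scalar arguments and return a scalar, it would presumably be a scalar product like this (rather than the vector-valued \Psi(k') itself) that gets registered, with the ten \theta's passed as separate arguments.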
Here is the full model:
```julia
using JuMP, BasisMatrices

k_stst = 4.628988089138438                          # reference point of the grid
basis  = Basis(ChebParams(10, 0.2 * k_stst, 2 * k_stst))
K      = nodes(basis)[1]                            # grid
Ψ      = BasisMatrix(basis, Expanded(), K).vals[1]  # Ψ(k) at the grid points

function f(k, α, A)
    A * k^α
end
function f_prime(k, α, A)
    A * α * k^(α - 1)
end
u_crra_prime(c, γ) = c^(-γ)

m = Model()

@variable(m, β, start = 1)
@variable(m, δ, start = 1)
@variable(m, α, start = 1)
@variable(m, A, start = 1)
@variable(m, γ, start = 1)
@variable(m, θ[1:10], start = 0)
setvalue(θ[1], 0.001)

JuMP.register(m, :u_crra_prime, 2, u_crra_prime, autodiff=true)
JuMP.register(m, :f, 3, f, autodiff=true)
JuMP.register(m, :f_prime, 3, f_prime, autodiff=true)

# next-period capital k′, the argument of Ψ on the right-hand side
@NLexpression(m, Kprime[i=1:10],
    f(K[i], α, A) + (1 - δ) * K[i] - sum(Ψ[i, k] * θ[k] for k in 1:10))

# evaluate the 10 basis functions at a single point
function Ψprime(k)
    return BasisMatrix(basis, Expanded(), [k]).vals[1]
end
JuMP.register(m, :Ψprime, 1, Ψprime, autodiff=true)

# Euler equation at each grid point; the Ψprime(Kprime[i])[k] term is what the question is about
@NLconstraint(m, EE[i=1:10],
    u_crra_prime(sum(Ψ[i, k] * θ[k] for k in 1:10), γ) ==
    β * u_crra_prime(sum(Ψprime(Kprime[i])[k] * θ[k] for k in 1:10), γ) *
    (f_prime(Kprime[i], α, A) + (1 - δ)))
```
The objective is a GMM-type function that compares predicted values against data:
```julia
@NLobjective(m, Min, sum((dataC[t] - predictedC[t])^2 for t in 1:100) +
                     sum((dataK[t] - predictedK[t])^2 for t in 2:100))
```
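And, to make the second approach concrete, here is a rough sketch of the "extract and re-evaluate \Psi" step, using the objects from the full model above and assuming the model has been solved so that getvalue returns the current iterate; the *_now names are mine:

```julia
# Pull the current iterate out of the JuMP model (JuMP 0.18 / Julia 0.6 API)
# and rebuild Ψ(k′) numerically; the *_now names are illustrative.
θ_now = getvalue(θ)
α_now, A_now, δ_now = getvalue(α), getvalue(A), getvalue(δ)

# k′ at every grid point, evaluated with plain numbers
Kprime_now = [f(K[i], α_now, A_now) + (1 - δ_now) * K[i] -
              dot(Ψ[i, :], θ_now) for i in 1:10]

# Ψ(k′) is then an ordinary 10×10 Float64 matrix again
Ψprime_now = BasisMatrix(basis, Expanded(), Kprime_now).vals[1]
```

The missing piece is pushing Ψprime_now back into the EE constraints and re-solving, which is what the linked discussions are about.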