I’m trying to optimize a quadratic function to learn Julia, as shown below, but it didn’t work.
Can anyone tell me what is wrong?
Input:
using PyPlot
using ForwardDiff

# The function to maximize: a concave quadratic with its maximum at x_opt
x_opt = 0.50
f(x) = -2(x - x_opt)^2

# Plot f over [-3, 3]
xs = range(-3, 3, length = 100)
fig, ax = subplots()
ax.plot(xs, f.(xs))
ax.grid()
function gradient_method_dim1(f, x_init, eta, maxiter)
    x_seq = Array{typeof(x_init), 1}(undef, maxiter)
    Dxf(x) = ForwardDiff.derivative(f, x)  # derivative via ForwardDiff (unused below)
    x_seq[1] = x_init
    for i in 2:maxiter
        # gradient-ascent update; this is the line the error below points at
        x_seq[i] = x_seq[i-1] + eta*f'(x_seq[i-1])
    end
    x_seq
end
x_init = -2.5
maxiter = 100
eta = 0.1
x_seq = gradient_method_dim1(f, x_init, eta, maxiter)
f_seq = f.(x_seq)
println(f_seq)
Output:
ERROR: LoadError: MethodError: no method matching adjoint(::typeof(f))
Closest candidates are:
  adjoint(::Union{LinearAlgebra.QR, LinearAlgebra.QRCompactWY, LinearAlgebra.QRPivoted}) at /opt/julia-1.7.2/share/julia/stdlib/v1.7/LinearAlgebra/src/qr.jl:509
  adjoint(::Union{LinearAlgebra.Cholesky, LinearAlgebra.CholeskyPivoted}) at /opt/julia-1.7.2/share/julia/stdlib/v1.7/LinearAlgebra/src/cholesky.jl:538
  adjoint(::LinearAlgebra.SVD) at /opt/julia-1.7.2/share/julia/stdlib/v1.7/LinearAlgebra/src/svd.jl:262
  ...
Stacktrace:
 [1] gradient_method_dim1(f::Function, x_init::Float64, eta::Float64, maxiter::Int64)
   @ Main ~/path/to/optimization:22
 [2] top-level scope
   @ ~/path/to/optimization.jl:32
 [3] include(fname::String)
   @ Base.MainInclude ./client.jl:451
 [4] top-level scope
   @ REPL[49]:1
in expression starting at /path/to/optimization.jl:32
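
Reading the error, my guess is that the postfix ' is the culprit: f' lowers to adjoint(f), and adjoint is defined for things like matrices and factorizations, not for plain functions. I also notice I defined Dxf with ForwardDiff.derivative but never actually used it. Is the version below the intended fix? This is just my own sketch, so I’m not sure it is the idiomatic way:

function gradient_method_dim1(f, x_init, eta, maxiter)
    x_seq = Array{typeof(x_init), 1}(undef, maxiter)
    # Differentiate with ForwardDiff instead of the f' syntax,
    # since adjoint has no method for functions
    Dxf(x) = ForwardDiff.derivative(f, x)
    x_seq[1] = x_init
    for i in 2:maxiter
        # gradient-ascent step: move uphill on the concave f
        x_seq[i] = x_seq[i-1] + eta*Dxf(x_seq[i-1])
    end
    x_seq
end

If that is right, then with f(x) = -2(x - x_opt)^2 the derivative is -4(x - x_opt), so for eta = 0.1 the update becomes x_seq[i] = x_opt + 0.6*(x_seq[i-1] - x_opt), and I would expect x_seq to converge geometrically to x_opt = 0.5.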