Enforcing a nonnegativity constraint in parameter optimization

(I moved this post to “New to Julia”)
Hi, I am quite new to Julia, so this might be obvious, but I cannot figure it out.
I have a model consisting of parameters and a set of ODEs; in the MWE below I substitute the SIR model for it.
I have been trying to do parameter estimation for an observable of the form log(x), where x is the third variable of my model.
That’s why the optimization needs to be constrained to strictly positive values.
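For illustration (this is not yet in the MWE below), the loss I am ultimately after would compare log-transformed predictions and data, roughly:

log_loss(ps) = sum(abs2, log.(predict(ps)) .- log.(data))

which is only well-defined while the predicted x stays strictly positive.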

Now, independently of the solver used, I encounter the following error:

ERROR: LoadError: The algorithm Optim.IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol} does not support constraints. Either remove the `cons` function passed to `OptimizationFunction` or use a different algorithm.

It comes from:

opt_func = Optimization.OptimizationFunction((ps, data) -> loss_function(ps),
    Optimization.AutoForwardDiff(); cons = cons)
opt_prob = Optimization.OptimizationProblem(opt_func, perturbed_params, data;
    lcons = [0.0, 0.0], ucons = [Inf, Inf])
opt_sol = solve(opt_prob, optimiser, maxiters = maxiter)
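(Side note: since all I really need is positivity of the parameters, plain box constraints might be an alternative; if I read the Optimization.jl docs correctly, that would look roughly like

opt_prob = Optimization.OptimizationProblem(opt_func, perturbed_params, data;
    lb = [0.0, 0.0], ub = [Inf, Inf])
opt_sol = solve(opt_prob, Optim.Fminbox(Optim.LBFGS()), maxiters = maxiter)

but I would still like to understand the error above.)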

MWE:

# SIR test example (three compartments: S, I, R)

import Optimization, OptimizationOptimisers, OptimizationOptimJL # OptimizationOptimJL makes the Optim algorithms usable with Optimization.jl's solve
import Plots
import DifferentialEquations
import Statistics
import Random
import Optim
using CommonSolve: solve

rng = Random.default_rng()

Random.seed!(rng,12)


true_params = [0.5,0.1]
t_span = (0.0,30.0)
offset = 10.0
noise_level = 10.0
IC = [100.0, 10.0, 0.0] # Float64 so that du has a floating-point eltype

perturbed_params = [0.2,0.2]

function dynamics!(du, u, p, t)
    α, β = p
    du[1] = -α * u[1] * u[2]            # susceptible
    du[2] = α * u[1] * u[2] - β * u[2]  # infected
    du[3] = β * u[2]                    # recovered
end

ode_prob = DifferentialEquations.ODEProblem(dynamics!,IC,t_span,true_params)

function gen_sir_data(noise_level::Float64, offset::Float64) # offset is currently unused
    sol = solve(ode_prob, abstol = 1e-12, reltol = 1e-10)
    sol_arr = Array(sol[3, :])
    # add Gaussian noise scaled by the mean of the trajectory
    ϵ = Statistics.mean(sol_arr) .* noise_level ./ 100 .* randn(rng, size(sol.t))
    sol_arr .+= ϵ
    return (sol_arr, sol.t)
end

(data,t) = gen_sir_data(10.0,10.0)

function predict(parameters, ode_prob = ode_prob, t = t)
    # solve with the candidate parameters and return the third state at the data time points
    pred = Array(solve(ode_prob, saveat = t, p = parameters))[3, :]
    return pred
end

# Calculate loss (only w.r.t. last var)
function loss_function(parameters)
    prediction = predict(parameters)
    return sum(abs2, prediction .- data)
end

# constraint output: the parameters themselves (x is the optimization variable, p the data)
cons(res, x, p) = (res .= x)

(optimiser, maxiter) = (Optim.IPNewton(), 10000) # interior-point Newton, as in the stacktrace below

opt_func = Optimization.OptimizationFunction((ps, data) -> loss_function(ps),
    Optimization.AutoForwardDiff(); cons = cons)
opt_prob = Optimization.OptimizationProblem(opt_func, perturbed_params, data;
    lcons = [0.0, 0.0], ucons = [Inf, Inf]) # one bound pair per constraint, i.e. per parameter
opt_sol = solve(opt_prob, optimiser, maxiters = maxiter)

Thanks in advance for any help!

P.S.: This is my stacktrace:

ERROR: LoadError: The algorithm Optim.IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol} does not support constraints. Either remove the `cons` function passed to `OptimizationFunction` or use a different algorithm.
Stacktrace:
 [1] _check_opt_alg(prob::OptimizationProblem{true, OptimizationFunction{true, AutoForwardDiff{nothing, Nothing}, var"#1#2", Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, @Kwargs{}}, alg::Optim.IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol}; kwargs::@Kwargs{})
   @ SciMLBase ...\.julia\packages\SciMLBase\Dwomw\src\solve.jl:113
 [2] _check_opt_alg(prob::OptimizationProblem{true, OptimizationFunction{true, AutoForwardDiff{nothing, Nothing}, var"#1#2", Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, @Kwargs{}}, alg::Optim.IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol})
   @ SciMLBase ...\.julia\packages\SciMLBase\Dwomw\src\solve.jl:108
 [3] solve(::OptimizationProblem{true, OptimizationFunction{true, AutoForwardDiff{nothing, Nothing}, var"#1#2", Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, @Kwargs{}}, ::Optim.IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol}; kwargs::@Kwargs{})
   @ SciMLBase ...julia\packages\SciMLBase\Dwomw\src\solve.jl:98
 [4] solve(::OptimizationProblem{true, OptimizationFunction{true, AutoForwardDiff{nothing, Nothing}, var"#1#2", Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, @Kwargs{}}, ::Optim.IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol})
   @ SciMLBase ...\.julia\packages\SciMLBase\Dwomw\src\solve.jl:93
 [5] top-level scope
   @ ...
in expression starting at line 96.

I’d recommend switching to the Ipopt optimizer (available through OptimizationMOI.jl) for this kind of problem.
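An untested sketch of what that could look like with your MWE (Ipopt.Optimizer comes from Ipopt.jl and is hooked into solve by loading OptimizationMOI.jl; everything else reuses your names):

import OptimizationMOI, Ipopt

opt_func = Optimization.OptimizationFunction((ps, data) -> loss_function(ps),
    Optimization.AutoForwardDiff(); cons = cons)
opt_prob = Optimization.OptimizationProblem(opt_func, perturbed_params, data;
    lcons = [0.0, 0.0], ucons = [Inf, Inf])
opt_sol = solve(opt_prob, Ipopt.Optimizer())

Ipopt is an interior-point method, so general inequality constraints like these are supported natively.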