Hello,
here is my code and the error I get. Let me know if you need more explanations. Thanks!
using DynamicPolynomials, JuMP, Hypatia, DiffOpt
import MathOptInterface; const MOI = MathOptInterface
function get_expect_with_deriv(Obs,
                               _p0_r, _p0_i,
                               _p1_r, _p1_i,
                               _p2_r, _p2_i,
                               _p3_r, _p3_i,
                               _alpha_r, _alpha_i)
    # compute here the expectation value e_val of Obs, and grad_coeff,
    # a NamedTuple holding one derivative of e_val per parameter
    return e_val, grad_coeff
end
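To make the snippet self-contained, a stand-in with the right return shape could look like this (placeholder values only, not my real computation; the field names are made up):

# hypothetical stand-in: e_val is a ComplexF64, grad_coeff is a NamedTuple of
# complex derivatives, one per parameter, indexable as grad_coeff[i]
function get_expect_with_deriv_stub(Obs, p0_r, p0_i, p1_r, p1_i, p2_r, p2_i, p3_r, p3_i, alpha_r, alpha_i)
    e_val = complex(p0_r + p1_r, p0_i + p1_i)               # dummy expectation value
    grad_coeff = (dp0_r = 1.0 + 0.0im, dp0_i = 0.0 + 1.0im,
                  dp1_r = 1.0 + 0.0im, dp1_i = 0.0 + 1.0im,
                  dp2_r = 0.0 + 0.0im, dp2_i = 0.0 + 0.0im,
                  dp3_r = 0.0 + 0.0im, dp3_i = 0.0 + 0.0im) # dummy derivatives
    return e_val, grad_coeff
end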
function diff_data_cat(dict_data, deg_max,
                       p0_r, p0_i,
                       p1_r, p1_i,
                       p2_r, p2_i,
                       p3_r, p3_i,
                       alpha_r, alpha_i)
    model = Model(() -> DiffOpt.diff_optimizer(Hypatia.Optimizer))
    Ncons = length(dict_data)
    @variable(model, T[1:Ncons] >= 0)
    soc_refs = NamedTuple[] # generic vector of NamedTuples
    it = 1
    for (obs, val) in dict_data
        e, _ = get_expect_with_deriv(TermToExpr(obs), p0_r, p0_i, p1_r, p1_i, p2_r, p2_i, p3_r, p3_i, alpha_r, alpha_i)
        # get the expectation e of obs as a function of the parameters
        cref = @constraint(model,
            [
                T[it],
                real(e) - real(val),
                imag(e) - imag(val),
            ] in SecondOrderCone() # T[it] upper-bounds the distance between e and val; the objective below minimises it
        )
        push!(soc_refs, (obs = obs, val = val, cref = cref, idx = it))
        it += 1
    end
    @objective(model, Min, sum(T))
    optimize!(model)
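    # (sketch) storage for the gradient: one row per constraint, one column per
    # parameter direction -- grad_T is what the function returns below
    grad_T = zeros(Ncons, 8)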
    for i_deriv in 1:8
        MOI.set(model, DiffOpt.ForwardObjectiveFunction(), 0.0) # the objective is constant: T does not depend on the parameters p0..p3, alpha
        for entry in soc_refs
            cref = entry.cref
            _, de = get_expect_with_deriv(TermToExpr(entry.obs),
                p0_r, p0_i, p1_r, p1_i, p2_r, p2_i, p3_r, p3_i, alpha_r, alpha_i
            ) # get the derivative of the expectation
            dval = ComplexF64(de[i_deriv])
            dvec = [
                0.0,        # T[it] has no direct dependence on the parameter
                real(dval), # ∂Re(e)/∂param
                imag(dval), # ∂Im(e)/∂param
            ] # derivative of the SOC constraint function w.r.t. the current parameter
            MOI.set(model, DiffOpt.ForwardConstraintFunction(), cref, dvec)
        end
        DiffOpt.forward_differentiate!(model) # compute the derivatives -- this is the call that raises the error below
        # compute and store the gradient, as sketched just below
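        # (sketch) read out the directional derivative of each T[i],
        # assuming DiffOpt.ForwardVariablePrimal is the attribute to query:
        for entry in soc_refs
            grad_T[entry.idx, i_deriv] = MOI.get(model, DiffOpt.ForwardVariablePrimal(), T[entry.idx])
        end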
    end
    return objective_value(model), grad_T
end
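Calling it with hypothetical placeholder values, e.g.

val, grad_T = diff_data_cat(dict_data, 4, 0.1, 0.0, 0.2, 0.0, 0.3, 0.0, 0.4, 0.0, 1.5, 0.0)

the forward_differentiate! call fails with: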
ERROR: Trying to compute the forward differentiation on a model with termination status OPTIMIZE_NOT_CALLED
Stacktrace:
[1] error(s::String)
@ Base ./error.jl:35
[2] forward_differentiate!(model::DiffOpt.Optimizer{MathOptInterface.Utilities.CachingOptimizer{…}})
@ DiffOpt ~/.julia/packages/DiffOpt/FJjnq/src/moi_wrapper.jl:553
[3] forward_differentiate!
@ ~/.julia/packages/DiffOpt/FJjnq/src/jump_moi_overloads.jl:393 [inlined]
[4] forward_differentiate!(model::MathOptInterface.Utilities.CachingOptimizer{MathOptInterface.Bridges.LazyBridgeOptimizer{…}, MathOptInterface.Utilities.UniversalFallback{…}})
@ DiffOpt ~/.julia/packages/DiffOpt/FJjnq/src/jump_moi_overloads.jl:378
[5] forward_differentiate!(model::Model)
@ DiffOpt ~/.julia/packages/DiffOpt/FJjnq/src/jump_moi_overloads.jl:363
[6] diff_data_cat(dict_data::Dict{…}, deg_max::Int64, p0_r::Float64, p0_i::Float64, p1_r::Float64, p1_i::Float64, p2_r::Float64, p2_i::Float64, p3_r::Float64, p3_i::Float64, alpha_r::Float64, alpha_i::Float64)
@ Main ~/source/Cat_4legs/fit_cat.jl:236
[7] test_diff_cat(alpha::Float64, Nmax::Int64, deg_max::Int64)
@ Main ~/source/Cat_4legs/fit_cat.jl:289
[8] top-level scope
@ REPL[6]:1
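For reference, here is the basic forward-mode pattern I think I am following, reduced to a single scalar constraint and one parameter. This is only a sketch based on the DiffOpt usage docs; the model and all numbers are placeholders:

using JuMP, DiffOpt, Hypatia
import MathOptInterface; const MOI = MathOptInterface

m = Model(() -> DiffOpt.diff_optimizer(Hypatia.Optimizer))
set_silent(m)
@variable(m, x)
@constraint(m, cons, x >= 3) # read the 3 as a parameter p
@objective(m, Min, 2x)
optimize!(m)

MOI.set(m, DiffOpt.ForwardObjectiveFunction(), 0.0 * x)              # objective independent of p
MOI.set(m, DiffOpt.ForwardConstraintFunction(), cons, AffExpr(-1.0)) # d(x - p)/dp = -1
DiffOpt.forward_differentiate!(m)
dx = MOI.get(m, DiffOpt.ForwardVariablePrimal(), x)                  # I expect dx/dp ≈ 1.0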