I’m trying to define a custom ForwardDiff.jl differentiation rule for the bivariate normal cumulative distribution function BvN(h, k, r) = Φ₂(h, k; r). The rule I have works for first derivatives, but mixed higher-order derivatives come out as zero.
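To make this runnable, here is a stand-in implementation of BvN and its partial derivatives with the same signatures as my real ones (the exact implementation shouldn't matter for the question). It uses Plackett's identity, Φ₂(h, k; r) = Φ(h)Φ(k) + ∫₀ʳ ϕ₂(h, k; t) dt with ∂Φ₂/∂r = ϕ₂(h, k; r), and ∂Φ₂/∂h = ϕ(h)·Φ((k − r·h)/√(1 − r²)):

using Distributions, QuadGK

# Standard normal pdf/cdf and the bivariate normal density
ϕ(x) = pdf(Normal(), x)
Φ(x) = cdf(Normal(), x)
ϕ2(h, k, r) = exp(-(h^2 - 2r*h*k + k^2) / (2*(1 - r^2))) / (2π*sqrt(1 - r^2))

# Bivariate normal CDF via Plackett's identity
BvN(h, k, r) = Φ(h)*Φ(k) + quadgk(t -> ϕ2(h, k, t), 0, r)[1]

# Partial derivatives of BvN
∂hBvN(h, k, r) = ϕ(h) * Φ((k - r*h) / sqrt(1 - r^2))
∂kBvN(h, k, r) = ϕ(k) * Φ((h - r*k) / sqrt(1 - r^2))
∂rBvN(h, k, r) = ϕ2(h, k, r)

The custom rule itself: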
import ForwardDiff: Dual, value, partials, derivative, ≺, @define_ternary_dual_op
@inline function calc_BvN(h, k, r, ::Type{T}) where T
    # strip one level of Dual from each argument, apply the chain rule, and rewrap with tag T
    vh = value(h)
    vk = value(k)
    vr = value(r)
    val = BvN(vh, vk, vr)
    p = ∂hBvN(vh, vk, vr) * partials(h) +
        ∂kBvN(vh, vk, vr) * partials(k) +
        ∂rBvN(vh, vk, vr) * partials(r)
    return Dual{T}(val, p)
end
@define_ternary_dual_op(
    BvN,
    calc_BvN(x, y, z, Txyz), # x, y, z all carry tag Txyz
    calc_BvN(x, y, z, Txy),  # x and y carry tag Txy
    calc_BvN(x, y, z, Txz),  # x and z carry tag Txz
    calc_BvN(x, y, z, Tyz),  # y and z carry tag Tyz
    calc_BvN(x, y, z, Tx),   # only x carries tag Tx
    calc_BvN(x, y, z, Ty),   # only y carries tag Ty
    calc_BvN(x, y, z, Tz),   # only z carries tag Tz
)
derivative(h -> BvN(h, .3, .5), .6)                     # correct: matches ∂hBvN(.6, .3, .5)
derivative(h -> derivative(k -> BvN(h, k, .4), .3), .6) # gives 0, which is wrong: ∂²BvN/∂h∂k is the joint density, which is strictly positive
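For reference, here is the sanity check I'm comparing against, using the stand-in ϕ2 and BvN from above: the mixed second derivative of a bivariate CDF in its two arguments is the joint density, and a crude central finite difference on BvN lands close to the same number.

# Expected value of the mixed derivative at (h, k, r) = (.6, .3, .4)
ϕ2(.6, .3, .4)

# Central finite-difference estimate of ∂²BvN/∂h∂k; roughly agrees with ϕ2(.6, .3, .4)
ε = 1e-3
(BvN(.6 + ε, .3 + ε, .4) - BvN(.6 + ε, .3 - ε, .4) -
 BvN(.6 - ε, .3 + ε, .4) + BvN(.6 - ε, .3 - ε, .4)) / (4ε^2)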