Automatic differentiation through DataFrame or any other DataFrame-like tables

Hi, I’m inspecting some functionality of DiffEqFlux.jl by modifying DiffEqFlux.jl’s examples.

I wonder if it’s possible to save trajectory data as a DataFrame and differentiate through the DataFrame automatically (via e.g. Zygote).
Is that possible?

I tried a few approaches, but they all failed.

1 Like

I leave some observations.

  • AD works
  1. inserting trajectory data into a new array works, e.g., output = [dummy, trajectory_data]
  2. inserting trajectory data into a new NamedTuple works, e.g., output = (; dummy=dummy, u=trajectory_data)
  • AD does not work
  1. inserting trajectory data into a new Dict fails, e.g., output = Dict(:dummy => dummy, :u => trajectory_data)
    • Error message
      ERROR: Compiling Tuple{Type{Dict}, Tuple{Pair{String, Vector{Float64}}, Pair{String, Vector{Vector{Float64}}}}}: try/catch is not supported.
      Stacktrace:
      [1] error(s::String)
        @ Base ./error.jl:33
      [2] instrument(ir::IRTools.Inner.IR)
        @ Zygote ~/.julia/packages/Zygote/bJn8I/src/compiler/reverse.jl:121
      [3] #Primal#20
        @ ~/.julia/packages/Zygote/bJn8I/src/compiler/reverse.jl:202 [inlined]
      [4] Zygote.Adjoint(ir::IRTools.Inner.IR; varargs::Nothing, normalise::Bool)
        @ Zygote ~/.julia/packages/Zygote/bJn8I/src/compiler/reverse.jl:315
      [5] _generate_pullback_via_decomposition(T::Type)
        @ Zygote ~/.julia/packages/Zygote/bJn8I/src/compiler/emit.jl:101
      [6] #s3063#1218
        @ ~/.julia/packages/Zygote/bJn8I/src/compiler/interface2.jl:28 [inlined]
      [7] var"#s3063#1218"(::Any, ctx::Any, f::Any, args::Any)
        @ Zygote ./none:0
      [8] (::Core.GeneratedFunctionStub)(::Any, ::Vararg{Any})
        @ Core ./boot.jl:580
      [9] _pullback
        @ ./dict.jl:125 [inlined]
      [10] _pullback(::Zygote.Context, ::Type{Dict}, ::Pair{String, Vector{Float64}}, ::Pair{String, Vector{Vector{Float64}}})
        @ Zygote ~/.julia/packages/Zygote/bJn8I/src/compiler/interface2.jl:0
      [11] _pullback
        @ ~/.julia/dev/ContinuousTimePolicyGradients/test/model-estimation/toy.jl:44 [inlined]
      [12] _pullback(::Zygote.Context, ::var"#predict_n_ode#168"{Vector{Float32}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}, Vector{Float64}})
        @ Zygote ~/.julia/packages/Zygote/bJn8I/src/compiler/interface2.jl:0
      [13] _pullback
        @ ~/.julia/dev/ContinuousTimePolicyGradients/test/model-estimation/toy.jl:52 [inlined]
      [14] _pullback(::Zygote.Context, ::var"#loss_n_ode#169"{var"#predict_n_ode#168"{Vector{Float32}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}, Vector{Float64}}, Dict{String, Vector}})
        @ Zygote ~/.julia/packages/Zygote/bJn8I/src/compiler/interface2.jl:0
      [15] _apply
        @ ./boot.jl:814 [inlined]
      [16] adjoint
        @ ~/.julia/packages/Zygote/bJn8I/src/lib/lib.jl:200 [inlined]
      [17] _pullback
        @ ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65 [inlined]
      [18] _pullback
        @ ~/.julia/packages/Flux/BPPNj/src/optimise/train.jl:105 [inlined]
      [19] _pullback(::Zygote.Context, ::Flux.Optimise.var"#39#45"{var"#loss_n_ode#169"{var"#predict_n_ode#168"{Vector{Float32}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}, Vector{Float64}}, Dict{String, Vector}}, Tuple{}})
        @ Zygote ~/.julia/packages/Zygote/bJn8I/src/compiler/interface2.jl:0
      [20] pullback(f::Function, ps::Params)
        @ Zygote ~/.julia/packages/Zygote/bJn8I/src/compiler/interface.jl:351
      [21] gradient(f::Function, args::Params)
        @ Zygote ~/.julia/packages/Zygote/bJn8I/src/compiler/interface.jl:75
      [22] macro expansion
        @ ~/.julia/packages/Flux/BPPNj/src/optimise/train.jl:104 [inlined]
      [23] macro expansion
        @ ~/.julia/packages/Juno/n6wyj/src/progress.jl:134 [inlined]
      [24] train!(loss::Function, ps::Params, data::Base.Iterators.Take{Base.Iterators.Repeated{Tuple{}}}, opt::ADAM; cb::var"#164#170"{var"#164#165#171"{var"#predict_n_ode#168"{Vector{Float32}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}, Vector{Float64}}, Dict{String, Vector}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}})
        @ Flux.Optimise ~/.julia/packages/Flux/BPPNj/src/optimise/train.jl:102
      [25] main()
        @ Main ~/.julia/dev/ContinuousTimePolicyGradients/test/model-estimation/toy.jl:74
      [26] top-level scope
        @ REPL[25]:1
      [27] top-level scope
        @ ~/.julia/packages/CUDA/YpW0k/src/initialization.jl:52