I tried it in the REPL a couple of times. Once, I hit `fatal: error thrown and no exception handler available.`
Curiously, the session didn’t crash. Here’s the full
stack trace:
julia> train_discriminator!(batches, G, D, opt, hp) # try interrupting this
batch 1: 1.448166
batch 5: 1.4481609
batch 9: 1.4481605
batch 13: 1.4481593
batch 17: 1.4481587
batch 21: 1.4481585
batch 25: 1.4481581
^Cfatal: error thrown and no exception handler available.
InterruptException()
sigatomic_end at ./c.jl:437 [inlined]
task_done_hook at ./task.jl:542
jl_apply_generic at /Applications/Julia-1.7.app/Contents/Resources/julia/lib/julia/libjulia-internal.1.dylib (unknown line)
jl_finish_task at /Applications/Julia-1.7.app/Contents/Resources/julia/lib/julia/libjulia-internal.1.dylib (unknown line)
start_task at /Applications/Julia-1.7.app/Contents/Resources/julia/lib/julia/libjulia-internal.1.dylib (unknown line)
┌ Warning: temp cleanup
│ exception =
│ schedule: Task not runnable
│ Stacktrace:
│ [1] error(s::String)
│ @ Base ./error.jl:33
│ [2] enq_work(t::Task)
│ @ Base ./task.jl:628
│ [3] yield
│ @ ./task.jl:739 [inlined]
│ [4] yield
│ @ ./task.jl:737 [inlined]
│ [5] Channel{Tuple{String, Vector{String}, Vector{String}}}(func::Base.Filesystem.var"#31#34"{String}, size::Int64; taskref::Nothing, spawn::Bool)
│ @ Base ./channels.jl:138
│ [6] Channel (repeats 2 times)
│ @ ./channels.jl:131 [inlined]
│ [7] #walkdir#30
│ @ ./file.jl:953 [inlined]
│ [8] prepare_for_deletion(path::String)
│ @ Base.Filesystem ./file.jl:497
│ [9] temp_cleanup_purge(; force::Bool)
│ @ Base.Filesystem ./file.jl:532
│ [10] (::Base.var"#838#839")()
│ @ Base ./initdefs.jl:329
│ [11] _atexit()
│ @ Base ./initdefs.jl:350
└ @ Base.Filesystem file.jl:537
ERROR: TaskFailedException
nested task error: schedule: Task not runnable
Stacktrace:
[1] error(s::String)
@ Base ./error.jl:33
[2] schedule(t::Task, arg::Any; error::Bool)
@ Base ./task.jl:697
[3] schedule
@ ./task.jl:697 [inlined]
[4] uv_writecb_task(req::Ptr{Nothing}, status::Int32)
@ Base ./stream.jl:1110
[5] process_events
@ ./libuv.jl:104 [inlined]
[6] wait()
@ Base ./task.jl:838
[7] wait(c::Base.GenericCondition{Base.Threads.SpinLock})
@ Base ./condition.jl:123
[8] _wait(t::Task)
@ Base ./task.jl:293
[9] wait
@ ./task.jl:332 [inlined]
[10] threading_run(func::Function)
@ Base.Threads ./threadingconstructs.jl:38
[11] macro expansion
@ ./threadingconstructs.jl:97 [inlined]
[12] ∇conv_data_im2col!(dx::SubArray{Float32, 5, Array{Float32, 5}, Tuple{Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, UnitRange{Int64}, Base.Slice{Base.OneTo{Int64}}}, false}, dy::SubArray{Float32, 5, Array{Float32, 5}, Tuple{Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, UnitRange{Int64}, Base.Slice{Base.OneTo{Int64}}}, false}, w::SubArray{Float32, 5, Array{Float32, 5}, Tuple{Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, UnitRange{Int64}}, true}, cdims::DenseConvDims{3, 3, 3, 6, 3}; col::Array{Float32, 3}, alpha::Float32, beta::Float32)
@ NNlib ~/.julia/packages/NNlib/hydo3/src/impl/conv_im2col.jl:146
[13] ∇conv_data_im2col!
@ ~/.julia/packages/NNlib/hydo3/src/impl/conv_im2col.jl:125 [inlined]
[14] (::NNlib.var"#271#275"{Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, DenseConvDims{3, 3, 3, 6, 3}, SubArray{Float32, 5, Array{Float32, 5}, Tuple{Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, UnitRange{Int64}}, true}, SubArray{Float32, 5, Array{Float32, 5}, Tuple{Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, UnitRange{Int64}, Base.Slice{Base.OneTo{Int64}}}, false}, SubArray{Float32, 5, Array{Float32, 5}, Tuple{Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, UnitRange{Int64}, Base.Slice{Base.OneTo{Int64}}}, false}})()
@ NNlib ./threadingconstructs.jl:178
Stacktrace:
[1] sync_end(c::Channel{Any})
@ Base ./task.jl:381
[2] macro expansion
@ ./task.jl:400 [inlined]
[3] ∇conv_data!(out::Array{Float32, 5}, in1::Array{Float32, 5}, in2::Array{Float32, 5}, cdims::DenseConvDims{3, 3, 3, 6, 3}; kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
@ NNlib ~/.julia/packages/NNlib/hydo3/src/conv.jl:222
[4] ∇conv_data!
@ ~/.julia/packages/NNlib/hydo3/src/conv.jl:211 [inlined]
[5] ∇conv_data!(y::Array{Float32, 4}, x::Array{Float32, 4}, w::Array{Float32, 4}, cdims::DenseConvDims{2, 2, 2, 4, 2}; kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
@ NNlib ~/.julia/packages/NNlib/hydo3/src/conv.jl:145
[6] ∇conv_data!
@ ~/.julia/packages/NNlib/hydo3/src/conv.jl:145 [inlined]
[7] #∇conv_data#198
@ ~/.julia/packages/NNlib/hydo3/src/conv.jl:99 [inlined]
[8] ∇conv_data
@ ~/.julia/packages/NNlib/hydo3/src/conv.jl:98 [inlined]
[9] (::ConvTranspose{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}})(x::Array{Float32, 4})
@ Flux ~/.julia/packages/Flux/18YZE/src/layers/conv.jl:286
[10] macro expansion
@ ~/.julia/packages/Flux/18YZE/src/layers/basic.jl:53 [inlined]
[11] applychain(layers::Tuple{Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, var"#37#41", var"#38#42", ConvTranspose{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, var"#39#43", ConvTranspose{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, var"#40#44", ConvTranspose{2, 4, typeof(σ), Array{Float32, 4}, Vector{Float32}}}, x::Matrix{Float32})
@ Flux ~/.julia/packages/Flux/18YZE/src/layers/basic.jl:53
[12] Chain
@ ~/.julia/packages/Flux/18YZE/src/layers/basic.jl:51 [inlined]
[13] train_discriminator_step!(real::Array{Float32, 4}, G::Chain{Tuple{Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, var"#37#41", var"#38#42", ConvTranspose{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, var"#39#43", ConvTranspose{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, var"#40#44", ConvTranspose{2, 4, typeof(σ), Array{Float32, 4}, Vector{Float32}}}}, D::Chain{Tuple{Conv{2, 4, typeof(leakyrelu), Array{Float32, 4}, Vector{Float32}}, MaxPool{2, 4}, Conv{2, 4, typeof(leakyrelu), Array{Float32, 4}, Vector{Float32}}, MaxPool{2, 4}, typeof(Flux.flatten), Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, typeof(softmax)}}, opt::ADAM, hp::NamedTuple{(:latentdim, :batchsize), Tuple{Int64, Int64}})
@ Main ./REPL[300]:2
[14] train_discriminator!(realbatches::MLUtils.DataLoader{Array{Float32, 4}, Random._GLOBAL_RNG}, G::Chain{Tuple{Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, var"#37#41", var"#38#42", ConvTranspose{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, var"#39#43", ConvTranspose{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, var"#40#44", ConvTranspose{2, 4, typeof(σ), Array{Float32, 4}, Vector{Float32}}}}, D::Chain{Tuple{Conv{2, 4, typeof(leakyrelu), Array{Float32, 4}, Vector{Float32}}, MaxPool{2, 4}, Conv{2, 4, typeof(leakyrelu), Array{Float32, 4}, Vector{Float32}}, MaxPool{2, 4}, typeof(Flux.flatten), Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, typeof(softmax)}}, opt::ADAM, hp::NamedTuple{(:latentdim, :batchsize), Tuple{Int64, Int64}})
@ Main ./REPL[301]:6
[15] top-level scope
@ REPL[309]:1
[16] top-level scope
@ ~/.julia/packages/CUDA/qAl31/src/initialization.jl:52