I’m trying to read .jld2 files from a cluster DFS share and keep getting errors like this:
BoundsError: attempt to access 23-element Vector{Int64} at index [31]
Stacktrace:
 [1] getindex
   @ ./array.jl:801 [inlined]
 [2] getindex
   @ ./abstractarray.jl:1173 [inlined]
 [3] read_heap_object(f::JLD2.JLDFile{JLD2.MmapIO}, hid::JLD2.GlobalHeapID, rr::JLD2.ReadRepresentation{JLD2.RelOffset, JLD2.RelOffset})
   @ JLD2 ~/.julia/packages/JLD2/sFiXk/src/global_heaps.jl:130
 [4] jlconvert
   @ ~/.julia/packages/JLD2/sFiXk/src/data/writing_datatypes.jl:327 [inlined]
 [5] types_from_refs(f::JLD2.JLDFile{JLD2.MmapIO}, ptr::Ptr{Nothing})
   @ JLD2 ~/.julia/packages/JLD2/sFiXk/src/data/reconstructing_datatypes.jl:290
 [6] jlconvert(rr::JLD2.ReadRepresentation{DataType, JLD2.OnDiskRepresentation{(0, 16), Tuple{String, Vector{Any}}, Tuple{JLD2.Vlen{String}, JLD2.Vlen{JLD2.RelOffset}}}()}, f::JLD2.JLDFile{JLD2.MmapIO}, ptr::Ptr{Nothing}, header_offset::JLD2.RelOffset)
   @ JLD2 ~/.julia/packages/JLD2/sFiXk/src/data/reconstructing_datatypes.jl:311
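For reference, the reading side, stripped to its essentials, looks like the sketch below (fn and "somekey" are placeholders for the real path and dataset name):

using JLD2

# Minimal sketch of the failing read; this uses JLD2's default MmapIO backend,
# which is the backend that shows up in the stack trace above.
data = jldopen(fn, "r") do file
    read(file, "somekey")   # "somekey" stands in for the real key
end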
There is a closed issue on GitHub (from before or after the JLD2 development hiatus, I’m not sure) where a workaround for writing was suggested, but my attempt to adapt it for reading (I really have no idea what I’m doing here) fails:
jldopen(fn, true, true, true, IOStream) do file
    for (key, value) in dictionary
        check2 = read(file, key, value)
    end
end
ERROR: UndefVarError: dictionary not defined
Stacktrace:
 [1] (::var"#15#16")(file::JLD2.JLDFile{IOStream})
   @ Main ./REPL[33]:2
 [2] jldopen(::var"#15#16", ::String, ::Vararg{Any, N} where N; kws::Base.Iterators.Pairs{Union{}, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
   @ JLD2 ~/.julia/packages/JLD2/sFiXk/src/loadsave.jl:4
 [3] jldopen(::Function, ::String, ::Bool, ::Bool, ::Bool, ::Type)
   @ JLD2 ~/.julia/packages/JLD2/sFiXk/src/loadsave.jl:2
 [4] top-level scope
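In case it clarifies what I was aiming for, here is my current guess at a read-only version of that workaround. This is only a sketch based on the positional jldopen signature that appears in the stack trace above (::Bool, ::Bool, ::Bool, ::Type); fn is again a placeholder for the real path:

using JLD2

# Guess at a read-only adaptation of the IOStream workaround:
# false, false, false = don't write, don't create, don't truncate,
# and iterate over the file's own top-level keys instead of a dictionary variable.
jldopen(fn, false, false, false, IOStream) do file
    for key in keys(file)        # keys(file) lists the top-level dataset names
        value = read(file, key)
        @show key typeof(value)
    end
end

At least the open call here should dispatch to the same positional method shown in frame [3] of the stack trace, just with the read-only flags instead of true, true, true.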