This approach does not seem ideal for partitioning data for parallel execution. Consider
julia> makechunks(collect(1:11), 4)
4-element Vector{SubArray{Int64, 1, Vector{Int64}, Tuple{UnitRange{Int64}}, true}}:
[1, 2]
[3, 4]
[5, 6]
[7, 8, 9, 10, 11]
Splitting data like this will bottleneck on the last chunk, which holds the remainder; at worst, the whole computation runs potentially close to twice as slow as it could.
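To make that concrete with the chunk lengths from the output above (a quick back-of-the-envelope check assuming uniform per-element cost, not a benchmark):

chunk_lengths = [2, 2, 2, 5]                         # lengths of the chunks shown above
ideal = sum(chunk_lengths) / length(chunk_lengths)   # 2.75 elements per chunk if perfectly balanced
maximum(chunk_lengths) / ideal                       # ≈ 1.82, so the last chunk does nearly 2x the ideal share of work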
I believe this is better:
function equal_partition(n::Int64, parts::Int64)
    # Fewer elements than parts: one single-element range per element.
    if n < parts
        return [ x:x for x in 1:n ]
    end
    # Evenly spaced (rounded) start indices, with n+1 appended so the last range ends at n.
    starts = push!(Int64.(round.(1:n/parts:n)), n+1)
    return [ starts[i]:starts[i+1]-1 for i in 1:length(starts)-1 ]
end

function equal_partition(V::AbstractVector, parts::Int64)
    ranges = equal_partition(length(V), parts)
    return [ view(V, range) for range in ranges ]
end
julia> equal_partition(collect(1:11), 4)
4-element Vector{SubArray{Int64, 1, Vector{Int64}, Tuple{UnitRange{Int64}}, true}}:
[1, 2, 3]
[4, 5]
[6, 7, 8]
[9, 10, 11]
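For completeness, here is one way such chunks could be handed to threads (a minimal sketch, assuming Threads.@spawn; process_chunk is a hypothetical stand-in for the real per-chunk work):

using Base.Threads: @spawn, nthreads

process_chunk(chunk) = sum(chunk)        # hypothetical per-chunk worker

data = collect(1:1_000)
chunks = equal_partition(data, nthreads())
tasks = map(chunk -> @spawn(process_chunk(chunk)), chunks)   # one task per chunk
results = fetch.(tasks)                                      # wait for all tasks and collect their results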
Edited to work better when n < parts (in which case it returns fewer than parts ranges, one single-element range per element).
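For illustration, with the definition above the n < parts branch should give e.g.:
julia> equal_partition(3, 5)
3-element Vector{UnitRange{Int64}}:
1:1
2:2
3:3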