Help with `@generated` with the Cartesian module

Hi,
I would like to ask for help generalizing the following construction that uses Base.Cartesian for automatic creation of loops. First, the particular case n=8:

@macroexpand begin
@inbounds for i_8 in eachindex(v[8])
        @nloops 7 i i -> eachindex(v[i]) d -> d == 1 ? nothing : d == 7 ? cache[d-1] = v[i_{d}, d] * v[i_{d+1}, d+1] : cache[d-1] = cache[d] * v[i_{d}, d] begin (@nref 8 A i) = v[i_1, 1] * cache[1] end
end
end
quote  # In[51], line 2:
    begin 
        $(Expr(:inbounds, true))
        for i_8 = eachindex(v[8]) # In[51], line 3:
            begin  # cartesian.jl, line 62:
                for i_7 = eachindex(v[7]) # cartesian.jl, line 63:
                    cache[6] = v[i_7, 7] * v[i_8, 8] # cartesian.jl, line 64:
                    begin  # cartesian.jl, line 62:
                        for i_6 = eachindex(v[6]) # cartesian.jl, line 63:
                            cache[5] = cache[6] * v[i_6, 6] # cartesian.jl, line 64:
                            begin  # cartesian.jl, line 62:
                                for i_5 = eachindex(v[5]) # cartesian.jl, line 63:
                                    cache[4] = cache[5] * v[i_5, 5] # cartesian.jl, line 64:
                                    begin  # cartesian.jl, line 62:
                                        for i_4 = eachindex(v[4]) # cartesian.jl, line 63:
                                            cache[3] = cache[4] * v[i_4, 4] # cartesian.jl, line 64:
                                            begin  # cartesian.jl, line 62:
                                                for i_3 = eachindex(v[3]) # cartesian.jl, line 63:
                                                    cache[2] = cache[3] * v[i_3, 3] # cartesian.jl, line 64:
                                                    begin  # cartesian.jl, line 62:
                                                        for i_2 = eachindex(v[2]) # cartesian.jl, line 63:
                                                            cache[1] = cache[2] * v[i_2, 2] # cartesian.jl, line 64:
                                                            begin  # cartesian.jl, line 62:
                                                                for i_1 = eachindex(v[1]) # cartesian.jl, line 63:
                                                                    nothing # cartesian.jl, line 64:
                                                                    begin  # In[51], line 3:
                                                                        A[i_1, i_2, i_3, i_4, i_5, i_6, i_7, i_8] = v[i_1, 1] * cache[1]
                                                                    end # cartesian.jl, line 65:
                                                                    nothing
                                                                end
                                                            end # cartesian.jl, line 65:
                                                            nothing
                                                        end
                                                    end # cartesian.jl, line 65:
                                                    nothing
                                                end
                                            end # cartesian.jl, line 65:
                                            nothing
                                        end
                                    end # cartesian.jl, line 65:
                                    nothing
                                end
                            end # cartesian.jl, line 65:
                            nothing
                        end
                    end # cartesian.jl, line 65:
                    nothing
                end
            end
        end
        $(Expr(:inbounds, :pop))
    end
end

My attempt for generalization to n dimensions inside a function is to use @generated (as suggested in the docs),

@generated function y(A, v::VectorOfArray)
    quote
    n = length(v)
    cache = Vector{Float64}($n-2)
    @inbounds for i_{$n} in eachindex(v[$n])
                        @nloops ($n-1) i i -> eachindex(v[i]) d -> d == 1 ? nothing : d == $n-1 ? cache[d-1] = v[i_{d}, d] * v[i_{d+1}, d+1] : cache[d-1] = cache[d] * v[i_{d}, d] begin (@nref $n A i) = v[i_1, 1] * cache[1] end
                    end
    end
end

Test:

t = [1.0, 1.5, 2.22222, 3.25, 4.69048, 6.67857, 9.38095, 13.0, 17.7778, 24.0, 32.0]
v = VectorOfArray([t for i in 1:8]) 
A = Array{Float64}(11,11,11,11,11,11,11,11);
F = y(A, v);
MethodError: no method matching _nloops(::Expr, ::Symbol, ::Expr, ::Expr, ::Expr)
Closest candidates are:
  _nloops(::Int64, ::Symbol, ::Expr, ::Expr...) at cartesian.jl:48
  _nloops(::Int64, ::Symbol, ::Symbol, ::Expr...) at cartesian.jl:43

Taking off the @generated helps you understand what is going on.
Often, I’ll split off the function that generates the expression, from the generated function itself. That is

function generate_quote([necessary type information])
    ....
end
@generated function generated_function(...)
    generate_quote(necessary type information)
end

Then, you can just call generate_quote to see if the code looks good, before calling the generated function itself.
In this case, you will notice that generating the quote itself throws an error, because n is not defined!
Inside the quote it is trying to define n, not outside.
However, you also cannot move it outside:

@generated function y(A, v::VectorOfArray)
    n = length(v)
    quote
    cache = Vector{Float64}($n-2)
    @inbounds for $(Symbol(:i_, n)) in eachindex(v[$n])
                        @nloops ($n-1) i i -> eachindex(v[i]) d -> d == 1 ? nothing : d == $n-1 ? cache[d-1] = v[i_{d}, d] * v[i_{d+1}, d+1] : cache[d-1] = cache[d] * v[i_{d}, d] begin (@nref $n A i) = v[i_1, 1] * cache[1] end
                    end
    end
end

because in the generated function, you only have information on the types, not the actual values.
Thus, this would work:

@generated function y(A, v::VectorOfArray{T,n}) where {T,n}
    quote
    cache = Vector{Float64}($n-2)
    @inbounds for $(Symbol(:i_, n)) in eachindex(v[$n])
                        @nloops ($n-1) i i -> eachindex(v[i]) d -> d == 1 ? nothing : d == $n-1 ? cache[d-1] = v[i_{d}, d] * v[i_{d+1}, d+1] : cache[d-1] = cache[d] * v[i_{d}, d] begin (@nref $n A i) = v[i_1, 1] * cache[1] end
                    end
    end
end

Now, it takes n from the dimensionality of the VectorOfArray.
However, this n is not the length of the VectorOfArray, but the number of dimensions. That is, if the VectorOfArray is a vector of matrices, it is basically a rank 3 tensor, regardless of how many matrices it contains:

julia> v = VectorOfArray([randn(4,4) for i ∈ 1:40]);

julia> typeof(v), length(v)
(RecursiveArrayTools.VectorOfArray{Float64,3,Array{Array{Float64,2},1}}, 40)

You do not have compile time access to the length. You could pass it as a Val argument:

y(A, v) = generated_y(A, v, Val(length(v))) # Dynamic dispatch
@generated function generated_y(A, v::VectorOfArray, ::Val{n}) where n
    quote
    cache = Vector{Float64}($n-2)
    @inbounds for $(Symbol(:i_, n)) in eachindex(v[$n])
                        @nloops ($n-1) i i -> eachindex(v[i]) d -> d == 1 ? nothing : d == $n-1 ? cache[d-1] = v[i_{d}, d] * v[i_{d+1}, d+1] : cache[d-1] = cache[d] * v[i_{d}, d] begin (@nref $n A i) = v[i_1, 1] * cache[1] end
                    end
    end
end

but, while I haven’t looked too closely at what your function is actually trying to do, I have a hard time imagining why you would want size(v,1) nested loops. Imagine v were a 100x100 matrix — that would require 100^100 iterations!

Also, given the @nref, maybe A already has the correct number of axes?
In which case this should be fine:

@generated function y(A::AbstractArray{T,n}, v::VectorOfArray) where {T,n}
    quote
    cache = Vector{Float64}($n-2)
    @inbounds for $(Symbol(:i_, n)) in eachindex(v[$n])
                        @nloops ($n-1) i i -> eachindex(v[i]) d -> d == 1 ? nothing : d == $n-1 ? cache[d-1] = v[i_{d}, d] * v[i_{d+1}, d+1] : cache[d-1] = cache[d] * v[i_{d}, d] begin (@nref $n A i) = v[i_1, 1] * cache[1] end
                    end
    end
end
4 Likes

Thanks for this great answer!!!

You’re right, i can use the dimensions of the output array A. Both solutions work like a charm! Minor thing: i had to change @nloops ($n-1) ... to @nloops $(n-1) ..., otherwise i get a MethodError: no method matching _nloops(::Expr, ::Symbol, ::Expr, ::Expr, ::Expr).

This code is to compute the tensor form of the Bernstein expansion of a multivariate monomial, that’s why it has an exponential complexity. There exist techniques to work with these expansions lazily, it’s also in the roadmap of our BernsteinExpansions.jl package…

(BTW, the particular case can be done simply with:

using TensorOperations

y(v) = @tensor A[i1,i2,i3,i4,i5,i6,i7,i8] :=  v[i1, 1] * v[i2, 2] * v[i3, 3] * v[i4, 4] * v[i5, 5] * v[i6, 6] * v[i7, 7] * v[i8, 8]

.)

1 Like