DimensionMismatch with MLJ

I’m trying to use `MultitargetLinearRegressor` from MLJ.jl. Here is the MWE (minimal working example):

# Reproduction script: multitarget linear regression via MLJ, evaluated with 5-fold CV.
using MLJ, DataFrames
# Load the regressor type provided by the MultivariateStats interface package.
multiLinReg = @load MultitargetLinearRegressor pkg = MultivariateStats
# data
inputs = DataFrame(rand(136, 11), :auto)  # 136 observations × 11 already-continuous features
# NOTE(review): this preprocessing pipeline is fitted on the FULL dataset here, before
# the cross-validation below — test-fold statistics leak into the standardization.
# Composing it with the regressor instead (e.g. `preparePipe |> lr`) would let
# `evaluate` refit the preprocessing inside each fold.
preparePipe = ContinuousEncoder |> Standardizer
machPrepare = machine(preparePipe, inputs)
fit!(machPrepare, verbosity = 0)
inputs = MLJ.transform(machPrepare, inputs)  # rebinds `inputs` to the transformed table
outputs = DataFrame(rand(136, 3), :auto)    # 3 continuous targets
# model and evaluate
# `bias = false` is what triggers the DimensionMismatch shown below — presumably the
# interface's predict still slices a bias row off the coefficient matrix even when no
# bias was fitted (11 features vs. a 10×3 coefficient view); TODO confirm against
# MLJMultivariateStatsInterface's _predict_regressor.
lr = multiLinReg(; bias = false)
evLR = evaluate(
  lr, inputs, outputs,
  resampling = CV(nfolds = 5), measure = multitarget_l2
)

And I get the following error:

ERROR: DimensionMismatch: A has dimensions (28,11) but B has dimensions (10,3)
Stacktrace:
  [1] gemm_wrapper!(C::Matrix{…}, tA::Char, tB::Char, A::Matrix{…}, B::SubArray{…}, _add::LinearAlgebra.MulAddMul{…})
    @ LinearAlgebra C:\Users\usr\.julia\juliaup\julia-1.10.5+0.x64.w64.mingw32\share\julia\stdlib\v1.10\LinearAlgebra\src\matmul.jl:577
  [2] generic_matmatmul!
    @ C:\Users\usr\.julia\juliaup\julia-1.10.5+0.x64.w64.mingw32\share\julia\stdlib\v1.10\LinearAlgebra\src\matmul.jl:352 [inlined]
  [3] mul!
    @ C:\Users\usr\.julia\juliaup\julia-1.10.5+0.x64.w64.mingw32\share\julia\stdlib\v1.10\LinearAlgebra\src\matmul.jl:263 [inlined]
  [4] mul!
    @ C:\Users\usr\.julia\juliaup\julia-1.10.5+0.x64.w64.mingw32\share\julia\stdlib\v1.10\LinearAlgebra\src\matmul.jl:237 [inlined]
  [5] *(A::Matrix{Float64}, B::SubArray{Float64, 2, Matrix{Float64}, Tuple{UnitRange{…}, Base.Slice{…}}, false})    
    @ LinearAlgebra C:\Users\usr\.julia\juliaup\julia-1.10.5+0.x64.w64.mingw32\share\julia\stdlib\v1.10\LinearAlgebra\src\matmul.jl:113
  [6] _predict_regressor(fr::MLJMultivariateStatsInterface.LinearFitresult{…}, Xmat_new::Matrix{…}, prototype::DataFrame)
    @ MLJMultivariateStatsInterface C:\Users\usr\.julia\packages\MLJMultivariateStatsInterface\VOkse\src\models\linear_models.jl:67
  [7] predict(::MLJMultivariateStatsInterface.MultitargetLinearRegressor, fr::MLJMultivariateStatsInterface.LinearFitresult{…}, Xnew::DataFrame)
    @ MLJMultivariateStatsInterface C:\Users\usr\.julia\packages\MLJMultivariateStatsInterface\VOkse\src\models\linear_models.jl:104
  [8] predict(mach::Machine{…}; rows::UnitRange{…})
    @ MLJBase C:\Users\usr\.julia\packages\MLJBase\7nGJF\src\operations.jl:86
  [9] predict
    @ C:\Users\usr\.julia\packages\MLJBase\7nGJF\src\operations.jl:82 [inlined]
 [10] (::MLJBase.var"#294#305"{Machine{…}, UnitRange{…}})(op::typeof(predict))
    @ MLJBase .\none:0
 [11] iterate
    @ .\generator.jl:47 [inlined]
 [12] _all(f::Base.var"#384#386", itr::Base.Generator{Vector{…}, MLJBase.var"#294#305"{…}}, ::Colon)
    @ Base .\reduce.jl:1287
 [13] all
    @ .\reduce.jl:1283 [inlined]
 [14] Dict(kv::Base.Generator{Vector{typeof(predict)}, MLJBase.var"#294#305"{Machine{…}, UnitRange{…}}})
    @ Base .\dict.jl:111
 [15] fit_and_extract_on_fold
    @ C:\Users\usr\.julia\packages\MLJBase\7nGJF\src\resampling.jl:1466 [inlined]
 [16] (::MLJBase.var"#277#278"{MLJBase.var"#fit_and_extract_on_fold#304"{…}, Machine{…}, Int64})(k::Int64)
    @ MLJBase C:\Users\usr\.julia\packages\MLJBase\7nGJF\src\resampling.jl:1289
 [17] _mapreduce(f::MLJBase.var"#277#278"{…}, op::typeof(vcat), ::IndexLinear, A::UnitRange{…})
    @ Base .\reduce.jl:440
 [18] _mapreduce_dim
    @ .\reducedim.jl:365 [inlined]
 [19] mapreduce
    @ .\reducedim.jl:357 [inlined]
 [20] _evaluate!(func::MLJBase.var"#fit_and_extract_on_fold#304"{…}, mach::Machine{…}, ::CPU1{…}, nfolds::Int64, verbosity::Int64)
    @ MLJBase C:\Users\usr\.julia\packages\MLJBase\7nGJF\src\resampling.jl:1288
 [21] evaluate!(mach::Machine{…}, resampling::Vector{…}, weights::Nothing, class_weights::Nothing, rows::Nothing, verbosity::Int64, repeats::Int64, measures::Vector{…}, operations::Vector{…}, acceleration::CPU1{…}, force::Bool, per_observation_flag::Bool, logger::Nothing, user_resampling::CV, compact::Bool)
    @ MLJBase C:\Users\usr\.julia\packages\MLJBase\7nGJF\src\resampling.jl:1510
 [22] evaluate!(::Machine{…}, ::CV, ::Nothing, ::Nothing, ::Nothing, ::Int64, ::Int64, ::Vector{…}, ::Vector{…}, ::CPU1{…}, ::Bool, ::Bool, ::Nothing, ::CV, ::Bool)
    @ MLJBase C:\Users\usr\.julia\packages\MLJBase\7nGJF\src\resampling.jl:1603
 [23] evaluate!(mach::Machine{…}; resampling::CV, measures::Nothing, measure::StatisticalMeasuresBase.FussyMeasure{…}, weights::Nothing, class_weights::Nothing, operations::Nothing, operation::Nothing, acceleration::CPU1{…}, rows::Nothing, repeats::Int64, force::Bool, check_measure::Bool, per_observation::Bool, verbosity::Int64, logger::Nothing, compact::Bool)
    @ MLJBase C:\Users\usr\.julia\packages\MLJBase\7nGJF\src\resampling.jl:1232
 [24] evaluate(::MLJMultivariateStatsInterface.MultitargetLinearRegressor, ::DataFrame, ::Vararg{…}; cache::Bool, kwargs::@Kwargs{…})
    @ MLJBase C:\Users\usr\.julia\packages\MLJBase\7nGJF\src\resampling.jl:1262
 [25] top-level scope
    @ c:\Users\usr\OneDrive\Área de Trabalho\mwe\mwe.jl:6

Including a bias in the model allows this MWE to run. In my original code, however, if I use a bias and/or standardization, the Cholesky factorization fails because the matrix is not positive definite.

I’m not sure how to avoid both the Cholesky factorization failure and the dimension mismatch at the same time. Suggestions are appreciated.