@tlienart No luck — I'm not sure whether I did it correctly. Here is the script I ran and the full error:
# Reproduction script: fitting LGBMRegressor succeeds, but fitting ARDRegressor
# raises a numpy LinAlgError from scikit-learn (trace below).
using Pkg
# Dev-install the suggested OpenSpecFun_jll fork — presumably the workaround
# under discussion; NOTE(review): confirm this is the intended install step.
Pkg.develop(PackageSpec(url="https://github.com/tlienart/OpenSpecFun_jll.jl"))
using MLJ
# Load the Boston housing dataset (features X, target y).
X, y = @load_boston;
# 70/30 train/test split of the row indices, seeded for reproducibility.
train, test = partition(eachindex(y), .7, rng=333);
# First model: LightGBM regressor — this fit completes without error.
@load LGBMRegressor
mdl = LGBMRegressor()
mach = machine(mdl, X, y)
fit!(mach, rows=train)
# Second model: scikit-learn's ARD (Bayesian) regressor — this fit fails
# with LinAlgError("unrecoverable internal error.") inside sklearn's
# update_sigma / scipy eigh call (see stack trace below).
@load ARDRegressor
mdl = ARDRegressor()
mach = machine(mdl, X, y)
fit!(mach, rows=train)
PyError ($(Expr(:escape, :(ccall(#= /Users/AZevelev/.julia/packages/PyCall/zqDXB/src/pyfncall.jl:43 =# @pysym(:PyObject_Call), PyPtr, (PyPtr, PyPtr, PyPtr), o, pyargsptr, kw))))) <class 'numpy.linalg.LinAlgError'>
LinAlgError('unrecoverable internal error.')
File "/Users/AZevelev/.julia/conda/3/lib/python3.7/site-packages/sklearn/linear_model/_bayes.py", line 577, in fit
sigma_ = update_sigma(X, alpha_, lambda_, keep_lambda, n_samples)
File "/Users/AZevelev/.julia/conda/3/lib/python3.7/site-packages/sklearn/linear_model/_bayes.py", line 562, in update_sigma
X[:, keep_lambda].T))
File "/Users/AZevelev/.julia/conda/3/lib/python3.7/site-packages/sklearn/externals/_scipy_linalg.py", line 99, in pinvh
s, u = decomp.eigh(a, lower=lower, check_finite=False)
File "/Users/AZevelev/.julia/conda/3/lib/python3.7/site-packages/scipy/linalg/decomp.py", line 474, in eigh
raise LinAlgError("unrecoverable internal error.")
pyerr_check at exception.jl:60 [inlined]
pyerr_check at exception.jl:64 [inlined]
_handle_error(::String) at exception.jl:81
macro expansion at exception.jl:95 [inlined]
#110 at pyfncall.jl:43 [inlined]
disable_sigint at c.jl:446 [inlined]
__pycall! at pyfncall.jl:42 [inlined]
_pycall!(::PyCall.PyObject, ::PyCall.PyObject, ::Tuple{Array{Float64,2},Array{Float64,1}}, ::Int64, ::Ptr{Nothing}) at pyfncall.jl:29
_pycall!(::PyCall.PyObject, ::PyCall.PyObject, ::Tuple{Array{Float64,2},Array{Float64,1}}, ::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}) at pyfncall.jl:11
(::PyCall.PyObject)(::Array{Float64,2}, ::Vararg{Any,N} where N; kwargs::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}) at pyfncall.jl:86
(::PyCall.PyObject)(::Array{Float64,2}, ::Vararg{Any,N} where N) at pyfncall.jl:86
fit!(::PyCall.PyObject, ::Array{Float64,2}, ::Vararg{Any,N} where N; kwargs::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}) at Skcore.jl:100
fit!(::PyCall.PyObject, ::Array{Float64,2}, ::Array{Float64,1}) at Skcore.jl:100
fit(::ARDRegressor, ::Int64, ::NamedTuple{(:Crim, :Zn, :Indus, :NOx, :Rm, :Age, :Dis, :Rad, :Tax, :PTRatio, :Black, :LStat),NTuple{12,Array{Float64,1}}}, ::Array{Float64,1}) at ScikitLearn.jl:157
fit!(::Machine{ARDRegressor}; rows::Array{Int64,1}, verbosity::Int64, force::Bool) at machines.jl:183
(::StatsBase.var"#fit!##kw")(::NamedTuple{(:rows,),Tuple{Array{Int64,1}}}, ::typeof(fit!), ::Machine{ARDRegressor}) at machines.jl:146
top-level scope at MLJ_FitMachine_Error.jl:18