I downloaded Julia 1.3.1.
```julia
versioninfo()

Julia Version 1.3.1
Commit 2d5741174c (2019-12-30 21:36 UTC)
Platform Info:
  OS: macOS (x86_64-apple-darwin18.6.0)
  CPU: Intel(R) Core(TM) i7-7920HQ CPU @ 3.10GHz
  WORD_SIZE: 64
  LIBM: libopenlibm
  LLVM: libLLVM-6.0.1 (ORCJIT, skylake)
Environment:
  JULIA_EDITOR = atom -a
  JULIA_NUM_THREADS = 4
```
Most things seem to work well, except when I train certain models in MLJ:

```julia
using MLJ
X, y = @load_boston;
train, test = partition(eachindex(y), .7, rng=333);
@load ARDRegressor
mdl = ARDRegressor()
mach = machine(mdl, X, y)
fit!(mach, rows=train)
```
fit! gives the following in the REPL:

```
Intel MKL ERROR: Parameter 15 was incorrect on entry to DSYEVR.
```
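As a point of comparison (just a sketch, and my assumption is that the failing DSYEVR belongs to the MKL shipped with the Conda Python rather than to Julia's own LAPACK), the analogous symmetric eigensolve can be exercised on the Julia side like this:

```julia
# Sanity check: eigen on a Symmetric matrix goes through the same LAPACK routine
# family (syevr) that the MKL error above complains about, but via Julia's own
# OpenBLAS/LAPACK rather than the Conda-managed MKL.
using LinearAlgebra
A = rand(50, 50)
S = Symmetric(A'A)        # symmetric positive semi-definite test matrix
vals, vecs = eigen(S)     # dispatches to LAPACK syevr! for Symmetric matrices
@show vals[1:3]
```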
Here is the detailed error message and stack trace:
```
PyError ($(Expr(:escape, :(ccall(#= /Users/azevelev/.julia/packages/PyCall/kAhnQ/src/pyfncall.jl:43 =# @pysym(:PyObject_Call), PyPtr, (PyPtr, PyPtr, PyPtr), o, pyargsptr, kw))))) <class 'ValueError'>
ValueError('illegal value in argument 15 of internal syevr')
  File "/Users/azevelev/.julia/conda/3/lib/python3.7/site-packages/sklearn/linear_model/bayes.py", line 562, in fit
    sigma_ = update_sigma(X, alpha_, lambda_, keep_lambda, n_samples)
  File "/Users/azevelev/.julia/conda/3/lib/python3.7/site-packages/sklearn/linear_model/bayes.py", line 547, in update_sigma
    X[:, keep_lambda].T))
  File "/Users/azevelev/.julia/conda/3/lib/python3.7/site-packages/sklearn/externals/_scipy_linalg.py", line 99, in pinvh
    s, u = decomp.eigh(a, lower=lower, check_finite=False)
  File "/Users/azevelev/.julia/conda/3/lib/python3.7/site-packages/scipy/linalg/decomp.py", line 472, in eigh
    _check_info(info, driver, positive=False) # triage more specifically
  File "/Users/azevelev/.julia/conda/3/lib/python3.7/site-packages/scipy/linalg/decomp.py", line 1201, in _check_info
    % (-info, driver))

pyerr_check at exception.jl:60 [inlined]
pyerr_check at exception.jl:64 [inlined]
_handle_error(::String) at exception.jl:81
macro expansion at exception.jl:95 [inlined]
#110 at pyfncall.jl:43 [inlined]
disable_sigint at c.jl:446 [inlined]
__pycall! at pyfncall.jl:42 [inlined]
_pycall!(::PyCall.PyObject, ::PyCall.PyObject, ::Tuple{Array{Float64,2},Array{Float64,1}}, ::Int64, ::Ptr{Nothing}) at pyfncall.jl:29
_pycall!(::PyCall.PyObject, ::PyCall.PyObject, ::Tuple{Array{Float64,2},Array{Float64,1}}, ::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}) at pyfncall.jl:11
#_#117(::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}, ::PyCall.PyObject, ::Array{Float64,2}, ::Vararg{Any,N} where N) at pyfncall.jl:86
(::PyCall.PyObject)(::Array{Float64,2}, ::Vararg{Any,N} where N) at pyfncall.jl:86
#fit!#31(::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}, ::typeof(ScikitLearnBase.fit!), ::PyCall.PyObject, ::Array{Float64,2}, ::Vararg{Any,N} where N) at Skcore.jl:100
fit!(::PyCall.PyObject, ::Array{Float64,2}, ::Array{Float64,1}) at Skcore.jl:100
fit(::ARDRegressor, ::Int64, ::NamedTuple{(:Crim, :Zn, :Indus, :NOx, :Rm, :Age, :Dis, :Rad, :Tax, :PTRatio, :Black, :LStat),NTuple{12,Array{Float64,1}}}, ::Array{Float64,1}) at ScikitLearn.jl:157
#fit!#40(::Array{Int64,1}, ::Int64, ::Bool, ::typeof(fit!), ::Machine{ARDRegressor}) at machines.jl:165
(::StatsBase.var"#kw##fit!")(::NamedTuple{(:rows,),Tuple{Array{Int64,1}}}, ::typeof(fit!), ::Machine{ARDRegressor}) at none:0
top-level scope at MacError.jl:9
include_string(::Module, ::String, ::String) at sys.dylib:?
include_string(::Module, ::String, ::String, ::Int64) at eval.jl:30
(::Atom.var"#127#132"{String,Int64,String,Bool})() at eval.jl:94
withpath(::Atom.var"#127#132"{String,Int64,String,Bool}, ::String) at utils.jl:30
withpath(::Function, ::String) at eval.jl:47
#126 at eval.jl:93 [inlined]
with_logstate(::Atom.var"#126#131"{String,Int64,String,Bool}, ::Base.CoreLogging.LogState) at logging.jl:395
with_logger at logging.jl:491 [inlined]
#125 at eval.jl:92 [inlined]
hideprompt(::Atom.var"#125#130"{String,Int64,String,Bool}) at repl.jl:85
macro expansion at eval.jl:91 [inlined]
macro expansion at dynamic.jl:24 [inlined]
(::Atom.var"#124#129")(::Dict{String,Any}) at eval.jl:86
handlemsg(::Dict{String,Any}, ::Dict{String,Any}) at comm.jl:164
(::Atom.var"#19#21"{Array{Any,1}})() at task.jl:333
```
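To take MLJ and ScikitLearn.jl out of the picture, the same scipy routine can be called directly through PyCall. This is only a sketch: it assumes PyCall is pointed at the Conda-managed Python under ~/.julia/conda/3 that shows up in the paths above, and the test matrix is purely illustrative:

```julia
# sklearn's pinvh ends up in scipy.linalg.eigh (see the traceback above),
# so call that directly and see whether the syevr error reproduces outside MLJ.
using PyCall
scipy_linalg = pyimport("scipy.linalg")
np = pyimport("numpy")

A = rand(100, 100)
S = A'A                      # symmetric positive semi-definite test matrix
w, v = scipy_linalg.eigh(S)  # the call that fails inside sklearn above
@show w[1:3]

np.show_config()             # shows which BLAS/LAPACK (presumably MKL here) numpy is built against
```

If this reproduces the syevr error, the problem sits in the Conda scipy/MKL install rather than in MLJ itself.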
Here are the regression models that did not work:
[ "ARDRegressor", "BayesianRidgeRegressor", "ElasticNetCVRegressor",
"GaussianProcessRegressor", "LarsCVRegressor", "LarsRegressor",
"LassoCVRegressor", "LassoLarsCVRegressor", "LassoLarsICRegressor",
"LassoLarsRegressor", "OrthogonalMatchingPursuitCVRegressor",
"OrthogonalMatchingPursuitRegressor" ]