MWE… packages:
using LinearAlgebra;
using BlackBoxOptim;
Data: taken from "Simple Linear Regression Examples: Real Life Problems & Solutions".
# Seven (x, y) samples, stored as 1×n row matrices so each sample
# becomes one column of the design matrix later on.
X = reshape([1.7, 1.5, 2.8, 5, 1.3, 2.2, 1.3], 1, :)
Y = reshape([368, 340, 665, 954, 331, 556, 375], 1, :)
#
# Evaluation grid: 50 points spanning the observed x-range.
lo, hi = extrema(X)
x = range(lo, hi; length=50);
Case of linear regression with linear polynomial basis function:
# Basis function: linear polynomial, ψ(x) = [1, x].
# (Identifier restored from mojibake "Ļ" → ψ.)
ψ(x) = [1, x];
# Design matrix Φ: evaluate the basis at every sample; one column per sample.
Φ = ψ.(X) |> v -> reduce(hcat, v)
#
# Least-squares fit of the row vector β via right division (min ‖Y − βΦ‖),
# and the residual norm of the fit.
β_lin = Y / Φ;
L_lin = norm(Y - β_lin * Φ);
#
# Fitted model.  NOTE(review): the default `β=β` needs a global β to exist —
# always pass β explicitly, as done below.
f(x; β=β) = β * ψ(x)
ylin = f.(x; β=β_lin) |> v -> reduce(vcat, v);
Higher-order polynomials in the basis function… just add terms in ψ(x).
Hand-tuned locations (means M) and spreads (standard deviations Σ) in the Gaussian basis functions:
# Hand-tuned Gaussian (radial) basis: one bump centred at each μ with width σ.
M = [1.5, 2.5, 5]
Σ = [0.5, 3, 4]
# Identifiers restored from mojibake: the function name ψ and the loop
# variable σ had collapsed into the same glyph "Ļ" in the original.
ψ(x) = [exp(-0.5 * (x - μ)^2 / σ^2) for (μ, σ) in zip(M, Σ)]
# Design matrix: one column of basis values per sample.
Φ = ψ.(X) |> v -> reduce(hcat, v)
#
# Least-squares weights and residual norm for the Gaussian basis.
β_gauss = Y / Φ
L_gauss = norm(Y - β_gauss * Φ)
#
# Fitted model; the default `β=β` needs a global β — pass β explicitly.
f(x; β=β) = β * ψ(x)
ygauss = f.(x; β=β_gauss) |> v -> reduce(vcat, v);
BlackBoxOptim formulation:
# Range of parameters: beta_i, mu_i, sigma_i
# Search box for the 9 parameters: β₁..β₃, μ₁..μ₃, σ₁..σ₃.
p_range = vcat(
    fill((-1.0e4, 1.0e4), 3),  # weights β
    fill((1.5, 5.0), 3),       # centres μ (span of the data)
    fill((0.1, 20.0), 3),      # widths σ
);
#
# Loss function for BlackBoxOptim
#
# Loss for BlackBoxOptim.  p packs [β; μ; σ], 3 values each.  Rebuilds the
# Gaussian design matrix from the candidate (μ, σ) pairs and returns the
# residual norm ‖Y − βΦ‖.  (The transcribed original broadcast a `zip`
# object and passed a keyword Ω that no basis accepted; fixed with a
# self-contained local basis closure.)
L = p -> begin
    np = length(p) ÷ 3                # parameters per group (integer division)
    β = p[1:np]'                      # 1×np row of weights
    M = p[np+1:2np]                   # centres μ
    Σ = p[2np+1:end]                  # widths σ
    # Local basis closure — independent of the global ψ
    ψp = x -> [exp(-0.5 * (x - μ)^2 / σ^2) for (μ, σ) in zip(M, Σ)]
    Φ = ψp.(X) |> v -> reduce(hcat, v)
    return norm(Y - β * Φ)
end
#
# Optimization using BBO: minimize the loss L over the 9-dimensional
# search box p_range.  TraceMode = :silent suppresses progress output.
#
res_bb = bboptimize(L; SearchRange = p_range, TraceMode = :silent);
#
# Unpacking results: best parameter vector found and its loss value.
#
p_bb = best_candidate(res_bb)
L_bb = best_fitness(res_bb);
#
# Unpack the optimized parameters: weights, centres, widths (3 each).
np = length(p_bb) ÷ 3
β_bb = p_bb[1:np]'
M_bb = p_bb[np+1:2np]
Σ_bb = p_bb[2np+1:end]
# Basis built from the optimized (μ, σ) pairs.  (The transcribed original
# stored a `zip` object and called ψ with a keyword Ω that no visible
# definition accepted; fixed with a dedicated basis function.)
ψ_bb(x) = [exp(-0.5 * (x - μ)^2 / σ^2) for (μ, σ) in zip(M_bb, Σ_bb)]
#
# Fitted model evaluated on the grid x; pass β explicitly.
f(x; β=β) = β * ψ_bb(x)
ybbgauss = f.(x; β=β_bb) |> v -> reduce(vcat, v);
I hope the code is readable.