Some code I wrote in May for MLJ.jl, a grab-bag of regression error metrics in Julia:
# Elementwise signed error, ŷ - y
function err(ŷ, y)
    return ŷ - y
end
# Elementwise percentage error; entries where |y| ≤ tol are dropped to avoid dividing by (near) zero
function perr(ŷ, y; tol = eps())
    e = err(ŷ, y)
    keep = abs.(y) .> tol
    return 100 .* e[keep] ./ y[keep]
end
using Statistics  # mean and median are used below
# Aggregate error measures built from err via |> pipelines
mse(ŷ, y)   = err(ŷ, y) |> x -> x.^2    |> mean              # mean squared error
rmse(ŷ, y)  = err(ŷ, y) |> x -> x.^2    |> mean   |> sqrt    # root mean squared error
mae(ŷ, y)   = err(ŷ, y) |> x -> abs.(x) |> mean              # mean absolute error
mdae(ŷ, y)  = err(ŷ, y) |> x -> abs.(x) |> median            # median absolute error
rmdse(ŷ, y) = err(ŷ, y) |> x -> x.^2    |> median |> sqrt    # root median squared error
maxae(ŷ, y) = err(ŷ, y) |> x -> abs.(x) |> maximum           # maximum absolute error
# Tutorial-style root mean square error, written with function composition (∘); equivalent to rmse above
rmse2(ŷ, y) = (sqrt ∘ mean ∘ (x -> x.^2) ∘ err)(ŷ, y)
# Tutorial-style mean absolute error via composition; equivalent to mae above
mae2(ŷ, y) = (mean ∘ (x -> abs.(x)) ∘ err)(ŷ, y)
# Root mean squared log error; assumes ŷ and y are strictly positive
rmsl(ŷ, y) = err(log.(ŷ), log.(y)) |> x -> x.^2 |> mean |> sqrt
# Same with a +1 shift inside the log, so zero values are allowed
rmslp1(ŷ, y) = err(log.(ŷ .+ 1), log.(y .+ 1)) |> x -> x.^2 |> mean |> sqrt
rss(ŷ, y) = err(ŷ, y) |> x -> x.^2 |> sum                   # residual sum of squares, AKA SSE
tss(ŷ, y) = err(zero(y) .+ mean(y), y) |> x -> x.^2 |> sum  # total sum of squares
ess(ŷ, y) = err(ŷ, zero(y) .+ mean(y)) |> x -> x.^2 |> sum  # explained sum of squares
# The same three quantities written directly, without going through err
tss2(ŷ, y) = mean(y) .- y |> x -> x.^2 |> sum
ess2(ŷ, y) = mean(y) .- ŷ |> x -> x.^2 |> sum
rss2(ŷ, y) = y .- ŷ |> x -> x.^2 |> sum
rsq2(ŷ, y) = ess2(ŷ, y) / tss2(ŷ, y)
# Note: ESS/TSS equals the usual R² = 1 - RSS/TSS only for a least-squares linear
# model with an intercept; a quick check of that follows.
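# A minimal verification sketch of the note above. x_chk, y_chk, and the least-squares
# fit below are made-up illustration code, not part of the original snippet.
x_chk = collect(1.0:10.0)
y_chk = 3.0 .* x_chk .+ 2.0 .+ 0.1 .* sin.(x_chk)   # deterministic wiggle instead of random noise
X_chk = [ones(length(x_chk)) x_chk]                 # design matrix with an intercept column
ŷ_chk = X_chk * (X_chk \ y_chk)                     # ordinary least-squares fitted values
# For such a fit TSS = ESS + RSS, so the two R² formulations coincide:
rsq2(ŷ_chk, y_chk) ≈ 1 - rss2(ŷ_chk, y_chk) / tss2(ŷ_chk, y_chk)   # true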
# Mean Percentage Error (MPE) is a measure of bias: positive and negative percentage errors cancel
mpe(ŷ, y)    = perr(ŷ, y) |> mean                          # mean percentage error
mape(ŷ, y)   = perr(ŷ, y) |> x -> abs.(x) |> mean          # mean absolute percentage error
mdape(ŷ, y)  = perr(ŷ, y) |> x -> abs.(x) |> median        # median absolute percentage error
rmspe(ŷ, y)  = perr(ŷ, y) |> x -> x.^2 |> mean |> sqrt     # root mean squared percentage error
rmdspe(ŷ, y) = perr(ŷ, y) |> x -> x.^2 |> median |> sqrt   # root median squared percentage error
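# A minimal usage sketch; ŷ_ex and y_ex are made-up example vectors, not from the original code.
y_ex = [3.0, -0.5, 2.0, 7.0]
ŷ_ex = [2.5, 0.0, 2.0, 8.0]
rmse(ŷ_ex, y_ex)    # ≈ 0.612
mae(ŷ_ex, y_ex)     # 0.5
mape(ŷ_ex, y_ex)    # ≈ 32.7 (percentage points)
mpe(ŷ_ex, y_ex)     # ≈ -25.6; over- and under-predictions partially cancel, illustrating the bias reading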