@roflmaostc @Tomas_Pevny Here is my entire code base. Thank you for your help.
using Revise
using MAT
using Flux
using Flux.Tracker
using Random
Random.seed!(1234)
push!(LOAD_PATH, "/Users/eklavya/WORK_RAJ/ML_JL/pinn_julia/src")
using pinn_subroutine
# Load the Burgers' equation benchmark data: spatial grid `x`, time grid `t`,
# and the reference solution `usol` from the MAT file.
fh = matopen("/Users/eklavya/WORK_RAJ/ML_JL/pinn_julia/data/burgers_shock.mat")
x=read(fh, "x")
t=read(fh, "t")
# Meshgrid-style outer products: every (x, t) combination of the two grids.
# NOTE(review): assumes `x` and `t` come out of the .mat file as column
# vectors — confirm, otherwise the adjoints below transpose the wrong way.
X=x'.* ones(length(t))
T=ones(length(x))' .* t
# Flatten the grids into 1-D coordinate vectors, one entry per collocation point.
xu = collect(Iterators.flatten(X))
tu = collect(Iterators.flatten(T))
uexact = real(read(fh, "usol"))
# Architecture: 2 inputs (x, t), two hidden layers of 20 units, 1 output u.
layers = [2, 20, 20, 1]
w,b = initialize_nn(layers)
# NOTE(review): `X` is rebound here, shadowing the meshgrid above — consider a
# distinct name. This transposed design matrix is not used by the calls below.
X = hcat(xu, tu)
X = X'
# Forward pass of the network, then the ∂u/∂x and ∂u/∂t terms for the PDE
# residual via reverse-mode AD (Flux Tracker).
u_nn = net_u(xu,tu, w, b; act=σ)
u_x, u_t = net_f(xu, tu, w, b; act=σ)
Subroutines (the functions loaded via `using pinn_subroutine`):
"""
    net_f(x, t, w, b; act=σ)

Compute the partial derivatives ∂u/∂x and ∂u/∂t of the network output
`net_u(x, t, w, b)` via reverse-mode AD (Flux Tracker).

`x` and `t` must be tracked parameters (e.g. created with `param`) so that
`Tracker.grad` can read their accumulated gradients after the backward pass.

Returns the tuple `(u_x, u_t)`.
"""
function net_f(x, t, w, b; act=σ)
    # BUG FIX: forward the caller's `act` instead of hard-coding σ, so
    # net_f(...; act=tanh) actually differentiates the tanh network.
    u_nn = net_u(x, t, w, b; act=act)  # u_nn has shape (N, 1): one row per point
    # BUG FIX: the backward seed must match u_nn's shape. The original passed a
    # length-N vector against an (N, 1) output; seeding with ones(size(u_nn))
    # back-propagates ∑ᵢ uᵢ, which yields per-point ∂u/∂x and ∂u/∂t since each
    # output row depends only on its own (xᵢ, tᵢ).
    Tracker.back!(u_nn, ones(size(u_nn)))
    u_x = Tracker.grad(x)
    u_t = Tracker.grad(t)
    return u_x, u_t
end
"""
    xavier_init(l)

Xavier/Glorot initialization for one dense layer, where `l = [in_dim, out_dim]`.

Returns `(w, b)`: `w` is an `in_dim × out_dim` weight matrix with independent
draws from `Normal(0, sqrt(2 / (in_dim + out_dim)))`, and `b` is a
`1 × out_dim` zero bias row.
"""
function xavier_init(l)
    in_dim = l[1]
    out_dim = l[2]
    xavier_stddev = sqrt(2.0 / (in_dim + out_dim))
    # BUG FIX: the original drew a SINGLE random scalar and replicated it across
    # the whole matrix, so every weight was identical (the network could never
    # break symmetry). Each weight must be an independent draw. Also,
    # Truncated(Normal(μ, σ), -Inf, Inf) is a no-op truncation, so plain Base
    # `randn` scaled by the stddev is the same distribution — and removes the
    # implicit Distributions dependency that was never imported.
    w = xavier_stddev .* randn(in_dim, out_dim)
    b = zeros(1, out_dim)
    return w, b
end
"""
    initialize_nn(layer)

Build the parameters of a fully-connected network whose layer sizes are given
by `layer`. For each consecutive pair of sizes, a weight matrix and bias row
are created with `xavier_init`.

Returns `(w, b)`: two vectors holding one weight matrix / bias row per layer.
"""
function initialize_nn(layer)
    weights = Any[]
    biases = Any[]
    # Pair up consecutive layer sizes: (n₁,n₂), (n₂,n₃), …
    for (nin, nout) in zip(layer[1:end-1], layer[2:end])
        wl, bl = xavier_init([nin, nout])
        push!(weights, wl)
        push!(biases, bl)
    end
    return weights, biases
end
"""
    net_u(x, t, w, b; act=σ)

Evaluate the fully-connected network on the inputs `x` and `t`, stacked as
the two columns of the design matrix. The activation `act` is applied
element-wise after every layer except the last, which stays linear.

Returns the network output, one row per input point.
"""
function net_u(x, t, w, b; act=σ)
    h = hcat(x, t)
    # Hidden layers: affine transform followed by the activation.
    for layer in 1:length(w)-1
        h = act.(h * w[layer] .+ b[layer])
    end
    # Output layer is linear — no activation on the final affine map.
    return h * w[end] .+ b[end]
end