Solving PDEs with ModelingToolkit and NeuralPDE
Solving Poisson PDE Systems
\[
\partial^{2}_{x}u(x,y) + \partial^{2}_{y}u(x,y) = -\sin (\pi x) \sin (\pi y)
\]
with boundary conditions
\[\begin{split}
\begin{align}
u(0, y) &= 0 \\
u(1, y) &= 0 \\
u(x, 0) &= 0 \\
u(x, 1) &= 0 \\
\end{align}
\end{split}\]
where
\(x ∈ [0, 1], y ∈ [0, 1]\)
using NeuralPDE
using Lux
using Plots
using Optimization
using OptimizationOptimJL
using ModelingToolkit
using ModelingToolkit: Interval
[ Info: Precompiling IJuliaExt [2f4121a4-3b3a-5ce6-9c5e-1f2673ce168a]
# Independent variables x, y and the unknown u; `u(..)` declares u as a
# symbolic function whose arguments are supplied later (here u(x, y)).
@variables x y u(..)
# Second-order partial-derivative operators ∂²/∂x² and ∂²/∂y².
Dxx = Differential(x)^2
Dyy = Differential(y)^2
Differential(y) ∘ Differential(y)
The 2D PDE equation
# Poisson equation on the unit square: ∂²u/∂x² + ∂²u/∂y² = -sin(πx) sin(πy).
eq = Dxx(u(x, y)) + Dyy(u(x, y)) ~ -sinpi(x) * sinpi(y)
\[ \begin{equation}
\frac{\mathrm{d}}{\mathrm{d}y} \frac{\mathrm{d}}{\mathrm{d}y} u\left( x, y \right) + \frac{\mathrm{d}}{\mathrm{d}x} \frac{\mathrm{d}}{\mathrm{d}x} u\left( x, y \right) = - \mathrm{sinpi}\left( x \right) \mathrm{sinpi}\left( y \right)
\end{equation}
\]
Boundary conditions
# Homogeneous Dirichlet boundary conditions: u vanishes on all four
# edges of the unit square.
bcs = [u(0, y) ~ 0.0, u(1, y) ~ 0.0, u(x, 0) ~ 0.0, u(x, 1) ~ 0.0]
\[\begin{split} \begin{align}
u\left( 0, y \right) =& 0 \\
u\left( 1, y \right) =& 0 \\
u\left( x, 0 \right) =& 0 \\
u\left( x, 1 \right) =& 0
\end{align}
\end{split}\]
Space domains
# Spatial domain: the unit square [0, 1] × [0, 1].
domains = [
x ∈ Interval(0.0, 1.0),
y ∈ Interval(0.0, 1.0)
]
2-element Vector{Symbolics.VarDomainPairing}:
Symbolics.VarDomainPairing(x, 0.0 .. 1.0)
Symbolics.VarDomainPairing(y, 0.0 .. 1.0)
Build a neural network for the PDE solver.
# Number of network inputs: the two spatial coordinates (x, y).
dim = 2
# Fully-connected network 2 → 16 → 16 → 1 with sigmoid hidden activations;
# the single output approximates u(x, y).
chain = Lux.Chain(Dense(dim, 16, Lux.σ), Dense(16, 16, Lux.σ), Dense(16, 1))
Chain(
layer_1 = Dense(2 => 16, sigmoid_fast), # 48 parameters
layer_2 = Dense(16 => 16, sigmoid_fast), # 272 parameters
layer_3 = Dense(16 => 1), # 17 parameters
) # Total: 337 parameters,
# plus 0 states.
The discretization method uses `PhysicsInformedNN()` (PINN).
# Training-grid spacing along each dimension.
dx = 0.05
# PINN discretization: the PDE residual and boundary conditions are
# enforced on a regular grid of step dx via GridTraining.
discretization = PhysicsInformedNN(chain, GridTraining(dx))
PhysicsInformedNN{GridTraining{Float64}, Nothing, NeuralPDE.Phi{Chain{@NamedTuple{layer_1::Dense{true, typeof(sigmoid_fast), typeof(glorot_uniform), typeof(zeros32)}, layer_2::Dense{true, typeof(sigmoid_fast), typeof(glorot_uniform), typeof(zeros32)}, layer_3::Dense{true, typeof(identity), typeof(glorot_uniform), typeof(zeros32)}}}, @NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}, layer_3::@NamedTuple{}}}, typeof(NeuralPDE.numeric_derivative), Bool, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, @NamedTuple{}}}(Chain(), GridTraining{Float64}(0.05), nothing, NeuralPDE.Phi{Chain{@NamedTuple{layer_1::Dense{true, typeof(sigmoid_fast), typeof(glorot_uniform), typeof(zeros32)}, layer_2::Dense{true, typeof(sigmoid_fast), typeof(glorot_uniform), typeof(zeros32)}, layer_3::Dense{true, typeof(identity), typeof(glorot_uniform), typeof(zeros32)}}}, @NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}, layer_3::@NamedTuple{}}}(Chain(), (layer_1 = NamedTuple(), layer_2 = NamedTuple(), layer_3 = NamedTuple())), NeuralPDE.numeric_derivative, false, nothing, nothing, nothing, LogOptions(50), [1], true, false, Base.Pairs{Symbol, Union{}, Tuple{}, @NamedTuple{}}())
Next we build our PDE system and discretize it. Because this system is time-invariant, the result is an `OptimizationProblem`.
# Assemble the symbolic PDE system: equation, boundary conditions, domains,
# independent variables [x, y], and the dependent variable u(x, y).
@named pde_system = PDESystem(eq, bcs, domains, [x, y], [u(x, y)])
# Lower the symbolic system into a numerical OptimizationProblem whose
# parameters are the network weights.
prob = discretize(pde_system, discretization)
OptimizationProblem. In-place: true
u0: ComponentVector{Float64}(layer_1 = (weight = [0.35971200466156006 -0.2037937194108963; 0.1393420696258545 0.5225875377655029; … ; -0.13923890888690948 0.18260620534420013; 0.4451419413089752 0.1468936800956726], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_2 = (weight = [-0.4216949939727783 -0.06320850551128387 … 0.16946706175804138 -0.22122304141521454; -0.11275719851255417 -0.2728542983531952 … -0.1621120572090149 0.2045103758573532; … ; 0.15988630056381226 0.09907838702201843 … -0.2601696848869324 -0.2974882423877716; -0.15108203887939453 -0.2304273545742035 … -0.0949699729681015 0.025790985673666], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_3 = (weight = [0.17299450933933258 0.5610336065292358 … -0.5511547923088074 -0.5100366473197937], bias = [0.0;;]))
# BFGS quasi-Newton optimizer (from Optim.jl via OptimizationOptimJL).
alg = OptimizationOptimJL.BFGS()
BFGS{LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Nothing, Nothing, Flat}(LineSearches.InitialStatic{Float64}
alpha: Float64 1.0
scaled: Bool false
, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}
delta: Float64 0.1
sigma: Float64 0.9
alphamax: Float64 Inf
rho: Float64 5.0
epsilon: Float64 1.0e-6
gamma: Float64 0.66
linesearchmax: Int64 50
psi3: Float64 0.1
display: Int64 0
mayterminate: Base.RefValue{Bool}
, nothing, nothing, Flat())
Callback function
# Loss history, recorded once per optimizer iteration for plotting later.
larr = Float64[]
# Invoked by Optimization.solve after each iteration; returning `false`
# tells the solver not to stop early.
callback = (state, loss) -> begin
    push!(larr, loss)
    return false
end
#1 (generic function with 1 method)
Solve the problem.
# Train the network for up to 1500 BFGS iterations, logging the loss
# through `callback`.
res = Optimization.solve(prob, alg, callback = callback, maxiters=1500)
# Convergence plot of the recorded loss on a log scale.
plot(larr, xlabel="Iters", title="Loss", yscale=:log10, lab="Loss")
Plot the predicted solution of the PDE and compare it with the analytical solution to see the relative error.
# Evaluate the trained network on a fine grid (10× the training resolution)
# and compare against the analytic solution
#   u(x, y) = sin(πx) sin(πy) / (2π²),
# which satisfies Δu = -sin(πx) sin(πy) with zero boundary values.
xs, ys = [infimum(d.domain):dx/10:supremum(d.domain) for d in domains]
analytic_sol_func(x, y) = (sinpi(x) * sinpi(y)) / (2pi^2)
# Trained network evaluator; phi([x, y], params) returns a 1-element output.
phi = discretization.phi
# The flat comprehension varies `y` fastest, so Julia's column-major reshape
# puts y along dimension 1 and x along dimension 2: the matrix shape must be
# (length(ys), length(xs)). The original (length(xs), length(ys)) only worked
# because both grids happen to have the same length; this form is correct for
# unequal grid sizes too (and matches the rows-are-y convention Plots expects).
u_predict = reshape([first(phi([x, y], res.u)) for x in xs for y in ys],
    (length(ys), length(xs)))
u_real = reshape([analytic_sol_func(x, y) for x in xs for y in ys],
    (length(ys), length(xs)))
# Pointwise absolute error between prediction and analytic solution.
diff_u = abs.(u_predict .- u_real)
201×201 Matrix{Float64}:
3.70554e-5 3.29239e-5 2.89012e-5 2.49825e-5 … 5.02658e-5 5.7982e-5
3.68977e-5 3.29307e-5 2.90651e-5 2.52963e-5 5.47063e-5 6.23888e-5
3.67626e-5 3.29521e-5 2.92359e-5 2.56095e-5 5.89002e-5 6.65435e-5
3.66458e-5 3.29841e-5 2.94099e-5 2.59189e-5 6.28553e-5 7.0454e-5
3.65432e-5 3.30229e-5 2.95836e-5 2.62213e-5 6.65789e-5 7.41281e-5
3.64508e-5 3.30649e-5 2.97537e-5 2.65136e-5 … 7.00784e-5 7.75733e-5
3.63649e-5 3.31067e-5 2.99172e-5 2.67932e-5 7.33607e-5 8.07968e-5
3.62821e-5 3.31451e-5 3.00713e-5 2.70574e-5 7.64326e-5 8.38056e-5
3.61992e-5 3.31773e-5 3.02132e-5 2.7304e-5 7.93006e-5 8.66066e-5
3.61131e-5 3.32005e-5 3.03406e-5 2.75307e-5 8.19712e-5 8.92063e-5
3.60211e-5 3.32122e-5 3.04513e-5 2.77356e-5 … 8.44503e-5 9.16111e-5
3.59204e-5 3.32101e-5 3.05431e-5 2.7917e-5 8.6744e-5 9.38271e-5
3.58089e-5 3.3192e-5 3.06142e-5 2.80732e-5 8.8858e-5 9.58602e-5
⋮ ⋱ ⋮
9.91457e-7 2.3647e-6 3.698e-6 4.99244e-6 7.68185e-5 7.89867e-5
1.59197e-6 3.00006e-6 4.36572e-6 5.69016e-6 … 8.29704e-5 8.53556e-5
2.23181e-6 3.67617e-6 5.07547e-6 6.43107e-6 8.91654e-5 9.17697e-5
2.91032e-6 4.39252e-6 5.82691e-6 7.21497e-6 9.53975e-5 9.82228e-5
3.62675e-6 5.14851e-6 6.61956e-6 8.04152e-6 0.00010166 0.000104709
4.38017e-6 5.94338e-6 7.45283e-6 8.91028e-6 0.000107948 0.00011122
5.16954e-6 6.77625e-6 8.326e-6 9.8207e-6 … 0.000114253 0.000117751
5.99364e-6 7.64609e-6 9.23821e-6 1.07721e-5 0.000120569 0.000124293
6.85111e-6 8.55172e-6 1.01885e-5 1.17636e-5 0.000126889 0.00013084
7.74044e-6 9.49181e-6 1.11756e-5 1.27942e-5 0.000133205 0.000137383
8.65993e-6 1.04649e-5 1.21984e-5 1.3863e-5 0.000139509 0.000143914
9.60772e-6 1.14692e-5 1.32552e-5 1.49685e-5 … 0.000145794 0.000150426
# Side-by-side filled-contour plots: analytic solution, PINN prediction,
# and the pointwise absolute error.
plt_exact = plot(xs, ys, u_real, linetype = :contourf, title = "analytic");
plt_nn = plot(xs, ys, u_predict, linetype = :contourf, title = "predict");
plt_err = plot(xs, ys, diff_u, linetype = :contourf, title = "error");
plot(plt_exact, plt_nn, plt_err)