Solving PDEs with ModelingToolkit and NeuralPDE
Based on the NeuralPDE.jl tutorial Solving Poisson PDE Systems (https://docs.sciml.ai/NeuralPDE/stable/tutorials/pdesystem/).
\[
\partial^{2}_{x}u(x,y) + \partial^{2}_{y}u(x,y) = -\sin (\pi x) \sin (\pi y)
\]
with boundary conditions
\[\begin{split}
\begin{align}
u(0, y) &= 0 \\
u(1, y) &= 0 \\
u(x, 0) &= 0 \\
u(x, 1) &= 0 \\
\end{align}
\end{split}\]
where
\(x ∈ [0, 1], y ∈ [0, 1]\)
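This problem has a closed-form solution, used at the end of the notebook to measure the network's error; substituting it into the equation verifies it directly:
\[
u(x, y) = \frac{\sin (\pi x) \sin (\pi y)}{2 \pi^{2}}, \qquad
\partial^{2}_{x}u + \partial^{2}_{y}u = -2 \pi^{2} \cdot \frac{\sin (\pi x) \sin (\pi y)}{2 \pi^{2}} = -\sin (\pi x) \sin (\pi y)
\]
and it vanishes on all four edges, satisfying the boundary conditions.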
using NeuralPDE
using Lux
using Optimization
using OptimizationOptimJL
using ModelingToolkit
using DomainSets
using LineSearches
using Plots
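If any of these packages are missing from the active environment, they can be installed first (a minimal sketch; all are registered in the General registry):
using Pkg
Pkg.add(["NeuralPDE", "Lux", "Optimization", "OptimizationOptimJL",
    "ModelingToolkit", "DomainSets", "LineSearches", "Plots"])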
Define the 2D Poisson equation symbolically:
@parameters x y
@variables u(..)
Dxx = Differential(x)^2
Dyy = Differential(y)^2
eq = Dxx(u(x, y)) + Dyy(u(x, y)) ~ -sinpi(x) * sinpi(y)
\[ \begin{equation}
\frac{\mathrm{d}}{\mathrm{d}y} \frac{\mathrm{d}}{\mathrm{d}y} u\left( x, y \right) + \frac{\mathrm{d}}{\mathrm{d}x} \frac{\mathrm{d}}{\mathrm{d}x} u\left( x, y \right) = - \sin\left( \pi x \right) \sin\left( \pi y \right)
\end{equation}
\]
Boundary conditions
bcs = [
u(0, y) ~ 0.0,
u(1, y) ~ 0.0,
u(x, 0) ~ 0.0,
u(x, 1) ~ 0.0
]
\[\begin{split} \begin{align}
u\left( 0, y \right) &= 0 \\
u\left( 1, y \right) &= 0 \\
u\left( x, 0 \right) &= 0 \\
u\left( x, 1 \right) &= 0
\end{align}
\end{split}\]
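All four conditions here are Dirichlet, stated directly on u. A derivative (Neumann) condition would be written with Differential instead; a hypothetical example, not part of this problem:
Dx = Differential(x)
neumann_bc = Dx(u(0, y)) ~ 0.0  # hypothetical: zero normal derivative on the x = 0 edge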
Space domains
domains = [
x ∈ DomainSets.Interval(0.0, 1.0),
y ∈ DomainSets.Interval(0.0, 1.0)
]
2-element Vector{Symbolics.VarDomainPairing}:
Symbolics.VarDomainPairing(x, 0.0 .. 1.0)
Symbolics.VarDomainPairing(y, 0.0 .. 1.0)
Build a neural network for the PDE solver.
Input: 2 dimensions (x and y).
Hidden layers: two dense layers of 16 neurons each, with sigmoid (σ) activation.
Output: a single value, u(x, y).
dim = 2
chain = Lux.Chain(Dense(dim, 16, Lux.σ), Dense(16, 16, Lux.σ), Dense(16, 1))
Chain(
layer_1 = Dense(2 => 16, σ), # 48 parameters
layer_2 = Dense(16 => 16, σ), # 272 parameters
layer_3 = Dense(16 => 1), # 17 parameters
) # Total: 337 parameters,
# plus 0 states.
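As a quick sanity check (not part of the original tutorial), the parameter count reported above can be verified by initializing the network directly:
using Random
ps, st = Lux.setup(Random.default_rng(), chain)  # initialize parameters and states
Lux.parameterlength(chain)  # should report 337, matching the summary above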
The discretization method uses PhysicsInformedNN() (a physics-informed neural network, PINN) with a QuadratureTraining strategy.
dx = 0.05  # grid spacing; used later to sample the solution for plotting
discretization = PhysicsInformedNN(chain, QuadratureTraining(; batch = 200, abstol = 1e-6, reltol = 1e-6))
NeuralPDE.PhysicsInformedNN{Lux.Chain{…}, NeuralPDE.QuadratureTraining{Float64, Integrals.CubatureJLh}, …}((layer_1 = Dense(2 => 16, σ), layer_2 = Dense(16 => 16, σ), layer_3 = Dense(16 => 1)), QuadratureTraining{Float64, Integrals.CubatureJLh}(Integrals.CubatureJLh(0), 1.0e-6, 1.0e-6, 1000, 200), …)
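QuadratureTraining adaptively integrates the PDE residual, which is accurate but costly. If runtime matters more than accuracy, NeuralPDE also provides a fixed-grid sampling strategy; a hedged alternative (not used in this run) that reuses the dx defined above:
discretization_grid = PhysicsInformedNN(chain, GridTraining(dx))  # sample the loss on a fixed grid with spacing dx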
Build the PDE system and discretize it.
@named pde_system = PDESystem(eq, bcs, domains, [x, y], [u(x, y)])
prob = discretize(pde_system, discretization)
OptimizationProblem. In-place: true
u0: ComponentVector{Float64}(layer_1 = (weight = [0.01593761146068573 0.9437114596366882; 0.38433703780174255 0.9814639091491699; … ; -0.2710345983505249 0.3668804466724396; 0.4747437536716461 0.014643167145550251], bias = [0.2955634593963623, 0.3480769097805023, 0.1187773197889328, 0.610325813293457, -0.5269585847854614, 0.6098943948745728, -0.6907129883766174, 0.2754194438457489, 0.5037071704864502, 0.43409743905067444, -0.4830687940120697, 0.518242359161377, 0.37672683596611023, -0.3884975016117096, 0.20888172090053558, -0.6359825134277344]), layer_2 = (weight = [0.16647011041641235 -0.3496563136577606 … 0.10431768000125885 0.3861742615699768; -0.20201463997364044 0.02434389479458332 … 0.25018128752708435 0.386810302734375; … ; -0.1645582914352417 0.10840312391519547 … -0.3115653991699219 -0.34907016158103943; -0.027325673028826714 0.10249324887990952 … -0.1554419994354248 -0.43279141187667847], bias = [-0.19360753893852234, 0.018820255994796753, 0.12277039885520935, -0.2002711296081543, -0.08724513649940491, 0.24476459622383118, 0.1548592448234558, 0.2414514124393463, 0.19891619682312012, 0.18439680337905884, 0.09407466650009155, -0.2425697147846222, -0.050651997327804565, 0.17767742276191711, 0.23566216230392456, -0.11394006013870239]), layer_3 = (weight = [-0.050057552754879 0.3356141448020935 … 0.18237975239753723 -0.21478138864040375], bias = [0.13261756300926208]))
Callback function to record the loss
lossrecord = Float64[]
callback = function (p, l)
push!(lossrecord, l)
return false
end
#2 (generic function with 1 method)
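Recording losses silently gives no feedback during a long run. A sketch of an extended callback (same signature as above) that also prints occasional progress:
callback = function (p, l)
    push!(lossrecord, l)
    # report every 100th iteration so long runs show progress
    length(lossrecord) % 100 == 0 && println("iteration $(length(lossrecord)): loss = $l")
    return false  # returning true would stop the optimization early
end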
Solve the problem with LBFGS. It may take a long time (the run below took about 37 minutes).
opt = OptimizationOptimJL.LBFGS(linesearch = LineSearches.BackTracking())
@time res = Optimization.solve(prob, opt, callback = callback, maxiters=1000)
2211.984725 seconds (8.92 G allocations: 541.665 GiB, 2.27% gc time, 5.32% compilation time: <1% of which was recompilation)
retcode: Success
u: ComponentVector{Float64}(layer_1 = (weight = [-0.4963385469661655 1.2791340849597663; -1.1308020899010733 2.2244527509173753; … ; -0.9385086901357104 -0.1629861466293824; -0.6895669158081396 -0.44531009752049705], bias = [-0.5828213017039796, 0.044852340027283034, 0.8485225063717241, 3.0391887982952137, -1.052376147412321, 0.6003783491749841, -2.7678566104935367, -1.2040360062670283, -0.44651011699548676, 0.7972014904756165, -0.4699799679150705, 0.5484686086216702, 0.8740018139638475, 1.020619345652977, 1.1228600249214342, -0.3183623851503068]), layer_2 = (weight = [0.5322526655793225 0.06877510299386397 … -0.01131934295432979 0.40238574625856016; 0.14817045706274945 -0.12605331291129 … -0.252767057089871 0.1213091893377981; … ; 0.02135001133758635 -0.10063160691188061 … -0.6229379187748646 -0.5576484869980327; 0.48220017566961154 0.5337802094928503 … 0.16113300348271262 0.1748362916091515], bias = [-0.026560594337454266, 0.16457362320294122, 0.18206044161573284, 0.17274370373734724, 0.13607292481051525, 0.4853661174782178, -0.02493195369338051, 0.6528374490189502, 0.30740860213948845, 0.28376467527334615, 0.33252037081166774, -0.5017135717851026, -0.6460856388047476, -0.12800320809124255, 0.251757289164014, 0.6047349267731514]), layer_3 = (weight = [-0.3139722008682866 1.0597854465252863 … 0.13952958112074246 -0.8105734824380287], bias = [0.12388887909435252]))
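If the loss curve below suggests the optimizer had not yet converged, the run can be resumed from the trained parameters instead of restarting; a minimal sketch using remake from SciMLBase (installed as a dependency of the packages above):
using SciMLBase: remake
prob2 = remake(prob, u0 = res.u)  # warm-start from the trained parameters
res2 = Optimization.solve(prob2, opt, callback = callback, maxiters = 500)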
plot(lossrecord, xlabel="Iters", yscale=:log10, ylabel="Loss", lab=false)
Plot the predicted solution of the PDE and compare it with the analytical solution to see the absolute error.
xs, ys = [DomainSets.infimum(d.domain):dx/10:DomainSets.supremum(d.domain) for d in domains]
analytic_sol_func(x,y) = (sinpi(x)*sinpi(y))/(2pi^2)
phi = discretization.phi
u_predict = reshape([first(phi([x, y], res.u)) for x in xs for y in ys], (length(xs), length(ys)))
u_real = reshape([analytic_sol_func(x, y) for x in xs for y in ys], (length(xs), length(ys)))
diff_u = abs.(u_predict .- u_real)
201×201 Matrix{Float64}:
 0.00191279   0.00187143   0.00183002   …  0.00188942   0.00197524
 ⋮                                      ⋱               ⋮
 0.000300629  0.000251124  0.000203115  …  0.000597722  0.000589737
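Scalar summaries are often more convenient than the raw error matrix; a short check using the LinearAlgebra standard library:
using LinearAlgebra
maximum(diff_u)  # worst-case absolute error over the grid
norm(u_predict .- u_real) / norm(u_real)  # relative L2 error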
p1 = plot(xs, ys, u_real, linetype=:contourf, title = "analytic");
p2 = plot(xs, ys, u_predict, linetype=:contourf, title = "predicted");
p3 = plot(xs, ys, diff_u, linetype=:contourf, title = "error");
plot(p1, p2, p3)
This notebook was generated using Literate.jl.