Skip to content

Commit 54377c0

Browse files
authored
Merge pull request #141 from JuliaGeodynamics/bk-chmy
Integration with Chmy.jl
2 parents 4dcf238 + 115a1cc commit 54377c0

File tree

14 files changed

+589
-50
lines changed

14 files changed

+589
-50
lines changed

.github/workflows/CI.yml

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,12 @@ on:
66
branches:
77
- main
88
tags: '*'
9+
10+
# Cancel redundant CI tests automatically
11+
concurrency:
12+
group: ${{ github.workflow }}-${{ github.ref }}
13+
cancel-in-progress: true
14+
915
jobs:
1016
run_tests:
1117
name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }}
@@ -16,11 +22,12 @@ jobs:
1622
version:
1723
- '1.9'
1824
- '1.10'
19-
- '~1.11.0-0'
25+
- '1.11'
2026
- 'nightly'
2127
os:
2228
- ubuntu-latest
2329
- macOS-latest
30+
- macos-14
2431
- windows-latest
2532
arch:
2633
- x64

Project.toml

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "GeophysicalModelGenerator"
22
uuid = "3700c31b-fa53-48a6-808a-ef22d5a84742"
33
authors = ["Boris Kaus", "Marcel Thielmann"]
4-
version = "0.7.7"
4+
version = "0.7.8"
55

66
[deps]
77
Colors = "5ae59095-9a9b-59fe-a467-6f913c188581"
@@ -32,23 +32,26 @@ WhereTheWaterFlows = "ea906314-1493-4d22-a0af-f886a20c9fba"
3232
WriteVTK = "64499a7a-5c06-52f2-abe2-ccb03c286192"
3333

3434
[weakdeps]
35+
Chmy = "33a72cf0-4690-46d7-b987-06506c2248b9"
3536
GLMakie = "e9467ef8-e4e7-5192-8a1a-b1aee30e663a"
3637
GMT = "5752ebe1-31b9-557e-87aa-f909b540aa54"
3738
GridapGmsh = "3025c34a-b394-11e9-2a55-3fee550c04c8"
3839

3940
[extensions]
41+
Chmy_utils = "Chmy"
4042
GLMakie_Visualisation = "GLMakie"
4143
GMT_utils = "GMT"
4244
Gmsh_utils = "GridapGmsh"
4345

4446
[compat]
47+
Chmy = "0.1.20"
4548
Colors = "0.9 - 0.12"
4649
Downloads = "1"
4750
FFMPEG = "0.4"
4851
FileIO = "1"
49-
GDAL_jll = "300.900.0 - 301.900.0"
50-
GLMakie = "0.10"
51-
GMT = "1"
52+
GDAL_jll = "300.900.0 - 301.901.0"
53+
GLMakie = "0.8, 0.9, 0.10"
54+
GMT = "1.0 - 1.14"
5255
GeoParams = "0.2 - 0.6"
5356
Geodesy = "1"
5457
GeometryBasics = "0.1 - 0.4"
@@ -63,15 +66,17 @@ NearestNeighbors = "0.2 - 0.4"
6366
Parameters = "0.9 - 0.12"
6467
SpecialFunctions = "1.0, 2"
6568
StaticArrays = "1"
66-
WhereTheWaterFlows = "0.10"
69+
WhereTheWaterFlows = "0.10, 0.11"
6770
WriteVTK = "1"
6871
julia = "1.9"
6972

7073
[extras]
74+
Chmy = "33a72cf0-4690-46d7-b987-06506c2248b9"
7175
GLMakie = "e9467ef8-e4e7-5192-8a1a-b1aee30e663a"
7276
GMT = "5752ebe1-31b9-557e-87aa-f909b540aa54"
77+
KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c"
7378
StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3"
7479
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
7580

7681
[targets]
77-
test = ["Test", "GMT", "StableRNGs", "GridapGmsh"]
82+
test = ["Test", "GMT", "StableRNGs", "GridapGmsh", "Chmy","KernelAbstractions"]

docs/make.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,8 @@ makedocs(;
122122
"Gravity code" => "man/gravity_code.md",
123123
"Numerical model setups" => "man/geodynamic_setups.md",
124124
"LaMEM" => "man/lamem.md",
125-
"pTatin" => "man/lamem.md",
125+
"pTatin" => "man/ptatin.md",
126+
"Chmy" => "man/Tutorial_Chmy_MPI.md",
126127
"Profile Processing" => "man/profile_processing.md",
127128
"Gmsh" => "man/gmsh.md",
128129
"Movies" => "man/movies.md"

docs/src/man/Tutorial_Chmy_MPI.md

Lines changed: 160 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,160 @@
1+
```@meta
2+
EditURL = "../../../tutorials/Tutorial_Chmy_MPI.jl"
3+
```
4+
5+
# Create an initial model setup for Chmy and run it in parallel
6+
7+
## Aim
8+
In this tutorial, you will learn how to use [Chmy](https://github.com/PTsolvers/Chmy.jl) to perform a 2D diffusion simulation
9+
on one or multiple CPUs or GPUs.
10+
`Chmy` is a package that allows you to specify grids and fields and create finite difference simulations
11+
12+
## 1. Load Chmy and required packages
13+
14+
```julia
15+
using Chmy, Chmy.Architectures, Chmy.Grids, Chmy.Fields, Chmy.BoundaryConditions, Chmy.GridOperators, Chmy.KernelLaunch
16+
using KernelAbstractions
17+
using Printf
18+
using CairoMakie
19+
using GeophysicalModelGenerator
20+
```
21+
22+
In case you want to use GPUs, you need to sort out whether you have AMD or NVIDIA GPUs
23+
and load the package accordingly:
24+
using AMDGPU
25+
AMDGPU.allowscalar(false)
26+
using CUDA
27+
CUDA.allowscalar(false)
28+
29+
To run this in parallel you need to load this:
30+
31+
```julia
32+
using Chmy.Distributed
33+
using MPI
34+
MPI.Init()
35+
```
36+
37+
## 2. Define computational routines
38+
You need to specify compute kernel for the gradients:
39+
40+
```julia
41+
@kernel inbounds = true function compute_q!(q, C, χ, g::StructuredGrid, O)
42+
I = @index(Global, NTuple)
43+
I = I + O
44+
q.x[I...] = -χ * ∂x(C, g, I...)
45+
q.y[I...] = -χ * ∂y(C, g, I...)
46+
end
47+
```
48+
49+
You need to specify compute kernel to update the concentration
50+
51+
```julia
52+
@kernel inbounds = true function update_C!(C, q, Δt, g::StructuredGrid, O)
53+
I = @index(Global, NTuple)
54+
I = I + O
55+
C[I...] -= Δt * divg(q, g, I...)
56+
end
57+
```
58+
59+
And a main function is required:
60+
61+
```julia
62+
@views function main(backend=CPU(); nxy_l=(126, 126))
63+
arch = Arch(backend, MPI.COMM_WORLD, (0, 0))
64+
topo = topology(arch)
65+
me = global_rank(topo)
66+
67+
# geometry
68+
dims_l = nxy_l
69+
dims_g = dims_l .* dims(topo)
70+
grid = UniformGrid(arch; origin=(-2, -2), extent=(4, 4), dims=dims_g)
71+
launch = Launcher(arch, grid, outer_width=(16, 8))
72+
73+
##@info "mpi" me grid
74+
75+
nx, ny = dims_g
76+
# physics
77+
χ = 1.0
78+
# numerics
79+
Δt = minimum(spacing(grid))^2 / χ / ndims(grid) / 2.1
80+
# allocate fields
81+
C = Field(backend, grid, Center())
82+
P = Field(backend, grid, Center(), Int32) # phases
83+
84+
q = VectorField(backend, grid)
85+
C_v = (me==0) ? KernelAbstractions.zeros(CPU(), Float64, size(interior(C)) .* dims(topo)) : nothing
86+
87+
# Use the `GeophysicalModelGenerator` to set the initial conditions. Note that
88+
# you have to call this for a `Phases` and a `Temp` grid, which we call `C` here.
89+
add_box!(P,C,grid, xlim=(-1.0,1.0), zlim=(-1.0,1.0), phase=ConstantPhase(4), T=ConstantTemp(400))
90+
91+
# set BC's and updates the halo:
92+
bc!(arch, grid, C => Neumann(); exchange=C)
93+
94+
# visualisation
95+
fig = Figure(; size=(400, 320))
96+
ax = Axis(fig[1, 1]; aspect=DataAspect(), xlabel="x", ylabel="y", title="it = 0")
97+
plt = heatmap!(ax, centers(grid)..., interior(C) |> Array; colormap=:turbo)
98+
Colorbar(fig[1, 2], plt)
99+
# action
100+
nt = 100
101+
for it in 1:nt
102+
(me==0) && @printf("it = %d/%d \n", it, nt)
103+
launch(arch, grid, compute_q! => (q, C, χ, grid))
104+
launch(arch, grid, update_C! => (C, q, Δt, grid); bc=batch(grid, C => Neumann(); exchange=C))
105+
end
106+
KernelAbstractions.synchronize(backend)
107+
gather!(arch, C_v, C)
108+
if me == 0
109+
fig = Figure(; size=(400, 320))
110+
ax = Axis(fig[1, 1]; aspect=DataAspect(), xlabel="x", ylabel="y", title="it = 0")
111+
plt = heatmap!(ax, C_v; colormap=:turbo) # how to get the global grid for axes?
112+
Colorbar(fig[1, 2], plt)
113+
save("out_gather_$nx.png", fig)
114+
end
115+
return
116+
end
117+
```
118+
119+
In the code above, the part that calls `GMG` is:
120+
121+
```julia
122+
add_box!(P,C,grid, xlim=(-1.0,1.0), zlim=(-1.0,1.0), phase=ConstantPhase(4), T=ConstantTemp(400))
123+
```
124+
which works just like any of the other GMG functions.
125+
126+
## 3. Run the simulation on one CPU machine or GPU card:
127+
128+
Running the code on the CPU is done with this:
129+
130+
```julia
131+
n = 128
132+
main(; nxy_l=(n, n) .- 2)
133+
```
134+
135+
If you instead want to run this on AMD or NVIDIA GPUs, do this:
136+
137+
```julia
138+
# main(ROCBackend(); nxy_l=(n, n) .- 2)
139+
# main(CUDABackend(); nxy_l=(n, n) .- 2)
140+
```
141+
142+
And we need to finalize the simulation with
143+
144+
```julia
145+
MPI.Finalize()
146+
```
147+
148+
## 4. Run the simulation on an MPI-parallel machine
149+
If you want to run this on multiple cores, you will need to set up the [MPI.jl](https://github.com/JuliaParallel/MPI.jl) package,
150+
such that `mpiexecjl` is created on the command line.
151+
152+
You can then run it with:
153+
mpiexecjl -n 4 --project=. julia Tutorial_Chmy_MPI.jl
154+
155+
The full file can be downloaded [here](../../../tutorials/Tutorial_Chmy_MPI.jl)
156+
157+
---
158+
159+
*This page was generated using [Literate.jl](https://github.com/fredrikekre/Literate.jl).*
160+

ext/Chmy_utils.jl

Lines changed: 108 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,108 @@
1+
#helper functions to make GMG work with Chmy grids and fields
2+
module Chmy_utils
3+
4+
using Chmy, Chmy.Grids, Chmy.Fields
5+
6+
import GeophysicalModelGenerator: create_CartGrid, CartGrid, CartData
7+
import GeophysicalModelGenerator: add_box!, add_sphere!, add_ellipsoid!, add_cylinder!
8+
import GeophysicalModelGenerator: add_layer!, add_polygon!, add_slab!, add_stripes!, add_volcano!
9+
import GeophysicalModelGenerator: above_surface, below_surface
10+
11+
println("Loading Chmy-GMG tools")
12+
13+
"""
14+
CartGrid = create_CartGrid(grid::StructuredGrid; ylevel=0.0)
15+
16+
Creates a GMG `CartGrid` data structure from a `Chmy` grid object
17+
"""
18+
function create_CartGrid(grid::StructuredGrid; ylevel=0.0)
19+
20+
coord1D = Vector.(coords(grid, Vertex()))
21+
coord1D_cen = Vector.(coords(grid, Center()))
22+
N = length.(coord1D)
23+
L = extent(grid, Vertex())
24+
X₁ = origin(grid, Vertex())
25+
Δ = spacing(grid)
26+
ConstantΔ = false;
27+
if isa(grid, UniformGrid)
28+
ConstantΔ = true
29+
end
30+
if ndims(grid)==2
31+
# we need a special treatment of this, as all GMG routines work with 3D coordinates
32+
X₁ = (X₁[1], ylevel, X₁[2])
33+
L = (L[1], 0.0, L[2])
34+
Δ = (Δ[1], 0.0, Δ[2])
35+
N = (N[1],1,N[2])
36+
coord1D = (coord1D[1], [0.0], coord1D[2])
37+
coord1D_cen = (coord1D_cen[1], [0.0], coord1D_cen[2])
38+
end
39+
Xₙ = X₁ .+ L
40+
41+
42+
return CartGrid(ConstantΔ,N,Δ,L,X₁,Xₙ,coord1D, coord1D_cen)
43+
end
44+
45+
# all functions to be ported
46+
function_names = (:add_box!, :add_sphere!, :add_ellipsoid!, :add_cylinder!, :add_layer!, :add_polygon!, :add_slab!, :add_stripes!, :add_volcano!)
47+
48+
for fn in function_names
49+
50+
@eval begin
51+
"""
52+
$($fn)( Phase::Field,
53+
Temp::Field,
54+
Grid::StructuredGrid; # required input
55+
kwargs...)
56+
57+
Sets `$($fn)` function for `Chmy` fields and grids.
58+
"""
59+
function $fn( Phase::Field,
60+
Temp::Field,
61+
Grid::StructuredGrid; # required input
62+
kwargs...)
63+
64+
CartGrid = create_CartGrid(Grid)
65+
66+
cell = false
67+
if all(location(Phase).==Center())
68+
cell = true
69+
end
70+
71+
return ($fn)(Phase, Temp, CartGrid; cell=cell, kwargs...)
72+
end
73+
end
74+
75+
end
76+
77+
78+
# all functions to be ported
79+
function_names = (:above_surface, :below_surface)
80+
81+
for fn in function_names
82+
83+
@eval begin
84+
"""
85+
$($fn)( Grid::StructuredGrid, field::Field, DataSurface_Cart::CartData; kwargs...)
86+
87+
Sets `$($fn)` function for `Chmy` grids and the field `field` which can be either on vertices or centers
88+
"""
89+
function $fn( Grid::StructuredGrid,
90+
field::Field,
91+
DataSurface_Cart::CartData;
92+
kwargs...)
93+
94+
CartGrid = create_CartGrid(Grid)
95+
96+
cell = false
97+
if all(location(field).==Center())
98+
cell = true
99+
end
100+
101+
return ($fn)(CartGrid, DataSurface_Cart; cell=cell, kwargs...)
102+
end
103+
end
104+
105+
end
106+
107+
108+
end # end of module

0 commit comments

Comments
 (0)