Commit
1 parent 213c303 · commit 814b674
Showing 6 changed files with 306 additions and 16 deletions.
@@ -0,0 +1,96 @@
# This could be a DistributedSingleFieldFESpace if it accepted all kinds of FESpaces
struct PatchDistributedMultiFieldFESpace{A,B}
  spaces :: A
  gids   :: B
end

## PatchFESpace from MultiFieldFESpace

function PatchFESpace(space::Gridap.MultiField.MultiFieldFESpace,
                      patch_decomposition::PatchDecomposition,
                      cell_conformity::Vector{<:CellConformity};
                      kwargs...)
  patch_spaces = map((s,c) -> PatchFESpace(s,patch_decomposition,c;kwargs...),space,cell_conformity)
  return MultiFieldFESpace(patch_spaces)
end

function PatchFESpace(space::GridapDistributed.DistributedMultiFieldFESpace,
                      patch_decomposition::DistributedPatchDecomposition,
                      cell_conformity::Vector{<:AbstractArray{<:CellConformity}})
  model = patch_decomposition.model
  root_gids = get_face_gids(model,get_patch_root_dim(patch_decomposition))

  cell_conformity = GridapDistributed.to_parray_of_arrays(cell_conformity)
  spaces = map(local_views(space),
               local_views(patch_decomposition),
               cell_conformity,
               partition(root_gids)) do space, patch_decomposition, cell_conformity, partition
    patches_mask = fill(false,local_length(partition))
    patches_mask[ghost_to_local(partition)] .= true # Mask ghost patch roots
    PatchFESpace(space,patch_decomposition,cell_conformity;patches_mask=patches_mask)
  end

  # This PRange has no ghost dofs
  local_ndofs  = map(num_free_dofs,spaces)
  global_ndofs = sum(local_ndofs)
  patch_partition = variable_partition(local_ndofs,global_ndofs,false)
  gids = PRange(patch_partition)
  return PatchDistributedMultiFieldFESpace(spaces,gids)
end

## MultiFieldFESpace from PatchFESpaces
#
#function Gridap.MultiField.MultiFieldFESpace(spaces::Vector{<:PatchFESpace})
#  return PatchMultiFieldFESpace(spaces)
#end
#
#function Gridap.MultiField.MultiFieldFESpace(spaces::Vector{<:GridapDistributed.DistributedSingleFieldFESpace{<:AbstractArray{T}}}) where T <: PatchFESpace
#  return PatchMultiFieldFESpace(spaces)
#end
#
## MultiField API
#
#function Gridap.FESpaces.get_cell_dof_ids(f::PatchMultiFieldFESpace,trian::Triangulation)
#  offsets = Gridap.MultiField._compute_field_offsets(f)
#  nfields = length(f.spaces)
#  active_block_data = Any[]
#  for i in 1:nfields
#    cell_dofs_i = get_cell_dof_ids(f.spaces[i],trian)
#    if i == 1
#      push!(active_block_data,cell_dofs_i)
#    else
#      offset = Int32(offsets[i])
#      o = Fill(offset,length(cell_dofs_i))
#      cell_dofs_i_b = lazy_map(Broadcasting(Gridap.MultiField._sum_if_first_positive),cell_dofs_i,o)
#      push!(active_block_data,cell_dofs_i_b)
#    end
#  end
#  return lazy_map(BlockMap(nfields,active_block_ids),active_block_data...)
#end
#
#function Gridap.FESpaces.get_fe_basis(f::PatchMultiFieldFESpace)
#  nfields = length(f.spaces)
#  all_febases = MultiFieldFEBasisComponent[]
#  for field_i in 1:nfields
#    dv_i = get_fe_basis(f.spaces[field_i])
#    @assert BasisStyle(dv_i) == TestBasis()
#    dv_i_b = MultiFieldFEBasisComponent(dv_i,field_i,nfields)
#    push!(all_febases,dv_i_b)
#  end
#  MultiFieldCellField(all_febases)
#end
#
#function Gridap.FESpaces.get_trial_fe_basis(f::PatchMultiFieldFESpace)
#  nfields = length(f.spaces)
#  all_febases = MultiFieldFEBasisComponent[]
#  for field_i in 1:nfields
#    du_i = get_trial_fe_basis(f.spaces[field_i])
#    @assert BasisStyle(du_i) == TrialBasis()
#    du_i_b = MultiFieldFEBasisComponent(du_i,field_i,nfields)
#    push!(all_febases,du_i_b)
#  end
#  MultiFieldCellField(all_febases)
#end
#
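For context, here is a minimal serial usage sketch of the new multi-field constructor defined above. It is not part of this commit: the model, the reference FEs and the serial PatchDecomposition(model) constructor are illustrative assumptions, and the CellConformity construction follows the scratch script further below.

using FillArrays, Gridap, Gridap.ReferenceFEs, Gridap.FESpaces
using GridapSolvers, GridapSolvers.PatchBasedSmoothers  # module paths as in the driver script below

# Illustrative 2D model with one vector-valued and one scalar field
model   = CartesianDiscreteModel((0,1,0,1),(4,4))
PD      = PatchDecomposition(model)                       # assumed serial constructor
reffe_u = LagrangianRefFE(VectorValue{2,Float64},QUAD,1)
reffe_p = LagrangianRefFE(Float64,QUAD,1)
Vu = FESpace(model,reffe_u)
Vp = FESpace(model,reffe_p)
X  = MultiFieldFESpace([Vu,Vp])

# One CellConformity per field, matching the Vector{<:CellConformity} argument
cc_u = CellConformity(Fill(reffe_u,num_cells(model)),H1Conformity())
cc_p = CellConformity(Fill(reffe_p,num_cells(model)),H1Conformity())

# Dispatches to the new PatchFESpace(::MultiFieldFESpace,...) method above
Ph = PatchFESpace(X,PD,[cc_u,cc_p])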
@@ -0,0 +1,19 @@
using FillArrays
using Gridap
using Gridap.ReferenceFEs, Gridap.FESpaces, Gridap.CellData

model = CartesianDiscreteModel((0,1,0,1),(3,3))

reffe = LagrangianRefFE(Float64,QUAD,1)
conf  = H1Conformity()

V = FESpace(model,reffe)

cell_conformity = CellConformity(Fill(reffe,num_cells(model)),conf)
cell_dofs = get_fe_dof_basis(V)

data = CellData.get_data(cell_dofs)
dofs = data[1]
@@ -0,0 +1,147 @@
using Gridap, Gridap.Adaptivity, Gridap.ReferenceFEs
using GridapDistributed, PartitionedArrays
using GridapP4est, GridapPETSc
using GridapSolvers, GridapSolvers.MultilevelTools, GridapSolvers.LinearSolvers

function set_ksp_options(ksp)
  pc       = Ref{GridapPETSc.PETSC.PC}()
  mumpsmat = Ref{GridapPETSc.PETSC.Mat}()
  @check_error_code GridapPETSc.PETSC.KSPView(ksp[],C_NULL)
  @check_error_code GridapPETSc.PETSC.KSPSetType(ksp[],GridapPETSc.PETSC.KSPPREONLY)
  @check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc)
  @check_error_code GridapPETSc.PETSC.PCSetType(pc[],GridapPETSc.PETSC.PCLU)
  @check_error_code GridapPETSc.PETSC.PCFactorSetMatSolverType(pc[],GridapPETSc.PETSC.MATSOLVERMUMPS)
  @check_error_code GridapPETSc.PETSC.PCFactorSetUpMatSolverType(pc[])
  @check_error_code GridapPETSc.PETSC.PCFactorGetMatrix(pc[],mumpsmat)
  @check_error_code GridapPETSc.PETSC.MatMumpsSetIcntl(mumpsmat[], 4, 1)
  @check_error_code GridapPETSc.PETSC.MatMumpsSetIcntl(mumpsmat[], 28, 2)
  @check_error_code GridapPETSc.PETSC.MatMumpsSetIcntl(mumpsmat[], 29, 2)
  @check_error_code GridapPETSc.PETSC.MatMumpsSetCntl(mumpsmat[], 3, 1.0e-6)
end
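For readers who prefer PETSc's options database, the programmatic KSP/PC setup above corresponds roughly to the flags below. This is an approximate equivalence and not part of the commit; it assumes the usual GridapPETSc.with(args=split(options)) entry point together with a plain PETScLinearSolver().

# Rough options-database equivalent of set_ksp_options. MUMPS parameter meanings:
# ICNTL(4) = output verbosity, ICNTL(28) = parallel analysis, ICNTL(29) = parallel
# ordering tool, CNTL(3) = null-pivot detection threshold.
options = "-ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps " *
          "-mat_mumps_icntl_4 1 -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 2 " *
          "-mat_mumps_cntl_3 1.0e-6"
# e.g. GridapPETSc.with(args=split(options)) do ... end, then PETScLinearSolver()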
function compute_matrices(trials,tests,a::Function,l::Function,qdegree)
  nlevs = num_levels(trials)
  mh = trials.mh

  A = nothing
  b = nothing
  mats = Vector{PSparseMatrix}(undef,nlevs)
  for lev in 1:nlevs
    parts = get_level_parts(mh,lev)
    if i_am_in(parts)
      model = GridapSolvers.get_model(mh,lev)
      U = get_fe_space(trials,lev)
      V = get_fe_space(tests,lev)
      Ω = Triangulation(model)
      dΩ = Measure(Ω,qdegree)
      ai(u,v) = a(u,v,dΩ)
      if lev == 1
        li(v) = l(v,dΩ)
        op = AffineFEOperator(ai,li,U,V)
        A, b = get_matrix(op), get_vector(op)
        mats[lev] = A
      else
        mats[lev] = assemble_matrix(ai,U,V)
      end
    end
  end
  return mats, A, b
end
function get_patch_smoothers(tests,patch_spaces,patch_decompositions,biform,qdegree)
  tests_u, tests_j = tests
  patch_spaces_u, patch_spaces_j = patch_spaces

  mh = tests.mh
  nlevs = num_levels(mh)
  smoothers = Vector{RichardsonSmoother}(undef,nlevs-1)
  for lev in 1:nlevs-1
    parts = get_level_parts(mh,lev)
    if i_am_in(parts)
      PD = patch_decompositions[lev]
      Ph = get_fe_space(patch_spaces,lev)
      Vh = get_fe_space(tests,lev)
      Ω  = Triangulation(PD)
      dΩ = Measure(Ω,qdegree)
      a(u,v) = biform(u,v,dΩ)
      local_solver = PETScLinearSolver(set_ksp_options) # IS_ConjugateGradientSolver(;reltol=1.e-6)
      patch_smoother = PatchBasedLinearSolver(a,Ph,Vh,local_solver)
      smoothers[lev] = RichardsonSmoother(patch_smoother,10,0.2)
    end
  end
  return smoothers
end
np       = 1  # Number of processors
D        = 3  # Problem dimension
n_refs_c = 2  # Number of refinements for the coarse model
n_levels = 2  # Number of refinement levels
order    = 1  # FE order

ranks = with_mpi() do distribute
  distribute(LinearIndices((np,)))
end

domain = (D==2) ? (0,1,0,1) : (0,1,0,1,0,1)
nc = Tuple(fill(2,D))
cmodel = CartesianDiscreteModel(domain,nc)

mh = GridapP4est.with(ranks) do
  num_parts_x_level = fill(np,n_levels)
  coarse_model = OctreeDistributedDiscreteModel(ranks,cmodel,n_refs_c)
  return ModelHierarchy(ranks,coarse_model,num_parts_x_level)
end;
n_cells = num_cells(GridapSolvers.get_model(mh,1))

reffe_u = ReferenceFE(lagrangian,VectorValue{D,Float64},order)
reffe_j = ReferenceFE(raviart_thomas,Float64,order-1)

tests_u  = FESpace(mh,reffe_u;dirichlet_tags="boundary");
trials_u = TrialFESpace(tests_u);
tests_j  = FESpace(mh,reffe_j;dirichlet_tags="boundary");
trials_j = TrialFESpace(tests_j);

trials = MultiFieldFESpace([trials_u,trials_j]);
tests  = MultiFieldFESpace([tests_u,tests_j]);

β = 1.0
γ = 1.0
B = VectorValue(0.0,0.0,1.0)
f = VectorValue(fill(1.0,D)...)
qdegree = order*2+1
biform((u,j),(v_u,v_j),dΩ) = ∫(β*∇(u)⊙∇(v_u) - γ*(j×B)⋅v_u + j⋅v_j - (u×B)⋅v_j)dΩ
liform((v_u,v_j),dΩ) = ∫(v_u⋅f)dΩ
smatrices, A, b = compute_matrices(trials,tests,biform,liform,qdegree);
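In standard notation, the bilinear and linear forms assembled here (a direct transcription of biform and liform above) read:

a\big((u,j),(v_u,v_j)\big) = \int_\Omega \Big( \beta\,\nabla u : \nabla v_u \;-\; \gamma\,(j\times B)\cdot v_u \;+\; j\cdot v_j \;-\; (u\times B)\cdot v_j \Big)\,\mathrm{d}\Omega ,
\qquad
l\big((v_u,v_j)\big) = \int_\Omega f\cdot v_u \,\mathrm{d}\Omega .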
pbs = GridapSolvers.PatchBasedSmoothers.PatchBoundaryExclude()
patch_decompositions = PatchDecomposition(mh;patch_boundary_style=pbs)
patch_spaces = PatchFESpace(tests,patch_decompositions);

smoothers = get_patch_smoothers(tests,patch_spaces,patch_decompositions,biform,qdegree)

restrictions, prolongations = setup_transfer_operators(trials,qdegree;mode=:residual);

GridapPETSc.with() do
  gmg = GMGLinearSolver(mh,
                        smatrices,
                        prolongations,
                        restrictions,
                        pre_smoothers=smoothers,
                        post_smoothers=smoothers,
                        coarsest_solver=PETScLinearSolver(set_ksp_options),
                        maxiter=1,
                        rtol=1.0e-10,
                        verbose=false,
                        mode=:preconditioner)

  solver = CGSolver(gmg;maxiter=100,atol=1e-10,rtol=1.e-6,verbose=i_am_main(ranks))
  ns = numerical_setup(symbolic_setup(solver,A),A)

  x = pfill(0.0,partition(axes(A,2)))
  solve!(x,ns,b)
  @time begin
    fill!(x,0.0)
    solve!(x,ns,b)
  end
  println("n_dofs = ", length(x))
end