Thanks, I modified stokes-fieldsplit-3d-PETSc.edp by adding the macro ThN2O() myN2O// and a call to restrict. However, I don't know what I missed: the program reports a segmentation fault at the restrict call.
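For reference, the pattern I am trying to reproduce comes from the distributed examples (e.g. the diffusion ones); here is a minimal 2D sketch of what I understand the intended usage to be, assuming buildDmesh fills the array named by the ThN2O macro:

load "PETSc"
macro dimension()2// EOM
include "macro_ddm.idp"
mesh Th = square(20, 20);
mesh ThGlobal = Th; // keep a copy of the global mesh before distribution
int[int] n2o; // new-to-old element numbering
macro ThN2O()n2o// EOM, must be defined before the mesh is distributed
buildDmesh(Th) // Th becomes the local mesh and n2o is filled
fespace Vh(Th, P1);
fespace VhGlobal(ThGlobal, P1);
int[int] R = restrict(Vh, VhGlobal, n2o); // local-to-global DoF mapping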
Here is the program:
// run with MPI: ff-mpirun -np 4 script.edp
// NBPROC 4
load "PETSc" // PETSc plugin
macro dimension()3// EOM // 2D or 3D
include "macro_ddm.idp" // additional DDM functions
macro def(i)[i, i#B, i#C, i#D]// // vector field definition
macro init(i)[i, i, i, i]// EOM // vector field initialization
macro grad(u)[dx(u), dy(u), dz(u)]// EOM // three-dimensional gradient
real Sqrt = sqrt(2.);
macro div(u)(dx(u) + dy(u#B) + dz(u#C))// EOM
func Pk = [P2, P2, P2, P1]; // finite element space
string solver;
if(!HasType("MATSOLVER", "mumps") && !HasType("MATSOLVER", "superlu"))
exit(0);
else
solver = (HasType("MATSOLVER", "mumps") ? "mumps" : "superlu");
mesh3 Th;
{
mesh ThGlobal2d = square(getARGV("-global", 12), getARGV("-global", 12), [x, y]); // global mesh
ThGlobal2d = trunc(ThGlobal2d, (x <= 0.5) || (y <= 0.5), label = 5);
ThGlobal2d = trunc(ThGlobal2d, (y >= 0.25) || (x >= 0.25), label = 5);
mesh Th2d = movemesh(ThGlobal2d, [-x, y]);
ThGlobal2d = ThGlobal2d + Th2d;
Th = buildlayers(ThGlobal2d, getARGV("-global", 12) / 2, zbound = [0, 0.4]);
}
mesh3 ThGlobal = Th; // save the global 3D mesh before it is distributed
int[int] myN2O; // new-to-old element numbering
macro ThN2O() myN2O// EOM
Mat A;
buildMat(Th, getARGV("-split", 1), A, Pk, mpiCommWorld)
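// NOTE: here I am assuming buildMat honors the ThN2O macro and fills myN2O the
// same way buildDmesh does; if it leaves myN2O empty, the restrict call below
// would read out of range, which could explain the SEGV.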
fespace Wh(Th, Pk); // local finite element space
varf vPb([u, uB, uC, p], [v, vB, vC, q])
    = int3d(Th)(grad(u)' * grad(v) + grad(uB)' * grad(vB) + grad(uC)' * grad(vC)
        - div(u) * q - div(v) * p + 1e-10 * p * q)
    + on(0, 1, 3, 5, u = 0, uB = 0, uC = 0)
    + on(2, u = 1000*y*(0.5-y)*z*(0.4-z), uB = 0, uC = 0);
real[int] rhs = vPb(0, Wh, tgv = -1);
Wh<real> def(u) = [1, 1, 1, 2]; // mark each DoF with its field for fieldsplit: 1 = velocity, 2 = pressure
string[int] names(2);
names[0] = "velocity";
names[1] = "pressure";
A = vPb(Wh, Wh, tgv = -1);
set(A, sparams = "-ksp_type fgmres -pc_type fieldsplit -pc_fieldsplit_type schur "
    + "-pc_fieldsplit_schur_fact_type lower -pc_fieldsplit_detect_saddle_point "
    + "-fieldsplit_velocity_sub_pc_type lu -fieldsplit_pressure_sub_pc_type lu "
    + "-fieldsplit_velocity_sub_pc_factor_mat_solver_type " + solver
    + " -fieldsplit_pressure_sub_pc_factor_mat_solver_type " + solver
    + " -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_ksp_max_it 5 "
    + "-fieldsplit_pressure_ksp_type gmres -fieldsplit_pressure_ksp_max_it 5 "
    + "-ksp_rtol 1e-6 -ksp_monitor -ksp_view", fields = u[], names = names);
u[] = 0.0; // reset u[] (it held the field markers above)
u[] = A^-1 * rhs;
fespace WhGlobal(ThGlobal, Pk); // global finite element space
int[int] subIdx = restrict(Wh, WhGlobal, myN2O); // this is where the segmentation fault occurs
//WhGlobal<real> def(uGlobal),def(uSum);
//real[int] locPETSc;
//ChangeNumbering(A,u[],locPETSc);
//ChangeNumbering(A,u[],locPETSc, inverse=true);
//uGlobal[](subIdx)=u[];
//mpiAllReduce(uGlobal[],uSum[],mpiCommWorld,mpiSUM);
//uGlobal[]=0;
//u[].*=A.D;
//uGlobal[](subIdx)=u[];
//mpiAllReduce(uGlobal[],uSum[],mpiCommWorld,mpiSUM);
//macro def3(u)[u, u#B, u#C]// EOM
//macro def1(u)[u#D]// EOM
//savevtk("stokes-fieldsplit-3d-petcs",def3(uSum),def1(uSum),order=[1,1]);
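The commented-out block above is the gathering I eventually want to run once restrict works; spelled out, and assuming the usual partition-of-unity scaling with A.D, it would look like this:

WhGlobal<real> def(uGlobal), def(uSum);
u[] .*= A.D; // scale the local solution by the partition of unity
uGlobal[] = 0;
uGlobal[](subIdx) = u[]; // scatter local DoFs into the global numbering
mpiAllReduce(uGlobal[], uSum[], mpiCommWorld, mpiSUM); // sum contributions across processes

And here is the report: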
.........
Mat Object: 8 MPI processes
type: mpiaij
rows=35635, cols=35635
total: nonzeros=3176161, allocated nonzeros=3176161
total number of mallocs used during MatSetValues calls=0
using I-node (on process 0) routines: found 1175 nodes, limit used is 5
--- system solved with PETSc (in 1.203322e+01)
-- Build Nodes/DF on mesh : n.v. 1666, n. elmt. 7128, n b. elmt. 1836
nb of Nodes 11323 nb of DoF 35635 DFon=4300
-- FESpace: Nb of Nodes 11323 Nb of DoF 35635
-- Build Nodes/DF on mesh : n.v. 1666, n. elmt. 7128, n b. elmt. 1836
nb of Nodes 11323 nb of DoF 35635 DFon=4300
-- FESpace: Nb of Nodes 11323 Nb of DoF 35635
[3]PETSC ERROR: ------------------------------------------------------------------------
[3]PETSC ERROR: Caught signal number 11 SEGV: Segmentation Violation, probably memory access out of range
[3]PETSC ERROR: Try option -start_in_debugger or -on_error_attach_debugger
[3]PETSC ERROR: or see https://petsc.org/release/faq/#valgrind
[3]PETSC ERROR: or try http://valgrind.org on GNU/linux and Apple MacOS to find memory corruption errors
[3]PETSC ERROR: configure using --with-debugging=yes, recompile, link, and run
[3]PETSC ERROR: to get more information on the crash.
[3]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
[3]PETSC ERROR: Signal received
[3]PETSC ERROR: See https://petsc.org/release/faq/ for trouble shooting.
[3]PETSC ERROR: Petsc Release Version 3.17.0, Mar 30, 2022
[3]PETSC ERROR: /usr/local/bin/FreeFem++-mpi on a named debian by dyu Thu Sep 8 17:54:22 2022
[3]PETSC ERROR: [5]PETSC ERROR: ------------------------------------------------------------------------
[5]PETSC ERROR: Caught signal number 11 SEGV: Segmentation Violation, probably memory access out of range
[5]PETSC ERROR: Try option -start_in_debugger or -on_error_attach_debugger
[5]PETSC ERROR: or see https://petsc.org/release/faq/#valgrind
[5]PETSC ERROR: or try http://valgrind.org on GNU/linux and Apple MacOS to find memory corruption errors
[5]PETSC ERROR: configure using --with-debugging=yes, recompile, link, and run
[5]PETSC ERROR: to get more information on the crash.
[5]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
[5]PETSC ERROR: Signal received
[5]PETSC ERROR: See https://petsc.org/release/faq/ for trouble shooting.
[5]PETSC ERROR: Petsc Release Version 3.17.0, Mar 30, 2022
[5]PETSC ERROR: /usr/local/bin/FreeFem++-mpi on a named debian by dyu Thu Sep 8 17:54:22 2022
[5]PETSC ERROR: Configure options --prefix=/usr/local/ff-petsc/r MAKEFLAGS= --with-debugging=0 COPTFLAGS="-O3 -mtune=native" CXXOPTFLAGS="-O3 -mtune=native" FOPTFLAGS="-O3 -mtune=native" --with-cxx-dialect=11 --with-ssl=0 --with-x=0 --with-fortran-bindings=0 --with-cudac=0 --with-cc=/usr/bin/mpicc --with-cxx=/usr/bin/mpic++ --with-fc=/usr/bin/mpif90 --with-scalar-type=real --with-blaslapack-include= --with-blaslapack-lib="-llapack -lblas" --download-metis --download-ptscotch --download-hypre --download-parmetis --download-mmg --download-parmmg --download-superlu --download-suitesparse --download-tetgen --download-slepc --download-hpddm --download-slepc-configure-arguments=--download-arpack=https://github.com/prj-/arpack-ng/archive/9fc0c71.tar.gz --download-scalapack --download-mumps PETSC_ARCH=fr
[5]PETSC ERROR: #1 User provided function() at unknown file:0
[5]PETSC ERROR: Run with -malloc_debug to check if memory corruption is causing the crash.
--------------------------------------------------------------------------
MPI_ABORT was invoked on rank 5 in communicator MPI_COMM_WORLD
with errorcode 59.
.........