Mixed finite element in parallel

Hello,

I have a mixed finite element method that I would like to implement by assembling a matrix for each variable and then building the global matrix as G = [[A, B], [C, D]]. Is there any way to do this in parallel in FreeFEM? I tried to define G with MatCreate(Th, G, [P1, P0, P0]) and then define the other matrices, but it didn't work.

There are many available examples, such as examples/hpddm/stokes-block-2d-PETSc.edp or examples/hpddm/oseen-2d-PETSc.edp in the FreeFem/FreeFem-sources repository on GitHub (develop branch).
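For reference, the pattern those examples follow boils down to something like the sketch below: one distributed Mat per variable, local rectangular matrices for the couplings, and a block composition of the whole operator. This is a minimal sketch only; the bilinear forms vA, vB, vC, vD and the unit-square mesh are placeholders, not taken from either example.

// run with: ff-mpirun -np 4 script.edp -wg
load "PETSc"
macro dimension()2// EOM
include "macro_ddm.idp"

mesh Th = square(40, 40);
Mat A, D;                        // one distributed Mat per variable
DmeshCreate(Th);                 // distribute the mesh first
{
    macro def(i)i//
    macro init(i)i//
    MatCreate(Th, A, P1);        // parallel numbering of the P1 unknown
}
{
    macro def(i)i//
    macro init(i)i//
    MatCreate(Th, D, P0);        // parallel numbering of the P0 unknown
}
fespace Vh(Th, P1);
fespace Ph(Th, P0);
// placeholder forms, just to have something to assemble
varf vA(u, v) = int2d(Th)(dx(u)*dx(v) + dy(u)*dy(v)) + on(1, 2, 3, 4, u = 0);
varf vB(u, q) = int2d(Th)(dx(u)*q);  // unknown in Vh, test in Ph
varf vC(p, v) = int2d(Th)(p*v);      // unknown in Ph, test in Vh
varf vD(p, q) = int2d(Th)(p*q);

A = vA(Vh, Vh);                  // fill the diagonal blocks in place
D = vD(Ph, Ph);
matrix BLoc = vB(Vh, Ph);        // local matrix: rows in Ph, columns in Vh
matrix CLoc = vC(Ph, Vh);        // local matrix: rows in Vh, columns in Ph
Mat B(D, A, BLoc);               // rows distributed like D, columns like A
Mat C(A, D, CLoc);               // rows distributed like A, columns like D
Mat G = [[A, C],                 // global operator G = [[A, C], [B, D]]
         [B, D]];
set(G, sparams = "-pc_type lu -pc_factor_mat_solver_type mumps");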


Thank you very much!

I tried doing exactly the same as that code, but it doesn't work; I get the following error:

 --  mesh:  Nb of Triangles =  21796, Nb of Vertices 11099
  --  mesh:  Nb of Triangles =  21796, Nb of Vertices 11099
  --  mesh:  Nb of Triangles =  21796, Nb of Vertices 11099
  --  mesh:  Nb of Triangles =  21796, Nb of Vertices 11099
 --- global mesh of 21796 elements (prior to refinement) partitioned with metis  --metisA: 4-way Edge-Cut:       3, Balance:  1.01 Nodal=0/Dual 1
 (in 3.306430e-02)
 --- partition of unity built (in 6.010990e-02)
 --- global numbering created (in 7.016000e-04)
 --- global CSR created (in 4.694000e-04)
 --- global numbering created (in 8.848000e-04)
 --- global CSR created (in 9.960000e-05)
[0]PETSC ERROR: ------------------------------------------------------------------------
[0]PETSC ERROR: Caught signal number 11 SEGV: Segmentation Violation, probably memory access out of range
[0]PETSC ERROR: Try option -start_in_debugger or -on_error_attach_debugger
[0]PETSC ERROR: or see https://petsc.org/release/faq/#valgrind and https://petsc.org/release/faq/
[0]PETSC ERROR: configure using --with-debugging=yes, recompile, link, and run
[0]PETSC ERROR: to get more information on the crash.
[0]PETSC ERROR: Run with -malloc_debug to check if memory corruption is causing the crash.

Here is how I set up my matrices:

matrix B = Bmat(VhV, VhP);
matrix C = Cmat(VhP, VhV);
real[int] rhsV = Leftsidepcap(0, VhV);

dA = Amat(VhP, VhP);
dD = Dmat(VhV, VhV);
Mat dB(dA, dD, B);
Mat dC(dD, dA, C);
Mat dS = [[dA, dB],
          [dC, dD]];
// ObjectView(dS);
set(dS, sparams = "-pc_type lu");

Also, I'm using a time-stepping scheme: when defining dB and dC, can I do it outside the loop?

Thank you for your help,

Benfanich

Can’t run the code => can’t help.

Sorry, here's the code:

//  run with MPI:  ff-mpirun -np 4 script.edp -wg
// NBPROC 8 

load "PETSc"
macro dimension()2// EOM
include "macro_ddm.idp" 
verbosity=1;
real cpu = clock();
int up = 1000, down = up -1, right = down-1, left = right-1, CURVE=up+1;
tgv=1e300;
////////////////////////Mesh: soil geometry////////////////////////////////////////////
//method parameters
real NN = 100, dt=0.001, t=0., tfin=100000, L=100; int i;
func real curve(real t) {
    return      L * (0.1 * (1 - cos(pi * (t/(L)))) + 0.45) /* (t <= 50) ? 50 : 50 + 0.5 * (t-50) */;
}
Mat A, D;
  
border DOWN(t=0,L){x=t; y=0; label=down;}
border right1(t=0, curve(L)){x=L; y=t; label = right;}
border curvee(t=L,0){x=t; y=curve(t); label = CURVE;}	
border left1(t=curve(0),0){x=0; y=t; label=left;}
border left2(t=L,curve(0)){x=0; y=t; label = left;}
border UP(t=L,0){x=t; y=L; label=up;}
border right2(t=curve(L),L){x=L; y=t; label = right;}
real coef1=curve(L)*1./L, coef2=curve(0)*1./L;
mesh Th = buildmesh(DOWN(NN)+right1(coef1*NN)+curvee(NN)+left1(coef2*NN)+left2((1-coef2)*NN)+UP(NN)+right2((1-coef1)*NN));
fespace Ph(Th,P0);
fespace Vh(Th, P1);
fespace Wh(Th, [P0,P0]);
DmeshCreate(Th);
{
    macro def(i)i//
    macro init(i)i//
    MatCreate(Th, A, P1);
}
{
    macro def(i)[i, i#B]//
    macro init(i)[i, i]//
    MatCreate(Th, D, [P0,P0]);
}

//buildDmesh(Th);


for(i = 1; i*dt <= tfin; i++) {
    matrix B = Bmat(Wh, Vh);
    matrix C = Cmat(Vh, Wh);
    real[int] rhvflux = Leftsidepcap(0, Wh), rhvs = Leftsides(0, Vh);

    A = Amat(Vh, Vh);
    D = Dmat(Wh, Wh);
    Mat dC(A, D, C);
    Mat dB(D, A, B);

    Mat AA = [[A, dB],
              [dC, D]];
    set(AA, sparams = "-pc_type lu -pc_factor_mat_solver_type mumps");
    real[int] rhs(rhvflux.n + rhvs.n);
    rhs(0:rhvs.n - 1) = rhvs;
    rhs(rhvs.n: rhvflux.n + rhvs.n - 1) = rhvflux;
    real[int] sol = AA^-1 * rhs;
    /* VhV def2(solV);
    solV[] = sol(0:sol.n - VhP.ndof - 1); */
    Vh solS;
    solS[] = sol(0: rhvs.n - 1);
    t=i*dt;
    //richardPcap;
    //if(i%60==0){savevtk(vtkfolder+"100x100_saturation.vtu",Th,s,dataname="saturation", append = 1);}
    sold=s;
    toplot=s*phi + thetar;
    if(mpirank == 0) {cout << "Time : " << t << "-----" << s[].min << endl;}
    if(i%1==0){
        macro params(toplot)wait=0,prev=1, dim=2, fill=1, value = 1,viso=viso(0:viso.n-1), nbiso=30,cmm=  "min: " +toplot[].min+" || max: "+toplot[].max+ "|| Time: "+t// EOM
        plotD(Th, toplot, params(toplot));}
} 

You got the argument order wrong: in Mat dX(Y, Z, M), the rows of the local matrix M follow the distribution of Y and its columns that of Z. Since B = Bmat(Wh, Vh) has its rows in Vh (like A) and its columns in Wh (like D), it should be:

    Mat dC(A, D, B);
    Mat dB(D, A, C);

    Mat AA = [[A, dC],
              [dB, D]];
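As for the earlier question about the time loop: assuming the varfs Bmat and Cmat do not depend on time, the rectangular wrappers only need the parallel numbering of A and D (created by MatCreate), so it should be possible to build them once before the loop and refill only the blocks that change at each step. A minimal sketch of that structure, using the corrected order:

// time-independent couplings: assemble once, before the loop
matrix B = Bmat(Wh, Vh);
matrix C = Cmat(Vh, Wh);
Mat dC(A, D, B);            // rows follow A (P1), columns follow D ([P0, P0])
Mat dB(D, A, C);            // rows follow D, columns follow A

for(i = 1; i*dt <= tfin; i++) {
    A = Amat(Vh, Vh);       // refill the time-dependent diagonal blocks
    D = Dmat(Wh, Wh);
    Mat AA = [[A, dC],      // recompose the block operator with the updated blocks
              [dB, D]];
    set(AA, sparams = "-pc_type lu -pc_factor_mat_solver_type mumps");
    // ... assemble the right-hand side and solve as before ...
}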

Thank you, it's working.