Poisson2D_DMDA.c
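/*
  Poisson2D_DMDA.c

  Solves the 2D Poisson problem -Laplace(u) = f on the unit square with
  homogeneous Dirichlet boundary conditions. The interior grid is managed
  by a DMDA, the operator is the standard five-point stencil (scaled by
  h^2), and the system is solved with GMRES preconditioned by GAMG.

  Example run (the executable name is illustrative):
    mpiexec -n 4 ./Poisson2D_DMDA -da_grid_x 100 -da_grid_y 100 -ksp_monitor
*/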
#include <petsc.h> // Master PETSc header; also pulls in the Vec, Mat, DM, and KSP interfaces
#include <math.h>
#include <mpi.h>
#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc, char **args)
{
Vec u, b; // Numerical solution, RHS
Mat A; // Coefficient Matrix
KSP ksp; // Linear solver context
PC pc; // Preconditioner context
PetscReal norm, normb; // To compute the relative residual
PetscErrorCode ierr;
PetscInt i, j, nloc;
PetscInt N = 1600, its; // Default size of A (N unknowns; overridable with -n)
PetscScalar pi = 4 * atan(1.0);
//PetscViewer viewer, lab; // To print soln vector to text file
DMBoundaryType bx = DM_BOUNDARY_NONE, by = DM_BOUNDARY_NONE;
DMDAStencilType stype = DMDA_STENCIL_STAR; // five-point stencil for the five-point Laplacian
ierr = PetscInitialize(&argc, &args, (char*)0, 0); if (ierr) return ierr; // This also initializes MPI
ierr = PetscOptionsGetInt(NULL, NULL, "-n", &N, NULL); CHKERRQ(ierr); // Optionally override the problem size N
// MPI is already initialized by PetscInitialize; query the number of processes and this process's rank
int numprocs, myid;
double startTime, endTime;
MPI_Comm_size(MPI_COMM_WORLD, &numprocs);
MPI_Comm_rank(MPI_COMM_WORLD, &myid);
// Compute the matrix and right-hand-side vector that define the linear system, Au = b.
DM da; // DM (data management) object
nloc = (PetscInt)sqrt((double)N); // the interior mesh is nloc x nloc
// Create 2D grid
ierr = DMDACreate2d(PETSC_COMM_WORLD, bx, by, stype, nloc, nloc, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da); CHKERRQ(ierr);
// Or set the global grid size at the command line, for example: -da_grid_x 1000 -da_grid_y 1000
ierr = DMSetFromOptions(da); CHKERRQ(ierr); // must precede DMSetUp() so the -da_grid_* options take effect
ierr = DMSetUp(da); CHKERRQ(ierr);
// Define the unit square over which we are solving (note: 2D problem ignores z values)
ierr = DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0); CHKERRQ(ierr);
DMDALocalInfo info;
ierr = DMDAGetLocalInfo(da, &info); CHKERRQ(ierr);
// Display information about the 2D grid
ierr = PetscPrintf(PETSC_COMM_WORLD, "\nNumber of local grid points (as seen by rank 0)\n"); CHKERRQ(ierr);
ierr = PetscPrintf(PETSC_COMM_WORLD, "local grid points xm = %D\n", info.xm); CHKERRQ(ierr);
ierr = PetscPrintf(PETSC_COMM_WORLD, "local grid points ym = %D\n", info.ym); CHKERRQ(ierr);
ierr = PetscPrintf(PETSC_COMM_WORLD, "\nNumber of global grid points\n"); CHKERRQ(ierr);
ierr = PetscPrintf(PETSC_COMM_WORLD, "Global grid points mx = %D\n", info.mx); CHKERRQ(ierr);
ierr = PetscPrintf(PETSC_COMM_WORLD, "Global grid points my = %D\n", info.my); CHKERRQ(ierr);
//mx is global number of grid points in x direction
PetscReal hx = 1.0 / (info.mx+1); // x grid spacing
PetscReal hy = 1.0 / (info.my+1); // y grid spacing
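// Note: with DM_BOUNDARY_NONE the DMDA stores only interior unknowns; node
// (i,j) sits at (x,y) = ((i+1)*hx, (j+1)*hy), and the boundary nodes of the
// unit square are eliminated, hence the mx+1 (and my+1) intervals.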
ierr = PetscPrintf(PETSC_COMM_WORLD, "\nThe grid spacing is: \n"); CHKERRQ(ierr);
ierr = PetscPrintf(PETSC_COMM_WORLD, "hx is = %.5f\n", hx); CHKERRQ(ierr);
ierr = PetscPrintf(PETSC_COMM_WORLD, "hy is = %.5f\n", hy); CHKERRQ(ierr);
// Processor zero starts its clock
if (myid == 0)
{
startTime = MPI_Wtime();
}
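// Everything timed below (matrix assembly, RHS setup, and the KSP solve)
// is measured on rank 0 only; MPI_Wtime() returns wall-clock seconds.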
// Create DM Matrix of type mpiaij (default)
ierr = DMCreateMatrix(da, &A); CHKERRQ(ierr);
// Set matrix values
PetscInt ncols;
PetscScalar v[5];
MatStencil col[5], row; // stores grid coordinates over the 2D grid
// Set matrix elements for the 2-D, five-point stencil in parallel.
// Taking advantage of the stencil structure of the DMDA to write into my matrix
for (j = info.ys; j < info.ys + info.ym; j++){ // info.xs/info.ys are the first global indices owned by this process
for (i = info.xs; i < info.xs + info.xm; i++) {
ncols = 0;
row.i = i; // indexing is global (across the entire 2D grid)
row.j = j;
if (i > 0) {
col[ncols].i = i - 1;
col[ncols].j = j;
v[ncols++] = 1.0; // sub diagonal
}
if (i < info.mx-1) {
col[ncols].i = i + 1;
col[ncols].j = j;
v[ncols++] = 1.0; // super diagonal
}
if (j > 0) {
col[ncols].i = i;
col[ncols].j = j - 1;
v[ncols++] = 1.0; // other lower diagonal
}
if (j < info.my - 1) {
col[ncols].i = i;
col[ncols].j = j+1;
v[ncols++] = 1.0; // other upper diagonal
}
col[ncols].i = i;
col[ncols].j = j;
v[ncols++] = -4.0; // main diagonal entries
ierr = MatSetValuesStencil(A, 1, &row, ncols, col, v, INSERT_VALUES); CHKERRQ(ierr);
}
}
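// Each interior row assembled above enforces the scaled five-point stencil
//   u(i-1,j) + u(i+1,j) + u(i,j-1) + u(i,j+1) - 4*u(i,j) = b(i,j),
// i.e. A is h^2 times the discrete Laplacian (valid when hx == hy); the
// neighbours skipped at the domain edge encode zero Dirichlet values.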
// Assemble the matrix
ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
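// For offline inspection one could dump the assembled operator in PETSc
// binary format (the file name "A.bin" here is just an example):
// PetscViewer vb;
// ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD, "A.bin", FILE_MODE_WRITE, &vb); CHKERRQ(ierr);
// ierr = MatView(A, vb); CHKERRQ(ierr);
// ierr = PetscViewerDestroy(&vb); CHKERRQ(ierr);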
// Create vectors.
ierr = DMCreateGlobalVector(da, &u); CHKERRQ(ierr); // Solution vector
ierr = DMCreateGlobalVector(da, &b); CHKERRQ(ierr);
PetscScalar **array;
ierr = DMDAVecGetArray(da, b, &array); CHKERRQ(ierr);
for (j = info.ys; j < info.ys + info.ym; j++) {
for (i = info.xs; i < info.xs + info.xm; i++) {
// Manufactured RHS for u(x,y) = sin(4*pi*x)*sin(2*pi*y), with x = (i+1)*hx and y = (j+1)*hy;
// the factor hx*hy matches the unscaled stencil above (which assumes hx == hy)
array[j][i] = -20*pi*pi*hx*hy*sin(2*pi*(j+1)*hy)*sin(4*pi*(i+1)*hx);
}
}
ierr = DMDAVecRestoreArray(da, b, &array); CHKERRQ(ierr);
ierr = VecAssemblyBegin(b); CHKERRQ(ierr);
ierr = VecAssemblyEnd(b); CHKERRQ(ierr);
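// The VecAssembly calls are not strictly required here (DMDAVecGetArray
// writes only locally owned entries), but they are harmless.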
// Print to standard terminal for debugging purposes
//printf("\n rhs vector is: \n");
//ierr = VecView(b, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
//printf("\n Matrix A IS: \n");
//ierr = MatView(A, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
// Creates linear solver context
ierr = KSPCreate(PETSC_COMM_WORLD, &ksp); CHKERRQ(ierr);
ierr = KSPSetOperators(ksp, A, A); CHKERRQ(ierr); // Preconditioning matrix is A
ierr = KSPSetType(ksp, KSPGMRES); CHKERRQ(ierr); // Preconditioned GMRES method
ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
ierr = PCSetType(pc, PCGAMG); CHKERRQ(ierr); // GAMG Preconditioner
ierr = KSPSetTolerances(ksp, 1e-07, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT); CHKERRQ(ierr);
ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr);
ierr = PCSetFromOptions(pc); CHKERRQ(ierr);
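// All of the hard-wired solver choices above remain overridable at run time,
// because KSPSetFromOptions()/PCSetFromOptions() are called after them,
// for example: -ksp_type bcgs -pc_type jacobi -ksp_rtol 1e-9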
//Solve linear system
ierr = KSPSolve(ksp, b, u); CHKERRQ(ierr); // u is solution vector : n x 1
// Stop Timer
if (myid == 0)
{
endTime = MPI_Wtime();
ierr = PetscPrintf(PETSC_COMM_WORLD, "\nRuntime is = %.16f\n", endTime-startTime); CHKERRQ(ierr);
}
// Optionally print the solution vector (e.g. for a 64-by-64 grid) to a text file:
//char name[] = "soln_64by64";
//ierr = PetscViewerCreate(PETSC_COMM_WORLD, &lab); CHKERRQ(ierr);
//ierr = PetscViewerSetType(lab, PETSCVIEWERASCII); CHKERRQ(ierr);
//ierr = PetscViewerFileSetMode(lab, FILE_MODE_WRITE); CHKERRQ(ierr);
//ierr = PetscViewerFileSetName(lab, name); CHKERRQ(ierr);
//ierr = PetscViewerASCIIOpen(PETSC_COMM_WORLD, "soln_64by64.out", &viewer); CHKERRQ(ierr);
//ierr = VecView(u, viewer); CHKERRQ(ierr);
ierr = KSPView(ksp, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
// Report the residual norm and the total number of iterations
ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr);
ierr = VecNorm(b, NORM_2, &normb); CHKERRQ(ierr);
ierr = KSPGetResidualNorm(ksp, &norm); CHKERRQ(ierr);
// Print the final residual norm, the residual relative to ||b||, and the iteration count
ierr = PetscPrintf(PETSC_COMM_WORLD, "Residual = %e, Relative residual = %e, Iterations = %D\n\n", (double)norm, (double)(norm/normb), its); CHKERRQ(ierr);
// Destroy PETSc objects
ierr = VecDestroy(&u); CHKERRQ(ierr);
ierr = VecDestroy(&b); CHKERRQ(ierr);
ierr = MatDestroy(&A); CHKERRQ(ierr);
ierr = KSPDestroy(&ksp); CHKERRQ(ierr);
ierr = DMDestroy(&da); CHKERRQ(ierr); // The DMDA must be freed too
ierr = PetscFinalize(); // Finalizes the PETSc library as well as MPI
return 0;
}