Actual source code: util.c
petsc-3.15.0 2021-03-30
/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>

/*
   Produces a set of block column indices of the matrix row, one for each block represented in the original row

   n  - the number of block indices in cc[]
   cc - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRow(Mat Amat,PetscInt row,PetscInt bs,PetscInt *n,PetscInt *cc)
{
  PetscInt       cnt = -1,nidx,j;
  const PetscInt *idx;

  MatGetRow(Amat,row,&nidx,&idx,NULL);
  if (nidx) {
    cnt = 0;
    cc[cnt] = idx[0]/bs;
    for (j=1; j<nidx; j++) {
      if (cc[cnt] < idx[j]/bs) cc[++cnt] = idx[j]/bs;
    }
  }
  MatRestoreRow(Amat,row,&nidx,&idx,NULL);
  *n = cnt+1;
  return(0);
}

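/*
  Worked illustration (hypothetical data, not part of the library): for a row with sorted
  column indices idx[] = {0,1,4,5,9} and bs = 2, the loop above collapses idx[j]/bs =
  {0,0,2,2,4} to cc[] = {0,2,4}, so *n = 3.  An empty row leaves cnt = -1 and returns *n = 0.
*/
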
/*
   Produces a set of block column indices of the matrix block row, one for each block represented in the original set of rows

   ncollapsed - the number of block indices
   collapsed  - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRows(Mat Amat,PetscInt start,PetscInt bs,PetscInt *w0,PetscInt *w1,PetscInt *w2,PetscInt *ncollapsed,PetscInt **collapsed)
{
  PetscInt i,nprev,*cprev = w0,ncur = 0,*ccur = w1,*merged = w2,*cprevtmp;

  MatCollapseRow(Amat,start,bs,&nprev,cprev);
  for (i=start+1; i<start+bs; i++) {
    MatCollapseRow(Amat,i,bs,&ncur,ccur);
    PetscMergeIntArray(nprev,cprev,ncur,ccur,&nprev,&merged);
    cprevtmp = cprev; cprev = merged; merged = cprevtmp;
  }
  *ncollapsed = nprev;
  if (collapsed) *collapsed = cprev;
  return(0);
}

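/*
  Worked illustration (hypothetical data): with bs = 3, suppose rows start..start+2 collapse
  individually to the block-index sets {0,2}, {2,3}, and {0,5}.  The PetscMergeIntArray()
  calls merge these (as used here, effectively a sorted union) into {0,2,3,5}, so
  *ncollapsed = 4.  The pointer swap of cprev and merged after each merge recycles the three
  work arrays w0,w1,w2, so each merged result becomes the "previous" input of the next merge.
*/
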
/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateGraph - create a simple scaled scalar graph from a matrix

   Input Parameter:
.  Amat - the matrix

   Output Parameter:
.  a_Gmat - the output scalar graph (symmetric?)
*/
PetscErrorCode PCGAMGCreateGraph(Mat Amat, Mat *a_Gmat)
{
  PetscInt Istart,Iend,Ii,jj,kk,ncols,nloc,NN,MM,bs;
  MPI_Comm comm;
  Mat      Gmat;

  PetscObjectGetComm((PetscObject)Amat,&comm);
  MatGetOwnershipRange(Amat, &Istart, &Iend);
  MatGetSize(Amat, &MM, &NN);
  MatGetBlockSize(Amat, &bs);
  nloc = (Iend-Istart)/bs;

  PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);

  /* TODO GPU: these calls are potentially expensive if matrices are large and we want to use the GPU */
  /* One solution is to provide a new API, MatAIJGetCollapsedAIJ, for which each class can provide a fast
     implementation */
  if (bs > 1) {
    const PetscScalar *vals;
    const PetscInt    *idx;
    PetscInt          *d_nnz, *o_nnz,*w0,*w1,*w2;
    PetscBool         ismpiaij,isseqaij;

    /*
       Determine the preallocation needed for the scalar matrix derived from the vector matrix.
    */
    PetscObjectBaseTypeCompare((PetscObject)Amat,MATSEQAIJ,&isseqaij);
    PetscObjectBaseTypeCompare((PetscObject)Amat,MATMPIAIJ,&ismpiaij);
    PetscMalloc2(nloc, &d_nnz,isseqaij ? 0 : nloc, &o_nnz);
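    /* d_nnz[]/o_nnz[] hold, for each collapsed (block) row, the nonzero counts of the
       diagonal and off-diagonal portions of the future scalar matrix; a sequential matrix
       has no off-diagonal portion, hence the zero-length o_nnz allocation above */
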
    if (isseqaij) {
      PetscInt max_d_nnz;

      /*
         Determine exact preallocation count for (sequential) scalar matrix
      */
      MatSeqAIJGetMaxRowNonzeros(Amat,&max_d_nnz);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);
      for (Ii = 0, jj = 0; Ii < Iend; Ii += bs, jj++) {
        MatCollapseRows(Amat,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);
      }
      PetscFree3(w0,w1,w2);

    } else if (ismpiaij) {
      Mat            Daij,Oaij;
      const PetscInt *garray;
      PetscInt       max_d_nnz;

      MatMPIAIJGetSeqAIJ(Amat,&Daij,&Oaij,&garray);

      /*
         Determine exact preallocation count for the diagonal block portion of the scalar matrix
      */
      MatSeqAIJGetMaxRowNonzeros(Daij,&max_d_nnz);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        MatCollapseRows(Daij,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);
      }
      PetscFree3(w0,w1,w2);

      /*
         Overestimate (usually grossly) the preallocation count for the off-diagonal portion of the scalar matrix
      */
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        o_nnz[jj] = 0;
        for (kk=0; kk<bs; kk++) { /* rows that get collapsed to a single row */
          MatGetRow(Oaij,Ii+kk,&ncols,NULL,NULL);
          o_nnz[jj] += ncols;
          MatRestoreRow(Oaij,Ii+kk,&ncols,NULL,NULL);
        }
        if (o_nnz[jj] > (NN/bs-nloc)) o_nnz[jj] = NN/bs-nloc;
      }

    } else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require AIJ matrix type");

    /* get scalar copy (norms) of matrix */
    MatCreate(comm, &Gmat);
    MatSetSizes(Gmat,nloc,nloc,PETSC_DETERMINE,PETSC_DETERMINE);
    MatSetBlockSizes(Gmat, 1, 1);
    MatSetType(Gmat, MATAIJ);
    MatSeqAIJSetPreallocation(Gmat,0,d_nnz);
    MatMPIAIJSetPreallocation(Gmat,0,d_nnz,0,o_nnz);
    PetscFree2(d_nnz,o_nnz);

    for (Ii = Istart; Ii < Iend; Ii++) {
      PetscInt dest_row = Ii/bs;
      MatGetRow(Amat,Ii,&ncols,&idx,&vals);
      for (jj=0; jj<ncols; jj++) {
        PetscInt    dest_col = idx[jj]/bs;
        PetscScalar sv       = PetscAbs(PetscRealPart(vals[jj]));
        MatSetValues(Gmat,1,&dest_row,1,&dest_col,&sv,ADD_VALUES);
      }
      MatRestoreRow(Amat,Ii,&ncols,&idx,&vals);
    }
    MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);
  } else {
    /* just copy the scalar matrix - abs() is not taken here, that happens later when the graph is scaled and filtered */
    MatDuplicate(Amat, MAT_COPY_VALUES, &Gmat);
  }
  MatPropagateSymmetryOptions(Amat, Gmat);

  PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);

  *a_Gmat = Gmat;
  return(0);
}

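/*
  Usage sketch (hypothetical caller code, assuming an assembled blocked AIJ matrix Amat):

     Mat Gmat;
     PCGAMGCreateGraph(Amat,&Gmat);     (one graph vertex per bs x bs block)
     ...
     MatDestroy(&Gmat);

  For bs > 1 each graph entry G(i,j) accumulates, via ADD_VALUES, the absolute values of the
  entries of the bs x bs block (i,j) of Amat; for bs == 1 the matrix is duplicated unchanged.
*/
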
/* -------------------------------------------------------------------------- */
/*@C
   PCGAMGFilterGraph - filter the graph (remove zero and, optionally, small values) and make it symmetric if requested

   Collective on Mat

   Input Parameters:
+  a_Gmat - the graph
.  vfilter - threshold parameter [0,1)
-  symm - make the result symmetric

   Level: developer

   Notes:
   This is called before the graph coarseners are called.

.seealso: PCGAMGSetThreshold()
@*/
PetscErrorCode PCGAMGFilterGraph(Mat *a_Gmat,PetscReal vfilter,PetscBool symm)
{
  PetscInt          Istart,Iend,Ii,jj,ncols,nnz0,nnz1,NN,MM,nloc;
  PetscMPIInt       rank;
  Mat               Gmat = *a_Gmat, tGmat;
  MPI_Comm          comm;
  const PetscScalar *vals;
  const PetscInt    *idx;
  PetscInt          *d_nnz, *o_nnz;
  Vec               diag;

  PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);

  /* TODO GPU: optimization proposal: each class could provide a fast implementation of this
     procedure via a MatAbs API */
  if (vfilter < 0.0 && !symm) {
    /* Just use the provided matrix as the graph but make all values positive */
    MatInfo     info;
    PetscScalar *avals;
    PetscBool   isaij,ismpiaij;

    PetscObjectBaseTypeCompare((PetscObject)Gmat,MATSEQAIJ,&isaij);
    PetscObjectBaseTypeCompare((PetscObject)Gmat,MATMPIAIJ,&ismpiaij);
    if (!isaij && !ismpiaij) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require (MPI)AIJ matrix type");
    if (isaij) {
      MatGetInfo(Gmat,MAT_LOCAL,&info);
      MatSeqAIJGetArray(Gmat,&avals);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      MatSeqAIJRestoreArray(Gmat,&avals);
    } else {
      Mat_MPIAIJ *aij = (Mat_MPIAIJ*)Gmat->data;

      /* take absolute values in both the diagonal (A) and off-diagonal (B) blocks */
      MatGetInfo(aij->A,MAT_LOCAL,&info);
      MatSeqAIJGetArray(aij->A,&avals);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      MatSeqAIJRestoreArray(aij->A,&avals);
      MatGetInfo(aij->B,MAT_LOCAL,&info);
      MatSeqAIJGetArray(aij->B,&avals);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      MatSeqAIJRestoreArray(aij->B,&avals);
    }
    PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);
    return(0);
  }

  /* TODO GPU: this can be called when filter = 0 -> probably provide a MatAIJThresholdCompress that compresses the entries below a threshold?
     Also, if the matrix is symmetric, can we skip this operation? It can be very expensive on large matrices. */
  PetscObjectGetComm((PetscObject)Gmat,&comm);
  MPI_Comm_rank(comm,&rank);
  MatGetOwnershipRange(Gmat, &Istart, &Iend);
  nloc = Iend - Istart;
  MatGetSize(Gmat, &MM, &NN);

  if (symm) {
    Mat matTrans;

    /* symmetrize the pattern: G <- G + G^T */
    MatTranspose(Gmat, MAT_INITIAL_MATRIX, &matTrans);
    MatAXPY(Gmat, 1.0, matTrans, Gmat->structurally_symmetric ? SAME_NONZERO_PATTERN : DIFFERENT_NONZERO_PATTERN);
    MatDestroy(&matTrans);
  }

  /* scale Gmat so all values lie between -1 and 1: symmetric diagonal scaling G <- D^{-1/2} G D^{-1/2} */
  MatCreateVecs(Gmat, &diag, NULL);
  MatGetDiagonal(Gmat, diag);
  VecReciprocal(diag);
  VecSqrtAbs(diag);
  MatDiagonalScale(Gmat, diag, diag);
  VecDestroy(&diag);

  /* Determine upper bound on nonzeros needed in new filtered matrix */
  PetscMalloc2(nloc, &d_nnz,nloc, &o_nnz);
  for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
    MatGetRow(Gmat,Ii,&ncols,NULL,NULL);
    d_nnz[jj] = ncols;
    o_nnz[jj] = ncols;
    MatRestoreRow(Gmat,Ii,&ncols,NULL,NULL);
    if (d_nnz[jj] > nloc) d_nnz[jj] = nloc;
    if (o_nnz[jj] > (MM-nloc)) o_nnz[jj] = MM - nloc;
  }
  MatCreate(comm, &tGmat);
  MatSetSizes(tGmat,nloc,nloc,MM,MM);
  MatSetBlockSizes(tGmat, 1, 1);
  MatSetType(tGmat, MATAIJ);
  MatSeqAIJSetPreallocation(tGmat,0,d_nnz);
  MatMPIAIJSetPreallocation(tGmat,0,d_nnz,0,o_nnz);
  MatSetOption(tGmat,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);
  PetscFree2(d_nnz,o_nnz);

  for (Ii = Istart, nnz0 = nnz1 = 0; Ii < Iend; Ii++) {
    MatGetRow(Gmat,Ii,&ncols,&idx,&vals);
    for (jj=0; jj<ncols; jj++,nnz0++) {
      PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
      if (PetscRealPart(sv) > vfilter) {
        nnz1++;
        MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,INSERT_VALUES);
      }
    }
    MatRestoreRow(Gmat,Ii,&ncols,&idx,&vals);
  }
  MatAssemblyBegin(tGmat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(tGmat,MAT_FINAL_ASSEMBLY);
  if (symm) {
    MatSetOption(tGmat,MAT_SYMMETRIC,PETSC_TRUE);
  } else {
    MatPropagateSymmetryOptions(Gmat,tGmat);
  }
  PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);

#if defined(PETSC_USE_INFO)
  {
    double t1 = (!nnz0) ? 1. : 100.*(double)nnz1/(double)nnz0, t2 = (!nloc) ? 1. : (double)nnz0/(double)nloc;
    PetscInfo4(*a_Gmat,"\t %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%D)\n",t1,vfilter,t2,MM);
  }
#endif
  MatDestroy(&Gmat);
  *a_Gmat = tGmat;
  return(0);
}

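/*
  Usage sketch (hypothetical caller code): filter and symmetrize the graph built above, with
  a threshold of 0.01:

     PCGAMGCreateGraph(Amat,&Gmat);
     PCGAMGFilterGraph(&Gmat,0.01,PETSC_TRUE);

  Note that the graph is replaced in place: the incoming *a_Gmat is destroyed and the
  filtered matrix is returned through the same pointer.  With vfilter < 0 and
  symm == PETSC_FALSE, the fast path above only makes the values positive; nothing is copied
  or filtered.
*/
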
/* -------------------------------------------------------------------------- */
/*
   PCGAMGGetDataWithGhosts - hacks into Mat MPIAIJ internals, so the communicator must have size > 1

   Input Parameters:
.  Gmat - MPIAIJ matrix used for the scatters
.  data_sz - number of data terms per node (# cols in output)
.  data_in[nloc*data_sz] - column-oriented data

   Output Parameters:
.  a_stride - number of rows in the output
.  a_data_out[stride*data_sz] - output data with ghosts
*/
PetscErrorCode PCGAMGGetDataWithGhosts(Mat Gmat,PetscInt data_sz,PetscReal data_in[],PetscInt *a_stride,PetscReal **a_data_out)
{
  Vec         tmp_crds;
  Mat_MPIAIJ  *mpimat = (Mat_MPIAIJ*)Gmat->data;
  PetscInt    nnodes,num_ghosts,dir,kk,jj,my0,Iend,nloc;
  PetscScalar *data_arr;
  PetscReal   *datas;
  PetscBool   isMPIAIJ;

  PetscObjectBaseTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPIAIJ);
  MatGetOwnershipRange(Gmat, &my0, &Iend);
  nloc = Iend - my0;
  VecGetLocalSize(mpimat->lvec, &num_ghosts);
  nnodes    = num_ghosts + nloc;
  *a_stride = nnodes;
  MatCreateVecs(Gmat, &tmp_crds, NULL);

  PetscMalloc1(data_sz*nnodes, &datas);
  for (dir=0; dir<data_sz; dir++) {
    /* set local, and global */
    for (kk=0; kk<nloc; kk++) {
      PetscInt    gid = my0 + kk;
      PetscScalar crd = (PetscScalar)data_in[dir*nloc + kk]; /* column oriented */
      datas[dir*nnodes + kk] = PetscRealPart(crd);
      VecSetValues(tmp_crds, 1, &gid, &crd, INSERT_VALUES);
    }
    VecAssemblyBegin(tmp_crds);
    VecAssemblyEnd(tmp_crds);
    /* get the ghost data */
    VecScatterBegin(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(mpimat->lvec, &data_arr);
    for (kk=nloc,jj=0;jj<num_ghosts;kk++,jj++) datas[dir*nnodes + kk] = PetscRealPart(data_arr[jj]);
    VecRestoreArray(mpimat->lvec, &data_arr);
  }
  VecDestroy(&tmp_crds);
  *a_data_out = datas;
  return(0);
}

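/*
  Usage sketch (hypothetical caller code): gather 3 coordinates per local node, including the
  values for ghost nodes, from a parallel graph; crds holds the nloc*3 local values,
  column-oriented:

     PetscInt  stride;
     PetscReal *crds_ghosted;
     PCGAMGGetDataWithGhosts(Gmat,3,crds,&stride,&crds_ghosted);
     (crds_ghosted[dir*stride + kk] is coordinate dir of local node kk; ghosts start at kk = nloc)
     PetscFree(crds_ghosted);

  The output array is allocated here with PetscMalloc1(), so the caller is responsible for
  freeing it.
*/
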
PetscErrorCode PCGAMGHashTableCreate(PetscInt a_size, PCGAMGHashTable *a_tab)
{
  PetscInt kk;

  a_tab->size = a_size;
  PetscMalloc2(a_size, &a_tab->table,a_size, &a_tab->data);
  for (kk=0; kk<a_size; kk++) a_tab->table[kk] = -1; /* -1 marks an empty slot */
  return(0);
}

PetscErrorCode PCGAMGHashTableDestroy(PCGAMGHashTable *a_tab)
{
  PetscFree2(a_tab->table,a_tab->data);
  return(0);
}

PetscErrorCode PCGAMGHashTableAdd(PCGAMGHashTable *a_tab, PetscInt a_key, PetscInt a_data)
{
  PetscInt kk,idx;

  if (a_key<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Negative key %D.",a_key);
  for (kk = 0, idx = GAMG_HASH(a_key); kk < a_tab->size; kk++, idx = (idx==(a_tab->size-1)) ? 0 : idx + 1) {
    if (a_tab->table[idx] == a_key) {
      /* exists */
      a_tab->data[idx] = a_data;
      break;
    } else if (a_tab->table[idx] == -1) {
      /* add */
      a_tab->table[idx] = a_key;
      a_tab->data[idx]  = a_data;
      break;
    }
  }
  if (kk==a_tab->size) {
    /* this is not very efficient -- we wait until the table is completely full before resizing */
    PetscInt oldsize = a_tab->size, new_size = 2*a_tab->size + 5, *oldtable = a_tab->table, *olddata = a_tab->data;

    a_tab->size = new_size;
    PetscMalloc2(a_tab->size, &a_tab->table,a_tab->size, &a_tab->data);
    for (kk=0;kk<a_tab->size;kk++) a_tab->table[kk] = -1;
    for (kk=0;kk<oldsize;kk++) {
      if (oldtable[kk] != -1) {
        PCGAMGHashTableAdd(a_tab, oldtable[kk], olddata[kk]); /* rehash surviving entries into the larger table */
      }
    }
    PetscFree2(oldtable,olddata);
    PCGAMGHashTableAdd(a_tab, a_key, a_data); /* retry the insert that triggered the resize */
  }
  return(0);
}
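
/*
  Usage sketch (hypothetical caller code): the table maps nonnegative integer keys to integer
  data using open addressing with linear probing, growing to 2*size + 5 when full:

     PCGAMGHashTable tab;
     PCGAMGHashTableCreate(32,&tab);
     PCGAMGHashTableAdd(&tab,1234,7);    (insert, or overwrite an existing key)
     PCGAMGHashTableDestroy(&tab);

  GAMG_HASH() (defined alongside PCGAMGHashTable in gamg.h) selects the initial probe slot,
  and -1 marks an empty slot, which is why negative keys are rejected above.
*/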