Actual source code: mmaij.c

petsc-3.4.2 2013-07-02
/*
   Support for the parallel AIJ matrix vector multiply
*/
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petsc-private/isimpl.h>    /* needed because it accesses the data structure of ISLocalToGlobalMapping directly */

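/*
   MatSetUpMultiply_MPIAIJ - builds the pieces needed to apply the off-diagonal
   part of the matrix: garray (the sorted global column numbers that actually
   appear in B), B itself with its column indices compacted to the local
   numbering 0..ec-1, the sequential work vector lvec, and the scatter Mvctx
   that gathers the required ghost values of the input vector.
*/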
PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *B   = (Mat_SeqAIJ*)(aij->B->data);
  PetscInt       i,j,*aj = B->j,ec = 0,*garray;
  IS             from,to;
  Vec            gvec;
#if defined(PETSC_USE_CTABLE)
  PetscTable         gid1_lid1;
  PetscTablePosition tpos;
  PetscInt           gid,lid;
#else
  PetscInt N = mat->cmap->N,*indices;
#endif

#if defined(PETSC_USE_CTABLE)
  /* use a table */
  PetscTableCreate(aij->B->rmap->n,mat->cmap->N+1,&gid1_lid1);
  for (i=0; i<aij->B->rmap->n; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      PetscInt data,gid1 = aj[B->i[i] + j] + 1;
      PetscTableFind(gid1_lid1,gid1,&data);
      if (!data) {
        /* one based table */
        PetscTableAdd(gid1_lid1,gid1,++ec,INSERT_VALUES);
      }
    }
  }
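  /* ec now counts the distinct nonlocal columns referenced by B; the table maps
     global column+1 to local index+1 (shifted by one so that 0 can mean "not found") */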
  /* form array of columns we need */
  PetscMalloc((ec+1)*sizeof(PetscInt),&garray);
  PetscTableGetHeadPosition(gid1_lid1,&tpos);
  while (tpos) {
    PetscTableGetNext(gid1_lid1,&tpos,&gid,&lid);
    gid--;
    lid--;
    garray[lid] = gid;
  }
  PetscSortInt(ec,garray); /* sort, and rebuild */
  PetscTableRemoveAll(gid1_lid1);
  for (i=0; i<ec; i++) {
    PetscTableAdd(gid1_lid1,garray[i]+1,i+1,INSERT_VALUES);
  }
  /* compact out the extra columns in B */
  for (i=0; i<aij->B->rmap->n; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      PetscInt gid1 = aj[B->i[i] + j] + 1;
      PetscTableFind(gid1_lid1,gid1,&lid);
      lid--;
      aj[B->i[i] + j] = lid;
    }
  }
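  /* B now stores local column indices 0..ec-1; garray[lid] recovers the global column */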
  aij->B->cmap->n = aij->B->cmap->N = ec;

  PetscLayoutSetUp((aij->B->cmap));
  PetscTableDestroy(&gid1_lid1);
#else
  /* Make an array as long as the number of columns */
  /* mark those columns that are in aij->B */
  PetscMalloc((N+1)*sizeof(PetscInt),&indices);
  PetscMemzero(indices,N*sizeof(PetscInt));
  for (i=0; i<aij->B->rmap->n; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      if (!indices[aj[B->i[i] + j]]) ec++;
      indices[aj[B->i[i] + j]] = 1;
    }
  }

  /* form array of columns we need */
  PetscMalloc((ec+1)*sizeof(PetscInt),&garray);
  ec   = 0;
  for (i=0; i<N; i++) {
    if (indices[i]) garray[ec++] = i;
  }

  /* make indices now point into garray */
  for (i=0; i<ec; i++) {
    indices[garray[i]] = i;
  }

  /* compact out the extra columns in B */
  for (i=0; i<aij->B->rmap->n; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      aj[B->i[i] + j] = indices[aj[B->i[i] + j]];
    }
  }
  aij->B->cmap->n = aij->B->cmap->N = ec;

  PetscLayoutSetUp((aij->B->cmap));
  PetscFree(indices);
#endif
  /* create local vector that is used to scatter into */
  VecCreateSeq(PETSC_COMM_SELF,ec,&aij->lvec);

  /* create two temporary index sets used to build the scatter */
  ISCreateGeneral(((PetscObject)mat)->comm,ec,garray,PETSC_COPY_VALUES,&from);

  ISCreateStride(PETSC_COMM_SELF,ec,0,1,&to);

  /* create a temporary global vector to generate the scatter context */
  /* this does not allocate the array's memory, so it is efficient */
  VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat),1,mat->cmap->n,mat->cmap->N,NULL,&gvec);

  /* generate the scatter context */
  VecScatterCreate(gvec,from,aij->lvec,to,&aij->Mvctx);
  PetscLogObjectParent(mat,aij->Mvctx);
  PetscLogObjectParent(mat,aij->lvec);
  PetscLogObjectParent(mat,from);
  PetscLogObjectParent(mat,to);

  aij->garray = garray;

  PetscLogObjectMemory(mat,(ec+1)*sizeof(PetscInt));
  ISDestroy(&from);
  ISDestroy(&to);
  VecDestroy(&gvec);
  return(0);
}
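
/*
   Illustrative sketch (not part of this file): the objects built above are what
   the parallel matrix-vector product uses, roughly

       VecScatterBegin(aij->Mvctx,xx,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);
       MatMult(aij->A,xx,yy);               the "diagonal" block times the local part of xx
       VecScatterEnd(aij->Mvctx,xx,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);
       MatMultAdd(aij->B,aij->lvec,yy,yy);  the "off-diagonal" block times the gathered ghost values

   The actual implementation lives in mpiaij.c; this only shows the role of
   lvec, Mvctx, and garray.
*/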


/*
     Takes the local part of an already assembled MPIAIJ matrix
   and disassembles it. This is to allow new nonzeros into the matrix
   that require more communication in the matrix-vector multiply.
   Thus certain data structures must be rebuilt.

   Kind of slow! But that's what application programmers get when
   they are sloppy.
*/
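/* Typically reached from MatSetValues() on a matrix that was already assembled,
   when a value is inserted into an off-process column that B does not yet know about. */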
PetscErrorCode MatDisAssemble_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *aij  = (Mat_MPIAIJ*)A->data;
  Mat            B     = aij->B,Bnew;
  Mat_SeqAIJ     *Baij = (Mat_SeqAIJ*)B->data;
  PetscInt       i,j,m = B->rmap->n,n = A->cmap->N,col,ct = 0,*garray = aij->garray,*nz,ec;
  PetscScalar    v;

  /* free stuff related to the matrix-vector multiply */
  VecGetSize(aij->lvec,&ec); /* needed for PetscLogObjectMemory below */
  VecDestroy(&aij->lvec);
  VecScatterDestroy(&aij->Mvctx);
  if (aij->colmap) {
#if defined(PETSC_USE_CTABLE)
    PetscTableDestroy(&aij->colmap);
#else
    PetscFree(aij->colmap);
    PetscLogObjectMemory(A,-aij->B->cmap->n*sizeof(PetscInt));
#endif
  }

  /* make sure that B is assembled so we can access its values */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);

  /* invent new B and copy stuff over */
  PetscMalloc((m+1)*sizeof(PetscInt),&nz);
  for (i=0; i<m; i++) {
    nz[i] = Baij->i[i+1] - Baij->i[i];
  }
  MatCreate(PETSC_COMM_SELF,&Bnew);
  MatSetSizes(Bnew,m,n,m,n);
  MatSetBlockSizes(Bnew,A->rmap->bs,A->cmap->bs);
  MatSetType(Bnew,((PetscObject)B)->type_name);
  MatSeqAIJSetPreallocation(Bnew,0,nz);

  ((Mat_SeqAIJ*)Bnew->data)->nonew = Baij->nonew; /* Inherit insertion error options. */

  PetscFree(nz);
  /* copy the values of B into Bnew, translating its local column indices back to global ones via garray */
  for (i=0; i<m; i++) {
    for (j=Baij->i[i]; j<Baij->i[i+1]; j++) {
      col  = garray[Baij->j[ct]];
      v    = Baij->a[ct++];
      MatSetValues(Bnew,1,&i,1,&col,&v,B->insertmode);
    }
  }
  PetscFree(aij->garray);
  PetscLogObjectMemory(A,-ec*sizeof(PetscInt));
  MatDestroy(&B);
  PetscLogObjectParent(A,Bnew);

  aij->B           = Bnew;
  A->was_assembled = PETSC_FALSE;
  return(0);
}

/*      ugly stuff added for Glenn; someday we should fix this up */

static PetscInt *auglyrmapd = 0,*auglyrmapo = 0; /* mapping from the local ordering to the "diagonal" and "off-diagonal" parts of the local matrix */
static Vec auglydd          = 0,auglyoo     = 0; /* work vectors used to scale the two parts of the local matrix */


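/*
   MatMPIAIJDiagonalScaleLocalSetUp - builds the maps used below: auglyrmapd[i]
   is the position, in the matrix's local-to-global mapping, of the i-th locally
   owned column, and auglyrmapo[i] is the position of the i-th ghost column (as
   listed in garray). It also creates the work vectors auglydd and auglyoo of
   the corresponding lengths.
*/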
PetscErrorCode MatMPIAIJDiagonalScaleLocalSetUp(Mat inA,Vec scale)
{
  Mat_MPIAIJ     *ina = (Mat_MPIAIJ*) inA->data; /* access the private part of the matrix */
  PetscInt       i,n,nt,cstart,cend,no,*garray = ina->garray,*lindices;
  PetscInt       *r_rmapd,*r_rmapo;

  MatGetOwnershipRange(inA,&cstart,&cend);
  MatGetSize(ina->A,NULL,&n);
  PetscMalloc((inA->rmap->mapping->n+1)*sizeof(PetscInt),&r_rmapd);
  PetscMemzero(r_rmapd,inA->rmap->mapping->n*sizeof(PetscInt));
  nt   = 0;
  for (i=0; i<inA->rmap->mapping->n; i++) {
    if (inA->rmap->mapping->indices[i] >= cstart && inA->rmap->mapping->indices[i] < cend) {
      nt++;
      r_rmapd[i] = inA->rmap->mapping->indices[i] + 1;
    }
  }
  if (nt != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hmm nt %D n %D",nt,n);
  PetscMalloc((n+1)*sizeof(PetscInt),&auglyrmapd);
  for (i=0; i<inA->rmap->mapping->n; i++) {
    if (r_rmapd[i]) {
      auglyrmapd[(r_rmapd[i]-1)-cstart] = i;
    }
  }
  PetscFree(r_rmapd);
  VecCreateSeq(PETSC_COMM_SELF,n,&auglydd);

  PetscMalloc((inA->cmap->N+1)*sizeof(PetscInt),&lindices);
  PetscMemzero(lindices,inA->cmap->N*sizeof(PetscInt));
  for (i=0; i<ina->B->cmap->n; i++) {
    lindices[garray[i]] = i+1;
  }
  no   = inA->rmap->mapping->n - nt;
  PetscMalloc((inA->rmap->mapping->n+1)*sizeof(PetscInt),&r_rmapo);
  PetscMemzero(r_rmapo,inA->rmap->mapping->n*sizeof(PetscInt));
  nt   = 0;
  for (i=0; i<inA->rmap->mapping->n; i++) {
    if (lindices[inA->rmap->mapping->indices[i]]) {
      nt++;
      r_rmapo[i] = lindices[inA->rmap->mapping->indices[i]];
    }
  }
  if (nt > no) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hmm nt %D no %D",nt,no);
  PetscFree(lindices);
  PetscMalloc((nt+1)*sizeof(PetscInt),&auglyrmapo);
  for (i=0; i<inA->rmap->mapping->n; i++) {
    if (r_rmapo[i]) {
      auglyrmapo[(r_rmapo[i]-1)] = i;
    }
  }
  PetscFree(r_rmapo);
  VecCreateSeq(PETSC_COMM_SELF,nt,&auglyoo);
  return(0);
}

PetscErrorCode MatMPIAIJDiagonalScaleLocal(Mat A,Vec scale)
{
  /* This routine should really be abandoned as it duplicates MatDiagonalScaleLocal */

  PetscTryMethod(A,"MatDiagonalScaleLocal_C",(Mat,Vec),(A,scale));
  return(0);
}

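/*
   MatDiagonalScaleLocal_MPIAIJ - column-scales the two local blocks: the entries
   of scale (given in the local, ghosted ordering of the matrix's local-to-global
   mapping) are split into auglydd and auglyoo using the maps built above and then
   applied to a->A and a->B with MatDiagonalScale().
*/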
PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat A,Vec scale)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*) A->data; /* access the private part of the matrix */
  PetscInt       n,i;
  PetscScalar    *d,*o,*s;

  if (!auglyrmapd) {
    MatMPIAIJDiagonalScaleLocalSetUp(A,scale);
  }

  VecGetArray(scale,&s);

  VecGetLocalSize(auglydd,&n);
  VecGetArray(auglydd,&d);
  for (i=0; i<n; i++) {
    d[i] = s[auglyrmapd[i]]; /* copy "diagonal" (true local) portion of scale into dd vector */
  }
  VecRestoreArray(auglydd,&d);
  /* column scale "diagonal" portion of local matrix */
  MatDiagonalScale(a->A,NULL,auglydd);

  VecGetLocalSize(auglyoo,&n);
  VecGetArray(auglyoo,&o);
  for (i=0; i<n; i++) {
    o[i] = s[auglyrmapo[i]]; /* copy "off-diagonal" portion of scale into oo vector */
  }
  VecRestoreArray(scale,&s);
  VecRestoreArray(auglyoo,&o);
  /* column scale "off-diagonal" portion of local matrix */
  MatDiagonalScale(a->B,NULL,auglyoo);
  return(0);
}
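
/*
   Rough usage sketch (an assumption based on reading the code above, not taken
   from this file): A is an MPIAIJ matrix whose local-to-global mapping was set
   with MatSetLocalToGlobalMapping(), and scale appears to be expected as a
   sequential vector with one entry per index of that mapping (owned plus ghost
   columns):

       MatSetLocalToGlobalMapping(A,ltog,ltog);
       ... assemble A ...
       MatMPIAIJDiagonalScaleLocal(A,scale);
*/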