Actual source code: clique.cxx
petsc-3.4.2 2013-07-02
#include <../src/mat/impls/aij/mpi/clique/matcliqueimpl.h> /*I "petscmat.h" I*/

/*
  Provides an interface to the Clique sparse solver (http://poulson.github.com/Clique/)
*/
PetscErrorCode PetscCliqueFinalizePackage(void)
{
  cliq::Finalize();
  return(0);
}
PetscErrorCode PetscCliqueInitializePackage(void)
{
  if (cliq::Initialized()) return(0);
  { /* We have already initialized MPI, so this song and dance is just to pass these variables (which won't be used by Clique) through the interface that needs references */
    int  zero      = 0;
    char **nothing = 0;
    cliq::Initialize(zero,nothing);
  }
  PetscRegisterFinalize(PetscCliqueFinalizePackage);
  return(0);
}
/*
  MatConvertToClique: Convert a PETSc aij matrix to a Clique matrix

  input:
+   A     - matrix in seqaij or mpiaij format
-   reuse - denotes whether the destination matrix is to be created or reused. Currently
            MAT_REUSE_MATRIX is only supported for in-place conversion; otherwise use MAT_INITIAL_MATRIX.

  output:
.   cliq  - Clique context
*/
PetscErrorCode MatConvertToClique(Mat A,MatReuse reuse,Mat_Clique *cliq)
{
  PetscErrorCode        ierr;
  PetscInt              i,j,rstart,rend,ncols;
  const PetscInt        *cols;
  const PetscCliqScalar *vals;
  cliq::DistSparseMatrix<PetscCliqScalar> *cmat;

  if (reuse == MAT_INITIAL_MATRIX) {
    /* create Clique matrix */
    cmat       = new cliq::DistSparseMatrix<PetscCliqScalar>(A->rmap->N,cliq->cliq_comm);
    cliq->cmat = cmat;
  } else {
    cmat = cliq->cmat;
  }

  /* fill matrix values */
  MatGetOwnershipRange(A,&rstart,&rend);
  const int firstLocalRow = cmat->FirstLocalRow();
  const int localHeight   = cmat->LocalHeight();
  if (rstart != firstLocalRow || rend-rstart != localHeight) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"matrix row block distribution does not match");

  cmat->StartAssembly();
  //cmat->Reserve( 7*localHeight ); ???
  for (i=rstart; i<rend; i++) {
    MatGetRow(A,i,&ncols,&cols,&vals);
    for (j=0; j<ncols; j++) {
      cmat->Update(i,cols[j],vals[j]);
    }
    MatRestoreRow(A,i,&ncols,&cols,&vals);
  }
  cmat->StopAssembly();
  return(0);
}
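/*
  Usage sketch (illustration only, mirroring the calls made elsewhere in this file): the symbolic
  factorization converts with MAT_INITIAL_MATRIX to allocate cliq->cmat, and each later numeric
  factorization converts with MAT_REUSE_MATRIX to refill the existing matrix:

    MatConvertToClique(A,MAT_INITIAL_MATRIX,cliq);   allocate and fill cliq->cmat
    MatConvertToClique(A,MAT_REUSE_MATRIX,cliq);     update values in the existing cliq->cmat
*/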
static PetscErrorCode MatMult_Clique(Mat A,Vec X,Vec Y)
{
  PetscErrorCode        ierr;
  PetscInt              i;
  const PetscCliqScalar *x;
  PetscCliqScalar       *y;
  Mat_Clique            *cliq=(Mat_Clique*)A->spptr;
  cliq::DistSparseMatrix<PetscCliqScalar> *cmat=cliq->cmat;
  cliq::mpi::Comm       cxxcomm(PetscObjectComm((PetscObject)A));

  if (!cmat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Clique matrix cmat is not created yet");
  VecGetArrayRead(X,(const PetscScalar **)&x);
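  /* Copy the local part of X into a Clique distributed vector, apply the Clique matrix, and scatter the result back into Y */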
  cliq::DistVector<PetscCliqScalar> xc(A->cmap->N,cxxcomm);
  cliq::DistVector<PetscCliqScalar> yc(A->rmap->N,cxxcomm);
  for (i=0; i<A->cmap->n; i++) {
    xc.SetLocal(i,x[i]);
  }
  cliq::Multiply(1.0,*cmat,xc,0.0,yc);
  VecRestoreArrayRead(X,(const PetscScalar **)&x);

  for (i=0; i<A->cmap->n; i++) {
    VecSetValueLocal(Y,i,yc.GetLocal(i),INSERT_VALUES);
  }
  VecAssemblyBegin(Y);
  VecAssemblyEnd(Y);
  return(0);
}
PetscErrorCode MatView_Clique(Mat A,PetscViewer viewer)
{
  PetscBool iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerFormat format;
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscViewerASCIIPrintf(viewer,"Clique run parameters:\n");
    } else if (format == PETSC_VIEWER_DEFAULT) { /* matrix A is a factored matrix; remove this block */
      Mat Aaij;
      PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
      PetscObjectPrintClassNamePrefixType((PetscObject)A,viewer,"Matrix Object");
      PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
      PetscPrintf(PetscObjectComm((PetscObject)viewer),"Clique matrix\n");
      MatComputeExplicitOperator(A,&Aaij);
      MatView(Aaij,PETSC_VIEWER_STDOUT_WORLD);
      MatDestroy(&Aaij);
    } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Unsupported viewer format");
  }
  return(0);
}
PetscErrorCode MatDestroy_Clique(Mat A)
{
  Mat_Clique *cliq=(Mat_Clique*)A->spptr;

  printf("MatDestroy_Clique ...\n");
  if (cliq && cliq->CleanUpClique) {
    /* Terminate instance, deallocate memory */
    printf("MatDestroy_Clique ... destroy clique struct \n");
    PetscCommDestroy(&(cliq->cliq_comm));
    // free cmat here
    delete cliq->cmat;
    delete cliq->frontTree;
    delete cliq->rhs;
    delete cliq->xNodal;
    delete cliq->info;
    delete cliq->inverseMap;
  }
  if (cliq && cliq->Destroy) {
    cliq->Destroy(A);
  }
  PetscFree(A->spptr);

  /* clear composed functions */
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);
  return(0);
}
PetscErrorCode MatSolve_Clique(Mat A,Vec B,Vec X)
{
  PetscErrorCode        ierr;
  PetscInt              i;
  PetscMPIInt           rank;
  const PetscCliqScalar *b;
  Mat_Clique            *cliq=(Mat_Clique*)A->spptr;
  cliq::DistVector<PetscCliqScalar>      *bc=cliq->rhs;
  cliq::DistNodalVector<PetscCliqScalar> *xNodal=cliq->xNodal;

  VecGetArrayRead(B,(const PetscScalar **)&b);
  for (i=0; i<A->rmap->n; i++) {
    bc->SetLocal(i,b[i]);
  }
  VecRestoreArrayRead(B,(const PetscScalar **)&b);
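  /* Pull the right-hand side into Clique's nodal ordering, solve with the factored front tree, then push the solution back */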
  xNodal->Pull( *cliq->inverseMap, *cliq->info, *bc );
  cliq::Solve( *cliq->info, *cliq->frontTree, xNodal->localVec );
  xNodal->Push( *cliq->inverseMap, *cliq->info, *bc );

  MPI_Comm_rank(cliq->cliq_comm,&rank);
  for (i=0; i<bc->LocalHeight(); i++) {
    VecSetValue(X,rank*bc->Blocksize()+i,bc->GetLocal(i),INSERT_VALUES);
  }
  VecAssemblyBegin(X);
  VecAssemblyEnd(X);
  return(0);
}
PetscErrorCode MatCholeskyFactorNumeric_Clique(Mat F,Mat A,const MatFactorInfo *info)
{
  PetscErrorCode ierr;
  Mat_Clique     *cliq=(Mat_Clique*)F->spptr;
  cliq::DistSparseMatrix<PetscCliqScalar> *cmat;

  cmat = cliq->cmat;
  if (cliq->matstruc == SAME_NONZERO_PATTERN) { /* subsequent numeric factorization */
    /* Update cmat */
    MatConvertToClique(A,MAT_REUSE_MATRIX,cliq);
  }

  /* Numeric factorization */
  cliq::LDL( *cliq->info, *cliq->frontTree, cliq::LDL_1D);
  //L.frontType = cliq::SYMM_2D;

  // refactor
  //cliq::ChangeFrontType( *cliq->frontTree, cliq::LDL_2D );
  //*(cliq->frontTree.frontType) = cliq::LDL_2D;
  //cliq::LDL( *cliq->info, *cliq->frontTree, cliq::LDL_2D );

  cliq->matstruc = SAME_NONZERO_PATTERN;
  F->assembled   = PETSC_TRUE;
  return(0);
}
PetscErrorCode MatCholeskyFactorSymbolic_Clique(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  PetscErrorCode          ierr;
  Mat_Clique              *Acliq=(Mat_Clique*)F->spptr;
  cliq::DistSparseMatrix<PetscCliqScalar> *cmat;
  cliq::DistSeparatorTree sepTree;
  cliq::DistMap           map;

  /* Convert A to Aclique */
  MatConvertToClique(A,MAT_INITIAL_MATRIX,Acliq);
  cmat = Acliq->cmat;
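  /* Reorder with nested dissection, form the inverse map, and build the symbolic front tree */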
  cliq::NestedDissection( cmat->Graph(), map, sepTree, *Acliq->info, PETSC_TRUE, Acliq->numDistSeps, Acliq->numSeqSeps, Acliq->cutoff);
  map.FormInverse( *Acliq->inverseMap );
  Acliq->frontTree = new cliq::DistSymmFrontTree<PetscCliqScalar>( cliq::TRANSPOSE, *cmat, map, sepTree, *Acliq->info );

  Acliq->matstruc      = DIFFERENT_NONZERO_PATTERN;
  Acliq->CleanUpClique = PETSC_TRUE;
  return(0);
}
/*MC
  MATSOLVERCLIQUE - A solver package providing direct solvers for distributed
  and sequential matrices via the external package Clique.

  Use ./configure --download-clique to have PETSc installed with Clique

  Options Database Keys:
+ -mat_clique_ -
- -mat_clique_ <integer> -

  Level: beginner

.seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
M*/
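/*
  Usage sketch (illustration only; the options below come from the generic PETSc factor
  interface, not from anything defined in this file): with a KSP/PC already set up for a
  symmetric matrix, the Clique Cholesky solver can typically be selected at run time,

    -pc_type cholesky -pc_factor_mat_solver_package clique

  or from code, where pc has been obtained from KSPGetPC():

    PCFactorSetMatSolverPackage(pc,MATSOLVERCLIQUE);
*/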
PetscErrorCode MatFactorGetSolverPackage_Clique(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERCLIQUE;
  return(0);
}
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_clique(Mat A,MatFactorType ftype,Mat *F)
{
  Mat        B;
  Mat_Clique *cliq;

  PetscCliqueInitializePackage();
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DETERMINE,PETSC_DETERMINE);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSetUp(B);

  if (ftype == MAT_FACTOR_CHOLESKY) {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_Clique;
    B->ops->choleskyfactornumeric  = MatCholeskyFactorNumeric_Clique;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Factor type not supported");

  PetscNewLog(B,Mat_Clique,&cliq);
  B->spptr = (void*)cliq;

  cliq::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
  PetscCommDuplicate(cxxcomm,&(cliq->cliq_comm),NULL);
  cliq->rhs           = new cliq::DistVector<PetscCliqScalar>(A->rmap->N,cliq->cliq_comm);
  cliq->xNodal        = new cliq::DistNodalVector<PetscCliqScalar>();
  cliq->info          = new cliq::DistSymmInfo;
  cliq->inverseMap    = new cliq::DistMap;
  cliq->CleanUpClique = PETSC_FALSE;
  cliq->Destroy       = B->ops->destroy;

  B->ops->view    = MatView_Clique;
  B->ops->mult    = MatMult_Clique; /* for cliq->cmat */
  B->ops->solve   = MatSolve_Clique;
  B->ops->destroy = MatDestroy_Clique;
  B->factortype   = ftype;
  B->assembled    = PETSC_FALSE;

  /* Set Clique options */
  PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Clique Options","Mat");
  cliq->cutoff      = 128;  /* maximum size of leaf node */
  cliq->numDistSeps = 1;    /* number of distributed separators to try */
  cliq->numSeqSeps  = 1;    /* number of sequential separators to try */
  PetscOptionsEnd();

  *F = B;
  return(0);
}