Actual source code: chaco.c
petsc-3.4.2 2013-07-02
#include <../src/mat/impls/adj/mpi/mpiadj.h>       /*I "petscmat.h" I*/

#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif

/* Chaco does not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                           float *ewgts, float *x, float *y, float *z, char *outassignname,
                           char *outfilename, short *assignment, int architecture, int ndims_tot,
                           int mesh_dims[3], double *goal, int global_method, int local_method,
                           int rqi_flag, int vmax, int ndims, double eigtol, long seed);

extern int FREE_GRAPH;
/*
   int     nvtxs;           number of vertices in full graph
   int    *start;           start of edge list for each vertex
   int    *adjacency;       edge list data
   int    *vwgts;           weights for all vertices
   float  *ewgts;           weights for all edges
   float  *x, *y, *z;       coordinates for inertial method
   char   *outassignname;   name of assignment output file
   char   *outfilename;     output file name
   short  *assignment;      set number of each vtx (length n)
   int     architecture;    0 => hypercube, d => d-dimensional mesh
   int     ndims_tot;       total number of cube dimensions to divide
   int     mesh_dims[3];    dimensions of mesh of processors
   double *goal;            desired set sizes for each set
   int     global_method;   global partitioning algorithm
   int     local_method;    local partitioning algorithm
   int     rqi_flag;        should I use RQI/Symmlq eigensolver?
   int     vmax;            how many vertices to coarsen down to?
   int     ndims;           number of eigenvectors (2^d sets)
   double  eigtol;          tolerance on eigenvectors
   long    seed;            for random graph mutations
*/
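
/*
   Illustration (editorial addition, not part of the original source): the
   CSR-style input that interface() expects, shown for the 3-vertex path graph
   1-2-3. Chaco numbers vertices from 1; MatPartitioningApply_Chaco() below
   performs this +1 shift when copying adj->j into its own buffer.

     int nvtxs       = 3;                 three vertices
     int start[]     = {0, 1, 3, 4};      start[i]..start[i+1] spans vertex i+1's neighbor list
     int adjacency[] = {2, 1, 3, 2};      1-based neighbor lists: {2}, {1,3}, {2}
*/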
typedef struct {
  PetscBool         verbose;
  PetscInt          eignum;
  PetscReal         eigtol;
  MPChacoGlobalType global_method;  /* global method */
  MPChacoLocalType  local_method;   /* local method */
  MPChacoEigenType  eigen_method;   /* eigensolver */
  PetscInt          nbvtxcoarsed;   /* number of vertices for the coarse graph */
} MatPartitioning_Chaco;

#define SIZE_LOG 10000 /* size of buffer for mesg_log */
static PetscErrorCode MatPartitioningApply_Chaco(MatPartitioning part,IS *partitioning)
{
  PetscErrorCode        ierr;
  PetscInt              *parttab,*locals,i,nb_locals,M,N;
  PetscMPIInt           size,rank;
  Mat                   mat = part->adj,matAdj,matSeq,*A;
  Mat_MPIAdj            *adj;
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;
  PetscBool             flg;
  IS                    isrow,iscol;
  int                   nvtxs,*start,*adjacency,*vwgts,architecture,ndims_tot;
  int                   mesh_dims[3],global_method,local_method,rqi_flag,vmax,ndims;
  short                 *assignment;
  double                eigtol;
  long                  seed;
  char                  *mesg_log;
#if defined(PETSC_HAVE_UNISTD_H)
  int                   fd_stdout,fd_pipe[2],count,err;
#endif

  FREE_GRAPH = 0; /* otherwise Chaco will attempt to free memory for adjacency graph */
  MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);
  MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);
  PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
  if (size>1) {
    if (flg) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Distributed matrix format MPIAdj is not supported for sequential partitioners");
    PetscInfo(part,"Converting distributed matrix to sequential: this could be a performance loss\n");
    MatGetSize(mat,&M,&N);
    ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
    ISCreateStride(PETSC_COMM_SELF,N,0,1,&iscol);
    MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&A);
    ISDestroy(&isrow);
    ISDestroy(&iscol);
    matSeq = *A;
    PetscFree(A);
  } else {
    PetscObjectReference((PetscObject)mat);
    matSeq = mat;
  }

  if (!flg) { /* convert regular matrix to MPIADJ */
    MatConvert(matSeq,MATMPIADJ,MAT_INITIAL_MATRIX,&matAdj);
  } else {
    PetscObjectReference((PetscObject)matSeq);
    matAdj = matSeq;
  }
  adj = (Mat_MPIAdj*)matAdj->data; /* finally adj contains the adjacency graph */
  /* arguments for Chaco library */
  nvtxs         = mat->rmap->N;          /* number of vertices in full graph */
  start         = adj->i;                /* start of edge list for each vertex */
  vwgts         = part->vertex_weights;  /* weights for all vertices */
  architecture  = 1;                     /* 0 => hypercube, d => d-dimensional mesh */
  ndims_tot     = 0;                     /* total number of cube dimensions to divide */
  mesh_dims[0]  = part->n;               /* dimensions of mesh of processors */
  global_method = chaco->global_method;  /* global partitioning algorithm */
  local_method  = chaco->local_method;   /* local partitioning algorithm */
  rqi_flag      = chaco->eigen_method;   /* should I use RQI/Symmlq eigensolver? */
  vmax          = chaco->nbvtxcoarsed;   /* how many vertices to coarsen down to? */
  ndims         = chaco->eignum;         /* number of eigenvectors (2^d sets) */
  eigtol        = chaco->eigtol;         /* tolerance on eigenvectors */
  seed          = 123636512;             /* for random graph mutations */

  PetscMalloc((mat->rmap->N)*sizeof(short),&assignment);
  PetscMalloc(sizeof(int)*start[nvtxs],&adjacency);
  for (i=0; i<start[nvtxs]; i++) adjacency[i] = (adj->j)[i] + 1; /* 1-based indexing */

  /* redirect output to buffer */
#if defined(PETSC_HAVE_UNISTD_H)
  fd_stdout = dup(1);
  if (pipe(fd_pipe)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"Could not open pipe");
  close(1);
  dup2(fd_pipe[1],1);
  PetscMalloc(SIZE_LOG*sizeof(char),&mesg_log);
#endif
  /* library call; capture the return code so it can be checked below */
  ierr = interface(nvtxs,start,adjacency,vwgts,NULL,NULL,NULL,NULL,
                   NULL,NULL,assignment,architecture,ndims_tot,mesh_dims,
                   NULL,global_method,local_method,rqi_flag,vmax,ndims,eigtol,seed);

#if defined(PETSC_HAVE_UNISTD_H)
  err = fflush(stdout);
  if (err) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"fflush() failed on stdout");
  count = read(fd_pipe[0],mesg_log,(SIZE_LOG-1)*sizeof(char));
  if (count<0) count = 0;
  mesg_log[count] = 0;
  close(1);
  dup2(fd_stdout,1);
  close(fd_stdout);
  close(fd_pipe[0]);
  close(fd_pipe[1]);
  if (chaco->verbose) {
    PetscPrintf(PetscObjectComm((PetscObject)mat),"%s",mesg_log);
  }
  PetscFree(mesg_log);
#endif
  if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Chaco failed");

  PetscMalloc((mat->rmap->N)*sizeof(PetscInt),&parttab);
  for (i=0; i<nvtxs; i++) parttab[i] = assignment[i];

  /* creation of the index set: each process takes a contiguous block of
     entries, with the first N%size processes receiving one extra entry */
  nb_locals = mat->rmap->N / size;
  locals    = parttab + rank*nb_locals;
  if (rank < mat->rmap->N % size) {
    nb_locals++;
    locals += rank;
  } else locals += mat->rmap->N % size;

  ISCreateGeneral(PetscObjectComm((PetscObject)part),nb_locals,locals,PETSC_COPY_VALUES,partitioning);

  /* clean up */
  PetscFree(parttab);
  PetscFree(adjacency);
  PetscFree(assignment);
  MatDestroy(&matSeq);
  MatDestroy(&matAdj);
  return(0);
}
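
/*
   Usage sketch (editorial addition, not part of petsc-3.4.2): how this
   partitioner is typically driven through the public MatPartitioning
   interface. 'adj' is assumed to be an existing adjacency matrix, e.g. one
   created with MatCreateMPIAdj().

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetAdjacency(part,adj);
     MatPartitioningSetType(part,MATPARTITIONINGCHACO);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     ISView(is,PETSC_VIEWER_STDOUT_WORLD);
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
*/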
PetscErrorCode MatPartitioningView_Chaco(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;
  PetscErrorCode        ierr;
  PetscBool             isascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
  if (isascii) {
    PetscViewerASCIIPrintf(viewer,"  Global method: %s\n",MPChacoGlobalTypes[chaco->global_method]);
    PetscViewerASCIIPrintf(viewer,"  Local method: %s\n",MPChacoLocalTypes[chaco->local_method]);
    PetscViewerASCIIPrintf(viewer,"  Number of vertices for the coarse graph: %d\n",chaco->nbvtxcoarsed);
    PetscViewerASCIIPrintf(viewer,"  Eigensolver: %s\n",MPChacoEigenTypes[chaco->eigen_method]);
    PetscViewerASCIIPrintf(viewer,"  Tolerance for eigensolver: %g\n",chaco->eigtol);
    PetscViewerASCIIPrintf(viewer,"  Number of eigenvectors: %d\n",chaco->eignum);
  }
  return(0);
}
/*@
   MatPartitioningChacoSetGlobal - Set global method for Chaco partitioner.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  method - one of MP_CHACO_MULTILEVEL, MP_CHACO_SPECTRAL, MP_CHACO_LINEAR,
            MP_CHACO_RANDOM or MP_CHACO_SCATTERED

   Options Database:
.  -mat_partitioning_chaco_global <method> - the global method

   Level: advanced

   Notes:
   The default is the multi-level method. See Chaco documentation for
   additional details.

.seealso: MatPartitioningChacoSetLocal(), MatPartitioningChacoGetGlobal()
@*/
PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning part,MPChacoGlobalType method)
{
  PetscTryMethod(part,"MatPartitioningChacoSetGlobal_C",(MatPartitioning,MPChacoGlobalType),(part,method));
  return(0);
}
PetscErrorCode MatPartitioningChacoSetGlobal_Chaco(MatPartitioning part,MPChacoGlobalType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  switch (method) {
  case MP_CHACO_MULTILEVEL:
  case MP_CHACO_SPECTRAL:
  case MP_CHACO_LINEAR:
  case MP_CHACO_RANDOM:
  case MP_CHACO_SCATTERED:
    chaco->global_method = method; break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Chaco: Unknown or unsupported option");
  }
  return(0);
}
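
/*
   Example (editorial addition): selecting spectral global partitioning with no
   local refinement, for a MatPartitioning 'part' of type MATPARTITIONINGCHACO;
   the options-database equivalents are registered in
   MatPartitioningSetFromOptions_Chaco() below.

     MatPartitioningChacoSetGlobal(part,MP_CHACO_SPECTRAL);
     MatPartitioningChacoSetLocal(part,MP_CHACO_NONE);
*/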
/*@
   MatPartitioningChacoGetGlobal - Get global method for Chaco partitioner.

   Not Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  method - the method

   Level: advanced

.seealso: MatPartitioningChacoSetGlobal()
@*/
PetscErrorCode MatPartitioningChacoGetGlobal(MatPartitioning part,MPChacoGlobalType *method)
{
  PetscTryMethod(part,"MatPartitioningChacoGetGlobal_C",(MatPartitioning,MPChacoGlobalType*),(part,method));
  return(0);
}

PetscErrorCode MatPartitioningChacoGetGlobal_Chaco(MatPartitioning part,MPChacoGlobalType *method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  *method = chaco->global_method;
  return(0);
}
/*@
   MatPartitioningChacoSetLocal - Set local method for Chaco partitioner.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  method - one of MP_CHACO_KERNIGHAN or MP_CHACO_NONE

   Options Database:
.  -mat_partitioning_chaco_local <method> - the local method

   Level: advanced

   Notes:
   The default is to apply the Kernighan-Lin heuristic. See Chaco documentation
   for additional details.

.seealso: MatPartitioningChacoSetGlobal(), MatPartitioningChacoGetLocal()
@*/
PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning part,MPChacoLocalType method)
{
  PetscTryMethod(part,"MatPartitioningChacoSetLocal_C",(MatPartitioning,MPChacoLocalType),(part,method));
  return(0);
}

PetscErrorCode MatPartitioningChacoSetLocal_Chaco(MatPartitioning part,MPChacoLocalType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  if (method==PETSC_DEFAULT) chaco->local_method = MP_CHACO_KERNIGHAN;
  else {
    switch (method) {
    case MP_CHACO_KERNIGHAN:
    case MP_CHACO_NONE:
      chaco->local_method = method; break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Chaco: Unknown or unsupported option");
    }
  }
  return(0);
}
/*@
   MatPartitioningChacoGetLocal - Get local method for Chaco partitioner.

   Not Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  method - the method

   Level: advanced

.seealso: MatPartitioningChacoSetLocal()
@*/
PetscErrorCode MatPartitioningChacoGetLocal(MatPartitioning part,MPChacoLocalType *method)
{
  PetscTryMethod(part,"MatPartitioningChacoGetLocal_C",(MatPartitioning,MPChacoLocalType*),(part,method));
  return(0);
}

PetscErrorCode MatPartitioningChacoGetLocal_Chaco(MatPartitioning part,MPChacoLocalType *method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  *method = chaco->local_method;
  return(0);
}
/*@
   MatPartitioningChacoSetCoarseLevel - Set the coarse level parameter for the
   Chaco partitioner.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  level - the coarse level in range [0.0,1.0]

   Options Database:
.  -mat_partitioning_chaco_coarse <l> - Coarse level

   Level: advanced
@*/
PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning part,PetscReal level)
{
  PetscTryMethod(part,"MatPartitioningChacoSetCoarseLevel_C",(MatPartitioning,PetscReal),(part,level));
  return(0);
}

PetscErrorCode MatPartitioningChacoSetCoarseLevel_Chaco(MatPartitioning part,PetscReal level)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  if (level<0.0 || level>1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Chaco: level of coarsening out of range [0.0-1.0]");
  chaco->nbvtxcoarsed = (PetscInt)(part->adj->cmap->N * level);
  if (chaco->nbvtxcoarsed < 20) chaco->nbvtxcoarsed = 20;
  return(0);
}
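
/*
   Worked example (editorial addition): if the adjacency matrix has N = 1000
   columns, then

     MatPartitioningChacoSetCoarseLevel(part,0.05);    nbvtxcoarsed = 50
     MatPartitioningChacoSetCoarseLevel(part,0.01);    10, clamped up to the minimum of 20
*/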
/*@
   MatPartitioningChacoSetEigenSolver - Set eigensolver method for Chaco partitioner.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  method - one of MP_CHACO_LANCZOS or MP_CHACO_RQI

   Options Database:
.  -mat_partitioning_chaco_eigen_solver <method> - the eigensolver

   Level: advanced

   Notes:
   The default is to use a Lanczos method. See Chaco documentation for details.

.seealso: MatPartitioningChacoSetEigenTol(), MatPartitioningChacoSetEigenNumber(),
          MatPartitioningChacoGetEigenSolver()
@*/
PetscErrorCode MatPartitioningChacoSetEigenSolver(MatPartitioning part,MPChacoEigenType method)
{
  PetscTryMethod(part,"MatPartitioningChacoSetEigenSolver_C",(MatPartitioning,MPChacoEigenType),(part,method));
  return(0);
}

PetscErrorCode MatPartitioningChacoSetEigenSolver_Chaco(MatPartitioning part,MPChacoEigenType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  switch (method) {
  case MP_CHACO_LANCZOS:
  case MP_CHACO_RQI:
    chaco->eigen_method = method; break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Chaco: Unknown or unsupported option");
  }
  return(0);
}
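
/*
   Example (editorial addition): switching from the default Lanczos eigensolver
   to RQI/Symmlq, which sets the rqi_flag passed to interface() above.

     MatPartitioningChacoSetEigenSolver(part,MP_CHACO_RQI);
*/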
/*@
   MatPartitioningChacoGetEigenSolver - Get the eigensolver method used by the Chaco partitioner.

   Not Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  method - the method

   Level: advanced

.seealso: MatPartitioningChacoSetEigenSolver()
@*/
PetscErrorCode MatPartitioningChacoGetEigenSolver(MatPartitioning part,MPChacoEigenType *method)
{
  PetscTryMethod(part,"MatPartitioningChacoGetEigenSolver_C",(MatPartitioning,MPChacoEigenType*),(part,method));
  return(0);
}

PetscErrorCode MatPartitioningChacoGetEigenSolver_Chaco(MatPartitioning part,MPChacoEigenType *method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  *method = chaco->eigen_method;
  return(0);
}
/*@
   MatPartitioningChacoSetEigenTol - Sets the tolerance for the eigensolver.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  tol - the tolerance

   Options Database:
.  -mat_partitioning_chaco_eigen_tol <tol> - Tolerance for eigensolver

   Note:
   Must be positive. The default value is 0.001.

   Level: advanced

.seealso: MatPartitioningChacoSetEigenSolver(), MatPartitioningChacoGetEigenTol()
@*/
PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning part,PetscReal tol)
{
  PetscTryMethod(part,"MatPartitioningChacoSetEigenTol_C",(MatPartitioning,PetscReal),(part,tol));
  return(0);
}

PetscErrorCode MatPartitioningChacoSetEigenTol_Chaco(MatPartitioning part,PetscReal tol)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  if (tol==PETSC_DEFAULT) chaco->eigtol = 0.001;
  else {
    if (tol<=0.0) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_OUTOFRANGE,"Tolerance must be positive");
    chaco->eigtol = tol;
  }
  return(0);
}
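
/*
   Example (editorial addition): tightening the eigensolver tolerance from the
   default of 0.001.

     MatPartitioningChacoSetEigenTol(part,1e-5);
*/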
/*@
   MatPartitioningChacoGetEigenTol - Gets the eigensolver tolerance.

   Not Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  tol - the tolerance

   Level: advanced

.seealso: MatPartitioningChacoSetEigenTol()
@*/
PetscErrorCode MatPartitioningChacoGetEigenTol(MatPartitioning part,PetscReal *tol)
{
  PetscTryMethod(part,"MatPartitioningChacoGetEigenTol_C",(MatPartitioning,PetscReal*),(part,tol));
  return(0);
}

PetscErrorCode MatPartitioningChacoGetEigenTol_Chaco(MatPartitioning part,PetscReal *tol)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  *tol = chaco->eigtol;
  return(0);
}
/*@
   MatPartitioningChacoSetEigenNumber - Sets the number of eigenvectors to compute
   during partitioning.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  num - the number of eigenvectors

   Options Database:
.  -mat_partitioning_chaco_eigen_number <n> - Number of eigenvectors

   Note:
   Accepted values are 1, 2 or 3, indicating partitioning by bisection,
   quadrisection, or octosection.

   Level: advanced

.seealso: MatPartitioningChacoSetEigenSolver(), MatPartitioningChacoGetEigenNumber()
@*/
PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning part,PetscInt num)
{
  PetscTryMethod(part,"MatPartitioningChacoSetEigenNumber_C",(MatPartitioning,PetscInt),(part,num));
  return(0);
}

PetscErrorCode MatPartitioningChacoSetEigenNumber_Chaco(MatPartitioning part,PetscInt num)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  if (num==PETSC_DEFAULT) chaco->eignum = 1;
  else {
    if (num<1 || num>3) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_OUTOFRANGE,"Can only specify 1, 2 or 3 eigenvectors");
    chaco->eignum = num;
  }
  return(0);
}
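
/*
   Example (editorial addition): requesting two eigenvectors, i.e. quadrisection,
   so each divide step splits a set into 2^2 = 4 subsets.

     MatPartitioningChacoSetEigenNumber(part,2);
*/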
/*@
   MatPartitioningChacoGetEigenNumber - Gets the number of eigenvectors used by Chaco.

   Not Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  num - number of eigenvectors

   Level: advanced

.seealso: MatPartitioningChacoSetEigenNumber()
@*/
PetscErrorCode MatPartitioningChacoGetEigenNumber(MatPartitioning part,PetscInt *num)
{
  PetscTryMethod(part,"MatPartitioningChacoGetEigenNumber_C",(MatPartitioning,PetscInt*),(part,num));
  return(0);
}

PetscErrorCode MatPartitioningChacoGetEigenNumber_Chaco(MatPartitioning part,PetscInt *num)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;

  *num = chaco->eignum;
  return(0);
}
PetscErrorCode MatPartitioningSetFromOptions_Chaco(MatPartitioning part)
{
  PetscErrorCode        ierr;
  PetscInt              i;
  PetscReal             r;
  PetscBool             flag;
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;
  MPChacoGlobalType     global;
  MPChacoLocalType      local;
  MPChacoEigenType      eigen;

  PetscOptionsHead("Chaco partitioning options");
  PetscOptionsEnum("-mat_partitioning_chaco_global","Global method","MatPartitioningChacoSetGlobal",MPChacoGlobalTypes,(PetscEnum)chaco->global_method,(PetscEnum*)&global,&flag);
  if (flag) { MatPartitioningChacoSetGlobal(part,global); }
  PetscOptionsEnum("-mat_partitioning_chaco_local","Local method","MatPartitioningChacoSetLocal",MPChacoLocalTypes,(PetscEnum)chaco->local_method,(PetscEnum*)&local,&flag);
  if (flag) { MatPartitioningChacoSetLocal(part,local); }
  PetscOptionsReal("-mat_partitioning_chaco_coarse","Coarse level","MatPartitioningChacoSetCoarseLevel",0.0,&r,&flag);
  if (flag) { MatPartitioningChacoSetCoarseLevel(part,r); }
  PetscOptionsEnum("-mat_partitioning_chaco_eigen_solver","Eigensolver method","MatPartitioningChacoSetEigenSolver",MPChacoEigenTypes,(PetscEnum)chaco->eigen_method,(PetscEnum*)&eigen,&flag);
  if (flag) { MatPartitioningChacoSetEigenSolver(part,eigen); }
  PetscOptionsReal("-mat_partitioning_chaco_eigen_tol","Eigensolver tolerance","MatPartitioningChacoSetEigenTol",chaco->eigtol,&r,&flag);
  if (flag) { MatPartitioningChacoSetEigenTol(part,r); }
  PetscOptionsInt("-mat_partitioning_chaco_eigen_number","Number of eigenvectors: 1, 2, or 3 (bi-, quadri-, or octosection)","MatPartitioningChacoSetEigenNumber",chaco->eignum,&i,&flag);
  if (flag) { MatPartitioningChacoSetEigenNumber(part,i); }
  PetscOptionsBool("-mat_partitioning_chaco_verbose","Show library output","",chaco->verbose,&chaco->verbose,NULL);
  PetscOptionsTail();
  return(0);
}
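
/*
   Example (editorial addition): the same settings from the options database,
   either on the command line,

     -mat_partitioning_type chaco -mat_partitioning_chaco_global spectral
       -mat_partitioning_chaco_eigen_tol 1e-4 -mat_partitioning_chaco_verbose

   or programmatically before MatPartitioningSetFromOptions() is called:

     PetscOptionsSetValue("-mat_partitioning_chaco_global","spectral");
     PetscOptionsSetValue("-mat_partitioning_chaco_eigen_tol","1e-4");
*/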
PetscErrorCode MatPartitioningDestroy_Chaco(MatPartitioning part)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco*)part->data;
  PetscErrorCode        ierr;

  PetscFree(chaco);
  /* clear composed functions */
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetGlobal_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetGlobal_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetLocal_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetLocal_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetCoarseLevel_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetEigenSolver_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetEigenSolver_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetEigenTol_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetEigenTol_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetEigenNumber_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetEigenNumber_C",NULL);
  return(0);
}
/*MC
   MATPARTITIONINGCHACO - Creates a partitioning context via the external package Chaco.

   Level: beginner

   Notes: See http://www.cs.sandia.gov/CRF/chac.html

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType
M*/
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Chaco(MatPartitioning part)
{
  PetscErrorCode        ierr;
  MatPartitioning_Chaco *chaco;

  PetscNewLog(part,MatPartitioning_Chaco,&chaco);
  part->data = (void*)chaco;

  chaco->global_method = MP_CHACO_MULTILEVEL;
  chaco->local_method  = MP_CHACO_KERNIGHAN;
  chaco->eigen_method  = MP_CHACO_LANCZOS;
  chaco->nbvtxcoarsed  = 200;
  chaco->eignum        = 1;
  chaco->eigtol        = 0.001;
  chaco->verbose       = PETSC_FALSE;

  part->ops->apply          = MatPartitioningApply_Chaco;
  part->ops->view           = MatPartitioningView_Chaco;
  part->ops->destroy        = MatPartitioningDestroy_Chaco;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Chaco;

  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetGlobal_C",MatPartitioningChacoSetGlobal_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetGlobal_C",MatPartitioningChacoGetGlobal_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetLocal_C",MatPartitioningChacoSetLocal_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetLocal_C",MatPartitioningChacoGetLocal_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetCoarseLevel_C",MatPartitioningChacoSetCoarseLevel_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetEigenSolver_C",MatPartitioningChacoSetEigenSolver_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetEigenSolver_C",MatPartitioningChacoGetEigenSolver_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetEigenTol_C",MatPartitioningChacoSetEigenTol_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetEigenTol_C",MatPartitioningChacoGetEigenTol_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoSetEigenNumber_C",MatPartitioningChacoSetEigenNumber_Chaco);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningChacoGetEigenNumber_C",MatPartitioningChacoGetEigenNumber_Chaco);
  return(0);
}