Actual source code: pdvec.c
petsc-3.4.2 2013-07-02
2: /*
3: Code for some of the parallel vector primitives.
4: */
5: #include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I "petscvec.h" I*/
6: #include <petscviewerhdf5.h>
10: PetscErrorCode VecDestroy_MPI(Vec v)
11: {
12: Vec_MPI *x = (Vec_MPI*)v->data;
16: #if defined(PETSC_USE_LOG)
17: PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
18: #endif
19: if (!x) return(0);
20: PetscFree(x->array_allocated);
22: /* Destroy local representation of vector if it exists */
23: if (x->localrep) {
24: VecDestroy(&x->localrep);
25: VecScatterDestroy(&x->localupdate);
26: }
27: /* Destroy the stashes: note the order, so that the tags are freed properly */
28: VecStashDestroy_Private(&v->bstash);
29: VecStashDestroy_Private(&v->stash);
30: PetscFree(v->data);
31: return(0);
32: }
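
/*
   Illustrative sketch (not part of pdvec.c): VecDestroy_MPI() above is reached
   through the public VecDestroy() call at the end of the usual lifecycle of an
   MPI vector.  The global size of 100 is an arbitrary assumption for the
   example.
*/
#include <petscvec.h>

int main(int argc,char **argv)
{
  Vec            v;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);CHKERRQ(ierr); /* creates a VECMPI vector */
  ierr = VecSet(v,1.0);CHKERRQ(ierr);
  ierr = VecDestroy(&v);CHKERRQ(ierr);                                     /* dispatches to VecDestroy_MPI() */
  PetscFinalize();
  return 0;
}
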
36: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
37: {
38: PetscErrorCode ierr;
39: PetscInt i,work = xin->map->n,cnt,len,nLen;
40: PetscMPIInt j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
41: MPI_Status status;
42: PetscScalar *values;
43: const PetscScalar *xarray;
44: const char *name;
45: PetscViewerFormat format;
48: VecGetArrayRead(xin,&xarray);
49: /* determine maximum message to arrive */
50: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
51: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
52: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
54: if (!rank) {
55: PetscMalloc(len*sizeof(PetscScalar),&values);
56: PetscViewerGetFormat(viewer,&format);
57: /*
58: MATLAB format and ASCII format are very similar except
59: MATLAB uses %18.16e format while ASCII uses %g
60: */
61: if (format == PETSC_VIEWER_ASCII_MATLAB) {
62: PetscObjectGetName((PetscObject)xin,&name);
63: PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
64: for (i=0; i<xin->map->n; i++) {
65: #if defined(PETSC_USE_COMPLEX)
66: if (PetscImaginaryPart(xarray[i]) > 0.0) {
67: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
68: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
69: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
70: } else {
71: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
72: }
73: #else
74: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
75: #endif
76: }
77: /* receive and print messages */
78: for (j=1; j<size; j++) {
79: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
80: MPI_Get_count(&status,MPIU_SCALAR,&n);
81: for (i=0; i<n; i++) {
82: #if defined(PETSC_USE_COMPLEX)
83: if (PetscImaginaryPart(values[i]) > 0.0) {
84: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
85: } else if (PetscImaginaryPart(values[i]) < 0.0) {
86: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
87: } else {
88: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
89: }
90: #else
91: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
92: #endif
93: }
94: }
95: PetscViewerASCIIPrintf(viewer,"];\n");
97: } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
98: for (i=0; i<xin->map->n; i++) {
99: #if defined(PETSC_USE_COMPLEX)
100: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
101: #else
102: PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
103: #endif
104: }
105: /* receive and print messages */
106: for (j=1; j<size; j++) {
107: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
108: MPI_Get_count(&status,MPIU_SCALAR,&n);
109: for (i=0; i<n; i++) {
110: #if defined(PETSC_USE_COMPLEX)
111: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
112: #else
113: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
114: #endif
115: }
116: }
117: } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
118: /*
119: state 0: No header has been output
120: state 1: Only POINT_DATA has been output
121: state 2: Only CELL_DATA has been output
122: state 3: Output both, POINT_DATA last
123: state 4: Output both, CELL_DATA last
124: */
125: static PetscInt stateId = -1;
126: int outputState = 0;
127: int doOutput = 0;
128: PetscBool hasState;
129: PetscInt bs, b;
131: if (stateId < 0) {
132: PetscObjectComposedDataRegister(&stateId);
133: }
134: PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
135: if (!hasState) outputState = 0;
137: PetscObjectGetName((PetscObject) xin, &name);
138: VecGetLocalSize(xin, &nLen);
139: PetscMPIIntCast(nLen,&n);
140: VecGetBlockSize(xin, &bs);
141: if (format == PETSC_VIEWER_ASCII_VTK) {
142: if (outputState == 0) {
143: outputState = 1;
144: doOutput = 1;
145: } else if (outputState == 1) doOutput = 0;
146: else if (outputState == 2) {
147: outputState = 3;
148: doOutput = 1;
149: } else if (outputState == 3) doOutput = 0;
150: else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
152: if (doOutput) {
153: PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
154: }
155: } else {
156: if (outputState == 0) {
157: outputState = 2;
158: doOutput = 1;
159: } else if (outputState == 1) {
160: outputState = 4;
161: doOutput = 1;
162: } else if (outputState == 2) doOutput = 0;
163: else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
164: else if (outputState == 4) doOutput = 0;
166: if (doOutput) {
167: PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
168: }
169: }
170: PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
171: if (name) {
172: if (bs == 3) {
173: PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
174: } else {
175: PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
176: }
177: } else {
178: PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
179: }
180: if (bs != 3) {
181: PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
182: }
183: for (i=0; i<n/bs; i++) {
184: for (b=0; b<bs; b++) {
185: if (b > 0) {
186: PetscViewerASCIIPrintf(viewer," ");
187: }
188: PetscViewerASCIIPrintf(viewer,"%g",PetscRealPart(xarray[i*bs+b]));
189: }
190: PetscViewerASCIIPrintf(viewer,"\n");
191: }
192: for (j=1; j<size; j++) {
193: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
194: MPI_Get_count(&status,MPIU_SCALAR,&n);
195: for (i=0; i<n/bs; i++) {
196: for (b=0; b<bs; b++) {
197: if (b > 0) {
198: PetscViewerASCIIPrintf(viewer," ");
199: }
200: PetscViewerASCIIPrintf(viewer,"%g",PetscRealPart(values[i*bs+b]));
201: }
202: PetscViewerASCIIPrintf(viewer,"\n");
203: }
204: }
205: } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
206: PetscInt bs, b;
208: VecGetLocalSize(xin, &nLen);
209: PetscMPIIntCast(nLen,&n);
210: VecGetBlockSize(xin, &bs);
211: if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
213: for (i=0; i<n/bs; i++) {
214: for (b=0; b<bs; b++) {
215: if (b > 0) {
216: PetscViewerASCIIPrintf(viewer," ");
217: }
218: PetscViewerASCIIPrintf(viewer,"%g",PetscRealPart(xarray[i*bs+b]));
219: }
220: for (b=bs; b<3; b++) {
221: PetscViewerASCIIPrintf(viewer," 0.0");
222: }
223: PetscViewerASCIIPrintf(viewer,"\n");
224: }
225: for (j=1; j<size; j++) {
226: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
227: MPI_Get_count(&status,MPIU_SCALAR,&n);
228: for (i=0; i<n/bs; i++) {
229: for (b=0; b<bs; b++) {
230: if (b > 0) {
231: PetscViewerASCIIPrintf(viewer," ");
232: }
233: PetscViewerASCIIPrintf(viewer,"%g",PetscRealPart(values[i*bs+b]));
234: }
235: for (b=bs; b<3; b++) {
236: PetscViewerASCIIPrintf(viewer," 0.0");
237: }
238: PetscViewerASCIIPrintf(viewer,"\n");
239: }
240: }
241: } else if (format == PETSC_VIEWER_ASCII_PCICE) {
242: PetscInt bs, b, vertexCount = 1;
244: VecGetLocalSize(xin, &nLen);
245: PetscMPIIntCast(nLen,&n);
246: VecGetBlockSize(xin, &bs);
247: if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
249: PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
250: for (i=0; i<n/bs; i++) {
251: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
252: for (b=0; b<bs; b++) {
253: if (b > 0) {
254: PetscViewerASCIIPrintf(viewer," ");
255: }
256: #if !defined(PETSC_USE_COMPLEX)
257: PetscViewerASCIIPrintf(viewer,"% 12.5E",xarray[i*bs+b]);
258: #endif
259: }
260: PetscViewerASCIIPrintf(viewer,"\n");
261: }
262: for (j=1; j<size; j++) {
263: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
264: MPI_Get_count(&status,MPIU_SCALAR,&n);
265: for (i=0; i<n/bs; i++) {
266: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
267: for (b=0; b<bs; b++) {
268: if (b > 0) {
269: PetscViewerASCIIPrintf(viewer," ");
270: }
271: #if !defined(PETSC_USE_COMPLEX)
272: PetscViewerASCIIPrintf(viewer,"% 12.5E",values[i*bs+b]);
273: #endif
274: }
275: PetscViewerASCIIPrintf(viewer,"\n");
276: }
277: }
278: } else {
279: PetscObjectPrintClassNamePrefixType((PetscObject)xin,viewer,"Vector Object");
280: if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
281: cnt = 0;
282: for (i=0; i<xin->map->n; i++) {
283: if (format == PETSC_VIEWER_ASCII_INDEX) {
284: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
285: }
286: #if defined(PETSC_USE_COMPLEX)
287: if (PetscImaginaryPart(xarray[i]) > 0.0) {
288: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
289: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
290: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
291: } else {
292: PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(xarray[i]));
293: }
294: #else
295: PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
296: #endif
297: }
298: /* receive and print messages */
299: for (j=1; j<size; j++) {
300: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
301: MPI_Get_count(&status,MPIU_SCALAR,&n);
302: if (format != PETSC_VIEWER_ASCII_COMMON) {
303: PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
304: }
305: for (i=0; i<n; i++) {
306: if (format == PETSC_VIEWER_ASCII_INDEX) {
307: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
308: }
309: #if defined(PETSC_USE_COMPLEX)
310: if (PetscImaginaryPart(values[i]) > 0.0) {
311: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
312: } else if (PetscImaginaryPart(values[i]) < 0.0) {
313: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
314: } else {
315: PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(values[i]));
316: }
317: #else
318: PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
319: #endif
320: }
321: }
322: }
323: PetscFree(values);
324: } else {
325: PetscViewerGetFormat(viewer,&format);
326: if (format == PETSC_VIEWER_ASCII_MATLAB) {
327: /* this may be a collective operation so make sure everyone calls it */
328: PetscObjectGetName((PetscObject)xin,&name);
329: }
330: /* send values */
331: MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
332: }
333: PetscViewerFlush(viewer);
334: VecRestoreArrayRead(xin,&xarray);
335: return(0);
336: }
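
/*
   Illustrative sketch (not part of pdvec.c): VecView_MPI_ASCII() above is reached
   when an MPI vector is viewed through an ASCII viewer; selecting the
   PETSC_VIEWER_ASCII_MATLAB format takes the MATLAB-readable branch.  The file
   name "x.m", the object name "xvec" and the sizes are assumptions for the
   example.
*/
#include <petscvec.h>
#include <petscviewer.h>

int main(int argc,char **argv)
{
  Vec            x;
  PetscViewer    viewer;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,10,&x);CHKERRQ(ierr);
  ierr = VecSet(x,3.0);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)x,"xvec");CHKERRQ(ierr);           /* used as the MATLAB variable name */
  ierr = PetscViewerASCIIOpen(PETSC_COMM_WORLD,"x.m",&viewer);CHKERRQ(ierr);
  ierr = PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);                                   /* calls VecView_MPI_ASCII() */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
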
340: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
341: {
342: PetscErrorCode ierr;
343: PetscMPIInt rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
344: PetscInt len,n = xin->map->n,j,tr[2];
345: int fdes;
346: MPI_Status status;
347: PetscScalar *values;
348: const PetscScalar *xarray;
349: FILE *file;
350: #if defined(PETSC_HAVE_MPIIO)
351: PetscBool isMPIIO;
352: #endif
353: PetscBool skipHeader;
354: PetscInt message_count,flowcontrolcount;
355: PetscViewerFormat format;
358: VecGetArrayRead(xin,&xarray);
359: PetscViewerBinaryGetDescriptor(viewer,&fdes);
360: PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);
362: /* determine the rank and number of processes */
363: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
364: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
366: if (!skipHeader) {
367: tr[0] = VEC_FILE_CLASSID;
368: tr[1] = xin->map->N;
369: PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
370: }
372: #if defined(PETSC_HAVE_MPIIO)
373: PetscViewerBinaryGetMPIIO(viewer,&isMPIIO);
374: if (!isMPIIO) {
375: #endif
376: PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
377: if (!rank) {
378: PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);
380: len = 0;
381: for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
382: PetscMalloc(len*sizeof(PetscScalar),&values);
383: PetscMPIIntCast(len,&mesgsize);
384: /* receive and save messages */
385: for (j=1; j<size; j++) {
386: PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);
387: MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
388: MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
389: n = (PetscInt)mesglen;
390: PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
391: }
392: PetscViewerFlowControlEndMaster(viewer,&message_count);
393: PetscFree(values);
394: } else {
395: PetscViewerFlowControlStepWorker(viewer,rank,&message_count);
396: PetscMPIIntCast(xin->map->n,&mesgsize);
397: MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
398: PetscViewerFlowControlEndWorker(viewer,&message_count);
399: }
400: PetscViewerGetFormat(viewer,&format);
401: if (format == PETSC_VIEWER_BINARY_MATLAB) {
402: MPI_Comm comm;
403: FILE *info;
404: const char *name;
406: PetscObjectGetName((PetscObject)xin,&name);
407: PetscObjectGetComm((PetscObject)viewer,&comm);
408: PetscViewerBinaryGetInfoPointer(viewer,&info);
409: PetscFPrintf(comm,info,"%%--- begin code written by PetscViewerBinary for MATLAB format ---%\n");
410: PetscFPrintf(comm,info,"%%$$ Set.%s = PetscBinaryRead(fd);\n",name);
411: PetscFPrintf(comm,info,"%%--- end code written by PetscViewerBinary for MATLAB format ---%\n\n");
412: }
413: #if defined(PETSC_HAVE_MPIIO)
414: } else {
415: MPI_Offset off;
416: MPI_File mfdes;
417: PetscMPIInt lsize;
419: PetscMPIIntCast(n,&lsize);
420: PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
421: PetscViewerBinaryGetMPIIOOffset(viewer,&off);
422: off += xin->map->rstart*sizeof(PetscScalar); /* off is MPI_Offset, not PetscMPIInt */
423: MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
424: MPIU_File_write_all(mfdes,(void*)xarray,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
425: PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
426: }
427: #endif
429: VecRestoreArrayRead(xin,&xarray);
430: if (!rank) {
431: PetscViewerBinaryGetInfoPointer(viewer,&file);
432: if (file) {
433: if (((PetscObject)xin)->prefix) {
434: PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,xin->map->bs);
435: } else {
436: PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->map->bs);
437: }
438: }
439: }
440: return(0);
441: }
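
/*
   Illustrative sketch (not part of pdvec.c): VecView_MPI_Binary() above is reached
   when the viewer has type PETSCVIEWERBINARY.  A common round trip is to write
   with VecView() and read back with VecLoad(); the file name "vec.dat" and the
   sizes are assumptions for the example.
*/
#include <petscvec.h>
#include <petscviewer.h>

int main(int argc,char **argv)
{
  Vec            x,y;
  PetscViewer    viewer;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,20,&x);CHKERRQ(ierr);
  ierr = VecSet(x,2.5);CHKERRQ(ierr);

  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);                                   /* calls VecView_MPI_Binary() */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);

  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_WORLD,&y);CHKERRQ(ierr);
  ierr = VecLoad(y,viewer);CHKERRQ(ierr);                                   /* reads the header and data written above */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);

  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
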
443: #include <petscdraw.h>
446: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
447: {
448: PetscDraw draw;
449: PetscBool isnull;
452: #if defined(PETSC_USE_64BIT_INDICES)
454: PetscViewerDrawGetDraw(viewer,0,&draw);
455: PetscDrawIsNull(draw,&isnull);
456: if (isnull) return(0);
457: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported with 64 bit integers");
458: #else
459: PetscMPIInt size,rank;
460: int i,N = xin->map->N,*lens;
461: PetscReal *xx,*yy;
462: PetscDrawLG lg;
463: const PetscScalar *xarray;
466: PetscViewerDrawGetDraw(viewer,0,&draw);
467: PetscDrawIsNull(draw,&isnull);
468: if (isnull) return(0);
470: VecGetArrayRead(xin,&xarray);
471: PetscViewerDrawGetDrawLG(viewer,0,&lg);
472: PetscDrawCheckResizedWindow(draw);
473: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
474: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
475: if (!rank) {
476: PetscDrawLGReset(lg);
477: PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
478: for (i=0; i<N; i++) xx[i] = (PetscReal) i;
479: yy = xx + N;
480: PetscMalloc(size*sizeof(PetscInt),&lens);
481: for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];
482:
483: #if !defined(PETSC_USE_COMPLEX)
484: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
485: #else
486: {
487: PetscReal *xr;
488: PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
489: for (i=0; i<xin->map->n; i++) xr[i] = PetscRealPart(xarray[i]);
491: MPI_Gatherv(xr,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
492: PetscFree(xr);
493: }
494: #endif
495: PetscFree(lens);
496: PetscDrawLGAddPoints(lg,N,&xx,&yy);
497: PetscFree(xx);
498: } else {
499: #if !defined(PETSC_USE_COMPLEX)
500: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
501: #else
502: {
503: PetscReal *xr;
504: PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
505: for (i=0; i<xin->map->n; i++) xr[i] = PetscRealPart(xarray[i]);
507: MPI_Gatherv(xr,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
508: PetscFree(xr);
509: }
510: #endif
511: }
512: PetscDrawLGDraw(lg);
513: PetscDrawSynchronizedFlush(draw);
514: VecRestoreArrayRead(xin,&xarray);
515: #endif
516: return(0);
517: }
521: PetscErrorCode VecView_MPI_Draw(Vec xin,PetscViewer viewer)
522: {
523: PetscErrorCode ierr;
524: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
525: PetscInt i,start,end;
526: MPI_Status status;
527: PetscReal coors[4],ymin,ymax,xmin,xmax,tmp;
528: PetscDraw draw;
529: PetscBool isnull;
530: PetscDrawAxis axis;
531: const PetscScalar *xarray;
534: PetscViewerDrawGetDraw(viewer,0,&draw);
535: PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
537: VecGetArrayRead(xin,&xarray);
538: PetscDrawCheckResizedWindow(draw);
539: xmin = 1.e20; xmax = -1.e20;
540: for (i=0; i<xin->map->n; i++) {
541: #if defined(PETSC_USE_COMPLEX)
542: if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
543: if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
544: #else
545: if (xarray[i] < xmin) xmin = xarray[i];
546: if (xarray[i] > xmax) xmax = xarray[i];
547: #endif
548: }
549: if (xmin + 1.e-10 > xmax) {
550: xmin -= 1.e-5;
551: xmax += 1.e-5;
552: }
553: MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPIU_MIN,0,PetscObjectComm((PetscObject)xin));
554: MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPIU_MAX,0,PetscObjectComm((PetscObject)xin));
555: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
556: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
557: PetscDrawAxisCreate(draw,&axis);
558: PetscLogObjectParent(draw,axis);
559: if (!rank) {
560: PetscDrawClear(draw);
561: PetscDrawFlush(draw);
562: PetscDrawAxisSetLimits(axis,0.0,(double)xin->map->N,ymin,ymax);
563: PetscDrawAxisDraw(axis);
564: PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
565: }
566: PetscDrawAxisDestroy(&axis);
567: MPI_Bcast(coors,4,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
568: if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
569: /* draw local part of vector */
570: VecGetOwnershipRange(xin,&start,&end);
571: if (rank < size-1) { /* send value to right */
572: MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
573: }
574: for (i=1; i<xin->map->n; i++) {
575: #if !defined(PETSC_USE_COMPLEX)
576: PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),xarray[i],PETSC_DRAW_RED);
577: #else
578: PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
579: #endif
580: }
581: if (rank) { /* receive value from left neighbor */
582: MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
583: #if !defined(PETSC_USE_COMPLEX)
584: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
585: #else
586: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
587: #endif
588: }
589: PetscDrawSynchronizedFlush(draw);
590: PetscDrawPause(draw);
591: VecRestoreArrayRead(xin,&xarray);
592: return(0);
593: }
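
/*
   Illustrative sketch (not part of pdvec.c, assumes a PETSc build with X windows
   support): the two draw routines above are selected by the viewer format; the
   default draws line segments between neighbouring entries (VecView_MPI_Draw),
   while PETSC_VIEWER_DRAW_LG uses the line-graph object (VecView_MPI_Draw_LG).
   Window title and size are assumptions for the example.
*/
#include <petscvec.h>
#include <petscviewer.h>

int main(int argc,char **argv)
{
  Vec            x;
  PetscViewer    viewer;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,50,&x);CHKERRQ(ierr);
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = PetscViewerDrawOpen(PETSC_COMM_WORLD,NULL,"Vec",PETSC_DECIDE,PETSC_DECIDE,300,300,&viewer);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);                                   /* VecView_MPI_Draw() */
  ierr = PetscViewerSetFormat(viewer,PETSC_VIEWER_DRAW_LG);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);                                   /* VecView_MPI_Draw_LG() */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
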
595: #if defined(PETSC_HAVE_MATLAB_ENGINE)
598: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
599: {
600: PetscErrorCode ierr;
601: PetscMPIInt rank,size,*lens;
602: PetscInt i,N = xin->map->N;
603: const PetscScalar *xarray;
604: PetscScalar *xx;
607: VecGetArrayRead(xin,&xarray);
608: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
609: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
610: if (!rank) {
611: PetscMalloc(N*sizeof(PetscScalar),&xx);
612: PetscMalloc(size*sizeof(PetscMPIInt),&lens);
613: for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];
615: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
616: PetscFree(lens);
618: PetscObjectName((PetscObject)xin);
619: PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);
621: PetscFree(xx);
622: } else {
623: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
624: }
625: VecRestoreArrayRead(xin,&xarray);
626: return(0);
627: }
628: #endif
630: #if defined(PETSC_HAVE_HDF5)
633: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
634: {
635: /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
636: hid_t filespace; /* file dataspace identifier */
637: hid_t chunkspace; /* chunk dataset property identifier */
638: hid_t plist_id; /* property list identifier */
639: hid_t dset_id; /* dataset identifier */
640: hid_t memspace; /* memory dataspace identifier */
641: hid_t file_id;
642: hid_t group;
643: hid_t scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
644: herr_t status;
645: PetscInt bs = xin->map->bs > 0 ? xin->map->bs : 1;
646: hsize_t dim;
647: hsize_t maxDims[4], dims[4], chunkDims[4], count[4],offset[4];
648: PetscInt timestep;
649: PetscInt low;
650: const PetscScalar *x;
651: const char *vecname;
652: PetscErrorCode ierr;
655: PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
656: PetscViewerHDF5GetTimestep(viewer, &timestep);
658: /* Create the dataspace for the dataset.
659: *
660: * dims - holds the current dimensions of the dataset
661: *
662: * maxDims - holds the maximum dimensions of the dataset (unlimited
663: * for the number of time steps with the current dimensions for the
664: * other dimensions; so only additional time steps can be added).
665: *
666: * chunkDims - holds the size of a single time step (required to
667: * permit extending dataset).
668: */
669: dim = 0;
670: if (timestep >= 0) {
671: dims[dim] = timestep+1;
672: maxDims[dim] = H5S_UNLIMITED;
673: chunkDims[dim] = 1;
674: ++dim;
675: }
676: PetscHDF5IntCast(xin->map->N/bs,dims + dim);
678: maxDims[dim] = dims[dim];
679: chunkDims[dim] = dims[dim];
680: ++dim;
681: if (bs >= 1) {
682: dims[dim] = bs;
683: maxDims[dim] = dims[dim];
684: chunkDims[dim] = dims[dim];
685: ++dim;
686: }
687: #if defined(PETSC_USE_COMPLEX)
688: dims[dim] = 2;
689: maxDims[dim] = dims[dim];
690: chunkDims[dim] = dims[dim];
691: ++dim;
692: #endif
693: filespace = H5Screate_simple(dim, dims, maxDims);
694: if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");
696: #if defined(PETSC_USE_REAL_SINGLE)
697: scalartype = H5T_NATIVE_FLOAT;
698: #elif defined(PETSC_USE_REAL___FLOAT128)
699: #error "HDF5 output with 128 bit floats not supported."
700: #else
701: scalartype = H5T_NATIVE_DOUBLE;
702: #endif
704: /* Create the dataset with default properties and close filespace */
705: PetscObjectGetName((PetscObject) xin, &vecname);
706: if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
707: /* Create chunk */
708: chunkspace = H5Pcreate(H5P_DATASET_CREATE);
709: if (chunkspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
710: status = H5Pset_chunk(chunkspace, dim, chunkDims);CHKERRQ(status);
712: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
713: dset_id = H5Dcreate2(group, vecname, scalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT);
714: #else
715: dset_id = H5Dcreate(group, vecname, scalartype, filespace, H5P_DEFAULT);
716: #endif
717: if (dset_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dcreate2()");
718: status = H5Pclose(chunkspace);CHKERRQ(status);
719: } else {
720: dset_id = H5Dopen2(group, vecname, H5P_DEFAULT);
721: status = H5Dset_extent(dset_id, dims);CHKERRQ(status);
722: }
723: status = H5Sclose(filespace);CHKERRQ(status);
725: /* Each process defines a dataset and writes it to the hyperslab in the file */
726: dim = 0;
727: if (timestep >= 0) {
728: count[dim] = 1;
729: ++dim;
730: }
731: PetscHDF5IntCast(xin->map->n/bs,count + dim);
732: ++dim;
733: if (bs >= 1) {
734: count[dim] = bs;
735: ++dim;
736: }
737: #if defined(PETSC_USE_COMPLEX)
738: count[dim] = 2;
739: ++dim;
740: #endif
741: if (xin->map->n > 0) {
742: memspace = H5Screate_simple(dim, count, NULL);
743: if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");
744: } else {
745: /* Can't create dataspace with zero for any dimension, so create null dataspace. */
746: memspace = H5Screate(H5S_NULL);
747: if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate()");
748: }
750: /* Select hyperslab in the file */
751: VecGetOwnershipRange(xin, &low, NULL);
752: dim = 0;
753: if (timestep >= 0) {
754: offset[dim] = timestep;
755: ++dim;
756: }
757: PetscHDF5IntCast(low/bs,offset + dim);
758: ++dim;
759: if (bs >= 1) {
760: offset[dim] = 0;
761: ++dim;
762: }
763: #if defined(PETSC_USE_COMPLEX)
764: offset[dim] = 0;
765: ++dim;
766: #endif
767: if (xin->map->n > 0) {
768: filespace = H5Dget_space(dset_id);
769: if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dget_space()");
770: status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
771: } else {
772: /* Create null filespace to match null memspace. */
773: filespace = H5Screate(H5S_NULL);
774: if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate(H5S_NULL)");
775: }
777: /* Create property list for collective dataset write */
778: plist_id = H5Pcreate(H5P_DATASET_XFER);
779: if (plist_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
780: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
781: status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
782: #endif
783: /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
785: VecGetArrayRead(xin, &x);
786: status = H5Dwrite(dset_id, scalartype, memspace, filespace, plist_id, x);CHKERRQ(status);
787: status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
788: VecRestoreArrayRead(xin, &x);
790: /* Close/release resources */
791: if (group != file_id) {
792: status = H5Gclose(group);CHKERRQ(status);
793: }
794: status = H5Pclose(plist_id);CHKERRQ(status);
795: status = H5Sclose(filespace);CHKERRQ(status);
796: status = H5Sclose(memspace);CHKERRQ(status);
797: status = H5Dclose(dset_id);CHKERRQ(status);
798: PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
799: return(0);
800: }
801: #endif
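
/*
   Illustrative sketch (not part of pdvec.c, assumes a PETSc build configured with
   HDF5): VecView_MPI_HDF5() above is reached when the viewer has type
   PETSCVIEWERHDF5.  The file name "sol.h5", the object name "solution" and the
   sizes are assumptions for the example.
*/
#include <petscvec.h>
#include <petscviewerhdf5.h>

int main(int argc,char **argv)
{
  Vec            x;
  PetscViewer    viewer;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)x,"solution");CHKERRQ(ierr);       /* becomes the HDF5 dataset name */
  ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD,"sol.h5",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);                                   /* calls VecView_MPI_HDF5() */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
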
805: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
806: {
808: PetscBool iascii,isbinary,isdraw;
809: #if defined(PETSC_HAVE_MATHEMATICA)
810: PetscBool ismathematica;
811: #endif
812: #if defined(PETSC_HAVE_HDF5)
813: PetscBool ishdf5;
814: #endif
815: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
816: PetscBool ismatlab;
817: #endif
820: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
821: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
822: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
823: #if defined(PETSC_HAVE_MATHEMATICA)
824: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
825: #endif
826: #if defined(PETSC_HAVE_HDF5)
827: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
828: #endif
829: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
830: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
831: #endif
832: if (iascii) {
833: VecView_MPI_ASCII(xin,viewer);
834: } else if (isbinary) {
835: VecView_MPI_Binary(xin,viewer);
836: } else if (isdraw) {
837: PetscViewerFormat format;
839: PetscViewerGetFormat(viewer,&format);
840: if (format == PETSC_VIEWER_DRAW_LG) {
841: VecView_MPI_Draw_LG(xin,viewer);
842: } else {
843: VecView_MPI_Draw(xin,viewer);
844: }
845: #if defined(PETSC_HAVE_MATHEMATICA)
846: } else if (ismathematica) {
847: PetscViewerMathematicaPutVector(viewer,xin);
848: #endif
849: #if defined(PETSC_HAVE_HDF5)
850: } else if (ishdf5) {
851: VecView_MPI_HDF5(xin,viewer);
852: #endif
853: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
854: } else if (ismatlab) {
855: VecView_MPI_Matlab(xin,viewer);
856: #endif
857: }
858: return(0);
859: }
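
/*
   Illustrative sketch (not part of pdvec.c): the dispatcher above selects a
   back end by comparing the viewer's type string with PetscObjectTypeCompare().
   The same test can be used in application code; the helper name below is
   hypothetical and the fragment is meant to be compiled as part of a larger
   program.
*/
#include <petscvec.h>
#include <petscviewer.h>

PetscErrorCode MyVecMaybeView(Vec x,PetscViewer viewer)
{
  PetscBool      iascii,isdraw;
  PetscErrorCode ierr;

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii || isdraw) {               /* only view for the viewer types this code cares about */
    ierr = VecView(x,viewer);CHKERRQ(ierr);
  }
  return 0;
}
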
863: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
864: {
866: *N = xin->map->N;
867: return(0);
868: }
872: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
873: {
874: const PetscScalar *xx;
875: PetscInt i,tmp,start = xin->map->range[xin->stash.rank];
876: PetscErrorCode ierr;
879: VecGetArrayRead(xin,&xx);
880: for (i=0; i<ni; i++) {
881: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
882: tmp = ix[i] - start;
883: #if defined(PETSC_USE_DEBUG)
884: if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
885: #endif
886: y[i] = xx[tmp];
887: }
888: VecRestoreArrayRead(xin,&xx);
889: return(0);
890: }
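
/*
   Illustrative sketch (not part of pdvec.c): as the error check above shows,
   VecGetValues() on an MPI vector can only retrieve locally owned entries, so
   the indices are normally built from the ownership range.  The global size of
   40 is an assumption for the example.
*/
#include <petscvec.h>

int main(int argc,char **argv)
{
  Vec            x;
  PetscInt       i,rstart,rend,*ix;
  PetscScalar    *y;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,40,&x);CHKERRQ(ierr);
  ierr = VecSet(x,4.0);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(x,&rstart,&rend);CHKERRQ(ierr);
  ierr = PetscMalloc((rend-rstart+1)*sizeof(PetscInt),&ix);CHKERRQ(ierr);   /* +1 avoids a zero-size request */
  ierr = PetscMalloc((rend-rstart+1)*sizeof(PetscScalar),&y);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) ix[i-rstart] = i;                             /* locally owned global indices only */
  ierr = VecGetValues(x,rend-rstart,ix,y);CHKERRQ(ierr);                    /* dispatches to VecGetValues_MPI() */
  ierr = PetscFree(ix);CHKERRQ(ierr);
  ierr = PetscFree(y);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
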
894: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
895: {
897: PetscMPIInt rank = xin->stash.rank;
898: PetscInt *owners = xin->map->range,start = owners[rank];
899: PetscInt end = owners[rank+1],i,row;
900: PetscScalar *xx;
903: #if defined(PETSC_USE_DEBUG)
904: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
905: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
906: #endif
907: VecGetArray(xin,&xx);
908: xin->stash.insertmode = addv;
910: if (addv == INSERT_VALUES) {
911: for (i=0; i<ni; i++) {
912: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
913: #if defined(PETSC_USE_DEBUG)
914: if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
915: #endif
916: if ((row = ix[i]) >= start && row < end) {
917: xx[row-start] = y[i];
918: } else if (!xin->stash.donotstash) {
919: #if defined(PETSC_USE_DEBUG)
920: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
921: #endif
922: VecStashValue_Private(&xin->stash,row,y[i]);
923: }
924: }
925: } else {
926: for (i=0; i<ni; i++) {
927: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
928: #if defined(PETSC_USE_DEBUG)
929: if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
930: #endif
931: if ((row = ix[i]) >= start && row < end) {
932: xx[row-start] += y[i];
933: } else if (!xin->stash.donotstash) {
934: #if defined(PETSC_USE_DEBUG)
935: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
936: #endif
937: VecStashValue_Private(&xin->stash,row,y[i]);
938: }
939: }
940: }
941: VecRestoreArray(xin,&xx);
942: return(0);
943: }
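
/*
   Illustrative sketch (not part of pdvec.c): entries passed to VecSetValues() for
   rows owned by another process are stashed by the routine above and only
   delivered during VecAssemblyBegin()/VecAssemblyEnd().  Here rank 0 adds into
   the globally last entry regardless of which process owns it; the size of 30 is
   an assumption for the example.
*/
#include <petscvec.h>

int main(int argc,char **argv)
{
  Vec            x;
  PetscMPIInt    rank;
  PetscInt       idx = 29;
  PetscScalar    val = 1.0;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,30,&x);CHKERRQ(ierr);
  ierr = VecSet(x,0.0);CHKERRQ(ierr);
  if (!rank) {                                                              /* possibly an off-process entry */
    ierr = VecSetValues(x,1,&idx,&val,ADD_VALUES);CHKERRQ(ierr);            /* VecSetValues_MPI() stashes it */
  }
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);                                 /* communicates stashed entries */
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
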
947: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
948: {
949: PetscMPIInt rank = xin->stash.rank;
950: PetscInt *owners = xin->map->range,start = owners[rank];
952: PetscInt end = owners[rank+1],i,row,bs = xin->map->bs,j;
953: PetscScalar *xx,*y = (PetscScalar*)yin;
956: VecGetArray(xin,&xx);
957: #if defined(PETSC_USE_DEBUG)
958: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
959: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
960: #endif
961: xin->stash.insertmode = addv;
963: if (addv == INSERT_VALUES) {
964: for (i=0; i<ni; i++) {
965: if ((row = bs*ix[i]) >= start && row < end) {
966: for (j=0; j<bs; j++) xx[row-start+j] = y[j];
967: } else if (!xin->stash.donotstash) {
968: if (ix[i] < 0) continue;
969: #if defined(PETSC_USE_DEBUG)
970: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
971: #endif
972: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
973: }
974: y += bs;
975: }
976: } else {
977: for (i=0; i<ni; i++) {
978: if ((row = bs*ix[i]) >= start && row < end) {
979: for (j=0; j<bs; j++) xx[row-start+j] += y[j];
980: } else if (!xin->stash.donotstash) {
981: if (ix[i] < 0) continue;
982: #if defined(PETSC_USE_DEBUG)
983: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
984: #endif
985: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
986: }
987: y += bs;
988: }
989: }
990: VecRestoreArray(xin,&xx);
991: return(0);
992: }
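
/*
   Illustrative sketch (not part of pdvec.c): VecSetValuesBlocked() indexes by
   block, so with block size 3 the block index b addresses global entries
   3*b .. 3*b+2.  The local size of 6 (two blocks per process) and the values are
   assumptions for the example; off-process block indices would be stashed by the
   routine above in the same way.
*/
#include <petscvec.h>

int main(int argc,char **argv)
{
  Vec            x;
  PetscMPIInt    rank;
  PetscInt       bidx;
  PetscScalar    vals[3] = {1.0,2.0,3.0};
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,6,PETSC_DETERMINE);CHKERRQ(ierr);                    /* two blocks of size 3 per process */
  ierr = VecSetBlockSize(x,3);CHKERRQ(ierr);
  ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
  bidx = 2*rank;                                                            /* first block owned by this process */
  ierr = VecSetValuesBlocked(x,1,&bidx,vals,INSERT_VALUES);CHKERRQ(ierr);   /* VecSetValuesBlocked_MPI() */
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
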
994: /*
995: Since nsends or nreceives may be zero we add 1 in certain mallocs
996: to make sure we never malloc an empty one.
997: */
1000: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1001: {
1003: PetscInt *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1004: PetscMPIInt size;
1005: InsertMode addv;
1006: MPI_Comm comm;
1009: PetscObjectGetComm((PetscObject)xin,&comm);
1010: if (xin->stash.donotstash) return(0);
1012: MPI_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
1013: if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1014: xin->stash.insertmode = addv; /* in case this processor had no cache */
1016: bs = xin->map->bs;
1017: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
1018: if (!xin->bstash.bowners && xin->map->bs != -1) {
1019: PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
1020: for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
1021: xin->bstash.bowners = bowners;
1022: } else bowners = xin->bstash.bowners;
1024: VecStashScatterBegin_Private(&xin->stash,owners);
1025: VecStashScatterBegin_Private(&xin->bstash,bowners);
1026: VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1027: PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1028: VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1029: PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1030: return(0);
1031: }
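
/*
   Illustrative sketch (not part of pdvec.c): because the stash communication is
   split into VecAssemblyBegin() and VecAssemblyEnd(), independent local work can
   be overlapped with the messages started by the routine above.  The sizes and
   the unrelated work (a norm of a second vector) are assumptions for the
   example.
*/
#include <petscvec.h>

int main(int argc,char **argv)
{
  Vec            x,w;
  PetscReal      nrm;
  PetscInt       idx = 0;
  PetscScalar    one = 1.0;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,50,&x);CHKERRQ(ierr);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,50,&w);CHKERRQ(ierr);
  ierr = VecSet(x,0.0);CHKERRQ(ierr);
  ierr = VecSet(w,2.0);CHKERRQ(ierr);
  ierr = VecSetValues(x,1,&idx,&one,ADD_VALUES);CHKERRQ(ierr);              /* may be an off-process entry */
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);                                 /* VecAssemblyBegin_MPI() starts the stash scatter */
  ierr = VecNorm(w,NORM_2,&nrm);CHKERRQ(ierr);                              /* unrelated work overlapped with the communication */
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);                                   /* completes the scatter and applies stashed values */
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&w);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
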
1035: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1036: {
1038: PetscInt base,i,j,*row,flg,bs;
1039: PetscMPIInt n;
1040: PetscScalar *val,*vv,*array,*xarray;
1043: if (!vec->stash.donotstash) {
1044: VecGetArray(vec,&xarray);
1045: base = vec->map->range[vec->stash.rank];
1046: bs = vec->map->bs;
1048: /* Process the stash */
1049: while (1) {
1050: VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1051: if (!flg) break;
1052: if (vec->stash.insertmode == ADD_VALUES) {
1053: for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
1054: } else if (vec->stash.insertmode == INSERT_VALUES) {
1055: for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
1056: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1057: }
1058: VecStashScatterEnd_Private(&vec->stash);
1060: /* now process the block-stash */
1061: while (1) {
1062: VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1063: if (!flg) break;
1064: for (i=0; i<n; i++) {
1065: array = xarray+row[i]*bs-base;
1066: vv = val+i*bs;
1067: if (vec->stash.insertmode == ADD_VALUES) {
1068: for (j=0; j<bs; j++) array[j] += vv[j];
1069: } else if (vec->stash.insertmode == INSERT_VALUES) {
1070: for (j=0; j<bs; j++) array[j] = vv[j];
1071: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1072: }
1073: }
1074: VecStashScatterEnd_Private(&vec->bstash);
1075: VecRestoreArray(vec,&xarray);
1076: }
1077: vec->stash.insertmode = NOT_SET_VALUES;
1078: return(0);
1079: }