Actual source code: pdvec.c


  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5: #include <../src/vec/vec/impls/mpi/pvecimpl.h>
  6: #include <petsc/private/viewerhdf5impl.h>
  7: #include <petsc/private/glvisviewerimpl.h>
  8: #include <petsc/private/glvisvecimpl.h>

 10: PetscErrorCode VecDestroy_MPI(Vec v)
 11: {
 12:   Vec_MPI        *x = (Vec_MPI*)v->data;

 16: #if defined(PETSC_USE_LOG)
 17:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
 18: #endif
 19:   if (!x) return(0);
 20:   PetscFree(x->array_allocated);

 22:   /* Destroy local representation of vector if it exists */
 23:   if (x->localrep) {
 24:     VecDestroy(&x->localrep);
 25:     VecScatterDestroy(&x->localupdate);
 26:   }
 27:   VecAssemblyReset_MPI(v);

 29:   /* Destroy the stashes: note the order - so that the tags are freed properly */
 30:   VecStashDestroy_Private(&v->bstash);
 31:   VecStashDestroy_Private(&v->stash);
 32:   PetscFree(v->data);
 33:   return(0);
 34: }
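
/*
   Editor's note: a minimal lifecycle sketch for the MPI vector type whose
   destructor appears above. This is an illustration, not part of pdvec.c;
   the function name is hypothetical, and it assumes petscvec.h has been
   included and PetscInitialize() called. Error checking is omitted for
   brevity, matching this listing's style.
*/
static PetscErrorCode ExampleVecLifecycle(MPI_Comm comm)
{
  Vec v;

  VecCreateMPI(comm,PETSC_DECIDE,100,&v); /* 100 global entries; PETSc picks the local split */
  VecSet(v,1.0);
  VecDestroy(&v);                         /* for VECMPI this dispatches to VecDestroy_MPI() above */
  return 0;
}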

 36: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 37: {
 38:   PetscErrorCode    ierr;
 39:   PetscInt          i,work = xin->map->n,cnt,len,nLen;
 40:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 41:   MPI_Status        status;
 42:   PetscScalar       *values;
 43:   const PetscScalar *xarray;
 44:   const char        *name;
 45:   PetscViewerFormat format;

 48:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
 49:   PetscViewerGetFormat(viewer,&format);
 50:   if (format == PETSC_VIEWER_LOAD_BALANCE) {
 51:     PetscInt nmax = 0,nmin = xin->map->n,navg;
 52:     for (i=0; i<(PetscInt)size; i++) {
 53:       nmax = PetscMax(nmax,xin->map->range[i+1] - xin->map->range[i]);
 54:       nmin = PetscMin(nmin,xin->map->range[i+1] - xin->map->range[i]);
 55:     }
 56:     navg = xin->map->N/size;
 57:     PetscViewerASCIIPrintf(viewer,"  Load Balance - Local vector size Min %D  avg %D  max %D\n",nmin,navg,nmax);
 58:     return(0);
 59:   }

 61:   VecGetArrayRead(xin,&xarray);
 62:   /* determine the maximum message length that will arrive */
 63:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
 64:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
 65:   if (format == PETSC_VIEWER_ASCII_GLVIS) { rank = 0; len = 0; } /* no parallel distributed write support from GLVis */
 66:   if (rank == 0) {
 67:     PetscMalloc1(len,&values);
 68:     /*
 69:         MATLAB format and ASCII format are very similar except
 70:         MATLAB uses %18.16e format while ASCII uses %g
 71:     */
 72:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 73:       PetscObjectGetName((PetscObject)xin,&name);
 74:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 75:       for (i=0; i<xin->map->n; i++) {
 76: #if defined(PETSC_USE_COMPLEX)
 77:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 78:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
 79:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 80:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
 81:         } else {
 82:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
 83:         }
 84: #else
 85:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
 86: #endif
 87:       }
 88:       /* receive and print messages */
 89:       for (j=1; j<size; j++) {
 90:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
 91:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 92:         for (i=0; i<n; i++) {
 93: #if defined(PETSC_USE_COMPLEX)
 94:           if (PetscImaginaryPart(values[i]) > 0.0) {
 95:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
 96:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 97:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
 98:           } else {
 99:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
100:           }
101: #else
102:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
103: #endif
104:         }
105:       }
106:       PetscViewerASCIIPrintf(viewer,"];\n");

108:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
109:       for (i=0; i<xin->map->n; i++) {
110: #if defined(PETSC_USE_COMPLEX)
111:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
112: #else
113:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
114: #endif
115:       }
116:       /* receive and print messages */
117:       for (j=1; j<size; j++) {
118:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
119:         MPI_Get_count(&status,MPIU_SCALAR,&n);
120:         for (i=0; i<n; i++) {
121: #if defined(PETSC_USE_COMPLEX)
122:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
123: #else
124:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
125: #endif
126:         }
127:       }
128:     } else if (format == PETSC_VIEWER_ASCII_VTK_DEPRECATED || format == PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED) {
129:       /*
130:         state 0: No header has been output
131:         state 1: Only POINT_DATA has been output
132:         state 2: Only CELL_DATA has been output
133:         state 3: Output both, POINT_DATA last
134:         state 4: Output both, CELL_DATA last
135:       */
136:       static PetscInt stateId     = -1;
137:       int             outputState = 0;
138:       int             doOutput    = 0;
139:       PetscBool       hasState;
140:       PetscInt        bs, b;

142:       if (stateId < 0) {
143:         PetscObjectComposedDataRegister(&stateId);
144:       }
145:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
146:       if (!hasState) outputState = 0;

148:       PetscObjectGetName((PetscObject)xin,&name);
149:       VecGetLocalSize(xin, &nLen);
150:       PetscMPIIntCast(nLen,&n);
151:       VecGetBlockSize(xin, &bs);
152:       if (format == PETSC_VIEWER_ASCII_VTK_DEPRECATED) {
153:         if (outputState == 0) {
154:           outputState = 1;
155:           doOutput    = 1;
156:         } else if (outputState == 1) doOutput = 0;
157:         else if (outputState == 2) {
158:           outputState = 3;
159:           doOutput    = 1;
160:         } else if (outputState == 3) doOutput = 0;
161:         else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");

163:         if (doOutput) {
164:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %D\n", xin->map->N/bs);
165:         }
166:       } else {
167:         if (outputState == 0) {
168:           outputState = 2;
169:           doOutput    = 1;
170:         } else if (outputState == 1) {
171:           outputState = 4;
172:           doOutput    = 1;
173:         } else if (outputState == 2) doOutput = 0;
174:         else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
175:         else if (outputState == 4) doOutput = 0;

177:         if (doOutput) {
178:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %D\n", xin->map->N/bs);
179:         }
180:       }
181:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
182:       if (name) {
183:         if (bs == 3) {
184:           PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
185:         } else {
186:           PetscViewerASCIIPrintf(viewer, "SCALARS %s double %D\n", name, bs);
187:         }
188:       } else {
189:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %D\n", bs);
190:       }
191:       if (bs != 3) {
192:         PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
193:       }
194:       for (i=0; i<n/bs; i++) {
195:         for (b=0; b<bs; b++) {
196:           if (b > 0) {
197:             PetscViewerASCIIPrintf(viewer," ");
198:           }
199:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
200:         }
201:         PetscViewerASCIIPrintf(viewer,"\n");
202:       }
203:       for (j=1; j<size; j++) {
204:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
205:         MPI_Get_count(&status,MPIU_SCALAR,&n);
206:         for (i=0; i<n/bs; i++) {
207:           for (b=0; b<bs; b++) {
208:             if (b > 0) {
209:               PetscViewerASCIIPrintf(viewer," ");
210:             }
211:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
212:           }
213:           PetscViewerASCIIPrintf(viewer,"\n");
214:         }
215:       }
216:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS_DEPRECATED) {
217:       PetscInt bs, b;

219:       VecGetLocalSize(xin, &nLen);
220:       PetscMPIIntCast(nLen,&n);
221:       VecGetBlockSize(xin, &bs);
222:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle up to 3D objects, but vector dimension is %D", bs);

224:       for (i=0; i<n/bs; i++) {
225:         for (b=0; b<bs; b++) {
226:           if (b > 0) {
227:             PetscViewerASCIIPrintf(viewer," ");
228:           }
229:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
230:         }
231:         for (b=bs; b<3; b++) {
232:           PetscViewerASCIIPrintf(viewer," 0.0");
233:         }
234:         PetscViewerASCIIPrintf(viewer,"\n");
235:       }
236:       for (j=1; j<size; j++) {
237:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
238:         MPI_Get_count(&status,MPIU_SCALAR,&n);
239:         for (i=0; i<n/bs; i++) {
240:           for (b=0; b<bs; b++) {
241:             if (b > 0) {
242:               PetscViewerASCIIPrintf(viewer," ");
243:             }
244:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
245:           }
246:           for (b=bs; b<3; b++) {
247:             PetscViewerASCIIPrintf(viewer," 0.0");
248:           }
249:           PetscViewerASCIIPrintf(viewer,"\n");
250:         }
251:       }
252:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
253:       PetscInt bs, b, vertexCount = 1;

255:       VecGetLocalSize(xin, &nLen);
256:       PetscMPIIntCast(nLen,&n);
257:       VecGetBlockSize(xin, &bs);
258:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %D", bs);

260:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
261:       for (i=0; i<n/bs; i++) {
262:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
263:         for (b=0; b<bs; b++) {
264:           if (b > 0) {
265:             PetscViewerASCIIPrintf(viewer," ");
266:           }
267: #if !defined(PETSC_USE_COMPLEX)
268:           PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
269: #endif
270:         }
271:         PetscViewerASCIIPrintf(viewer,"\n");
272:       }
273:       for (j=1; j<size; j++) {
274:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
275:         MPI_Get_count(&status,MPIU_SCALAR,&n);
276:         for (i=0; i<n/bs; i++) {
277:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
278:           for (b=0; b<bs; b++) {
279:             if (b > 0) {
280:               PetscViewerASCIIPrintf(viewer," ");
281:             }
282: #if !defined(PETSC_USE_COMPLEX)
283:             PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
284: #endif
285:           }
286:           PetscViewerASCIIPrintf(viewer,"\n");
287:         }
288:       }
289:     } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
290:       /* GLVis ASCII visualization/dump: this function mimics mfem::GridFunction::Save() */
291:       const PetscScalar       *array;
292:       PetscInt                i,n,vdim, ordering = 1; /* mfem::FiniteElementSpace::Ordering::byVDIM */
293:       PetscContainer          glvis_container;
294:       PetscViewerGLVisVecInfo glvis_vec_info;
295:       PetscViewerGLVisInfo    glvis_info;
296:       PetscErrorCode          ierr;

298:       /* mfem::FiniteElementSpace::Save() */
299:       VecGetBlockSize(xin,&vdim);
300:       PetscViewerASCIIPrintf(viewer,"FiniteElementSpace\n");
301:       PetscObjectQuery((PetscObject)xin,"_glvis_info_container",(PetscObject*)&glvis_container);
302:       if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_PLIB,"Missing GLVis container");
303:       PetscContainerGetPointer(glvis_container,(void**)&glvis_vec_info);
304:       PetscViewerASCIIPrintf(viewer,"%s\n",glvis_vec_info->fec_type);
305:       PetscViewerASCIIPrintf(viewer,"VDim: %D\n",vdim);
306:       PetscViewerASCIIPrintf(viewer,"Ordering: %D\n",ordering);
307:       PetscViewerASCIIPrintf(viewer,"\n");
308:       /* mfem::Vector::Print() */
309:       PetscObjectQuery((PetscObject)viewer,"_glvis_info_container",(PetscObject*)&glvis_container);
310:       if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_PLIB,"Missing GLVis container");
311:       PetscContainerGetPointer(glvis_container,(void**)&glvis_info);
312:       if (glvis_info->enabled) {
313:         VecGetLocalSize(xin,&n);
314:         VecGetArrayRead(xin,&array);
315:         for (i=0;i<n;i++) {
316:           PetscViewerASCIIPrintf(viewer,glvis_info->fmt,(double)PetscRealPart(array[i]));
317:           PetscViewerASCIIPrintf(viewer,"\n");
318:         }
319:         VecRestoreArrayRead(xin,&array);
320:       }
321:     } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
322:       /* No info */
323:     } else {
324:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
325:       cnt = 0;
326:       for (i=0; i<xin->map->n; i++) {
327:         if (format == PETSC_VIEWER_ASCII_INDEX) {
328:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
329:         }
330: #if defined(PETSC_USE_COMPLEX)
331:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
332:           PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
333:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
334:           PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
335:         } else {
336:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
337:         }
338: #else
339:         PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
340: #endif
341:       }
342:       /* receive and print messages */
343:       for (j=1; j<size; j++) {
344:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
345:         MPI_Get_count(&status,MPIU_SCALAR,&n);
346:         if (format != PETSC_VIEWER_ASCII_COMMON) {
347:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
348:         }
349:         for (i=0; i<n; i++) {
350:           if (format == PETSC_VIEWER_ASCII_INDEX) {
351:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
352:           }
353: #if defined(PETSC_USE_COMPLEX)
354:           if (PetscImaginaryPart(values[i]) > 0.0) {
355:             PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
356:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
357:             PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
358:           } else {
359:             PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
360:           }
361: #else
362:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
363: #endif
364:         }
365:       }
366:     }
367:     PetscFree(values);
368:   } else {
369:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
370:       /* Rank 0 is not trying to receive anything, so don't send anything */
371:     } else {
372:       if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK_DEPRECATED || format == PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED) {
373:         /* this may be a collective operation so make sure everyone calls it */
374:         PetscObjectGetName((PetscObject)xin,&name);
375:       }
376:       MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
377:     }
378:   }
379:   PetscViewerFlush(viewer);
380:   VecRestoreArrayRead(xin,&xarray);
381:   return(0);
382: }
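
/*
   Editor's note: a sketch of how a caller reaches the MATLAB branch of
   VecView_MPI_ASCII() above. Illustration only, not part of pdvec.c; the
   function name and the object name "x" are hypothetical. Error checking
   is omitted for brevity.
*/
static PetscErrorCode ExampleViewMatlabFormat(Vec v)
{
  PetscViewer viewer = PETSC_VIEWER_STDOUT_WORLD;

  PetscObjectSetName((PetscObject)v,"x");                  /* name printed in "x = [ ... ];" */
  PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB); /* selects the %18.16e MATLAB branch */
  VecView(v,viewer);
  PetscViewerPopFormat(viewer);
  return 0;
}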

384: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
385: {
386:   return VecView_Binary(xin,viewer);
387: }
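
/*
   Editor's note: VecView_MPI_Binary() above simply forwards to
   VecView_Binary(); a typical save looks like the sketch below.
   Illustration only; the function name and the file name "vec.dat" are
   hypothetical, and error checking is omitted.
*/
static PetscErrorCode ExampleBinarySave(Vec v)
{
  PetscViewer viewer;

  PetscViewerBinaryOpen(PetscObjectComm((PetscObject)v),"vec.dat",FILE_MODE_WRITE,&viewer);
  VecView(v,viewer);   /* dispatches to VecView_MPI_Binary() for a VECMPI vector */
  PetscViewerDestroy(&viewer);
  return 0;
}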

389: #include <petscdraw.h>
390: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
391: {
392:   PetscDraw         draw;
393:   PetscBool         isnull;
394:   PetscDrawLG       lg;
395:   PetscMPIInt       i,size,rank,n,N,*lens = NULL,*disp = NULL;
396:   PetscReal         *values, *xx = NULL,*yy = NULL;
397:   const PetscScalar *xarray;
398:   int               colors[] = {PETSC_DRAW_RED};
399:   PetscErrorCode    ierr;

402:   PetscViewerDrawGetDraw(viewer,0,&draw);
403:   PetscDrawIsNull(draw,&isnull);
404:   if (isnull) return(0);
405:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
406:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
407:   PetscMPIIntCast(xin->map->n,&n);
408:   PetscMPIIntCast(xin->map->N,&N);

410:   VecGetArrayRead(xin,&xarray);
411: #if defined(PETSC_USE_COMPLEX)
412:   PetscMalloc1(n+1,&values);
413:   for (i=0; i<n; i++) values[i] = PetscRealPart(xarray[i]);
414: #else
415:   values = (PetscReal*)xarray;
416: #endif
417:   if (rank == 0) {
418:     PetscMalloc2(N,&xx,N,&yy);
419:     for (i=0; i<N; i++) xx[i] = (PetscReal)i;
420:     PetscMalloc2(size,&lens,size,&disp);
421:     for (i=0; i<size; i++) lens[i] = (PetscMPIInt)xin->map->range[i+1] - (PetscMPIInt)xin->map->range[i];
422:     for (i=0; i<size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
423:   }
424:   MPI_Gatherv(values,n,MPIU_REAL,yy,lens,disp,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
425:   PetscFree2(lens,disp);
426: #if defined(PETSC_USE_COMPLEX)
427:   PetscFree(values);
428: #endif
429:   VecRestoreArrayRead(xin,&xarray);

431:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
432:   PetscDrawLGReset(lg);
433:   PetscDrawLGSetDimension(lg,1);
434:   PetscDrawLGSetColors(lg,colors);
435:   if (rank == 0) {
436:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
437:     PetscFree2(xx,yy);
438:   }
439:   PetscDrawLGDraw(lg);
440:   PetscDrawLGSave(lg);
441:   return(0);
442: }

444: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
445: {
446:   PetscErrorCode    ierr;
447:   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
448:   PetscInt          i,start,end;
449:   MPI_Status        status;
450:   PetscReal         min,max,tmp = 0.0;
451:   PetscDraw         draw;
452:   PetscBool         isnull;
453:   PetscDrawAxis     axis;
454:   const PetscScalar *xarray;

457:   PetscViewerDrawGetDraw(viewer,0,&draw);
458:   PetscDrawIsNull(draw,&isnull);
459:   if (isnull) return(0);
460:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
461:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);

463:   VecMin(xin,NULL,&min);
464:   VecMax(xin,NULL,&max);
465:   if (min == max) {
466:     min -= 1.e-5;
467:     max += 1.e-5;
468:   }

470:   PetscDrawCheckResizedWindow(draw);
471:   PetscDrawClear(draw);

473:   PetscDrawAxisCreate(draw,&axis);
474:   PetscDrawAxisSetLimits(axis,0.0,(PetscReal)xin->map->N,min,max);
475:   PetscDrawAxisDraw(axis);
476:   PetscDrawAxisDestroy(&axis);

478:   /* draw local part of vector */
479:   VecGetArrayRead(xin,&xarray);
480:   VecGetOwnershipRange(xin,&start,&end);
481:   if (rank < size-1) { /* send value to right */
482:     MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
483:   }
484:   if (rank) { /* receive value from left */
485:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
486:   }
487:   PetscDrawCollectiveBegin(draw);
488:   if (rank) {
489:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
490:   }
491:   for (i=1; i<xin->map->n; i++) {
492:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
493:   }
494:   PetscDrawCollectiveEnd(draw);
495:   VecRestoreArrayRead(xin,&xarray);

497:   PetscDrawFlush(draw);
498:   PetscDrawPause(draw);
499:   PetscDrawSave(draw);
500:   return(0);
501: }
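
/*
   Editor's note: both draw routines above are normally reached through a
   draw viewer; pushing PETSC_VIEWER_DRAW_LG selects the line-graph variant,
   otherwise VecView_MPI_Draw() is used. Sketch only, not part of pdvec.c;
   the function name and window title are hypothetical, and error checking
   is omitted.
*/
static PetscErrorCode ExampleDrawView(Vec v)
{
  PetscViewer viewer;

  PetscViewerDrawOpen(PetscObjectComm((PetscObject)v),NULL,"Vec entries",PETSC_DECIDE,PETSC_DECIDE,300,300,&viewer);
  PetscViewerPushFormat(viewer,PETSC_VIEWER_DRAW_LG); /* omit this to get VecView_MPI_Draw() instead */
  VecView(v,viewer);
  PetscViewerPopFormat(viewer);
  PetscViewerDestroy(&viewer);
  return 0;
}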

503: #if defined(PETSC_HAVE_MATLAB_ENGINE)
504: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
505: {
506:   PetscErrorCode    ierr;
507:   PetscMPIInt       rank,size,*lens;
508:   PetscInt          i,N = xin->map->N;
509:   const PetscScalar *xarray;
510:   PetscScalar       *xx;

513:   VecGetArrayRead(xin,&xarray);
514:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
515:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
516:   if (rank == 0) {
517:     PetscMalloc1(N,&xx);
518:     PetscMalloc1(size,&lens);
519:     for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];

521:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
522:     PetscFree(lens);

524:     PetscObjectName((PetscObject)xin);
525:     PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

527:     PetscFree(xx);
528:   } else {
529:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
530:   }
531:   VecRestoreArrayRead(xin,&xarray);
532:   return(0);
533: }
534: #endif

536: #if defined(PETSC_HAVE_ADIOS)
537: #include <adios.h>
538: #include <adios_read.h>
539: #include <petsc/private/vieweradiosimpl.h>
540: #include <petsc/private/viewerimpl.h>

542: PetscErrorCode VecView_MPI_ADIOS(Vec xin, PetscViewer viewer)
543: {
544:   PetscViewer_ADIOS *adios = (PetscViewer_ADIOS*)viewer->data;
545:   PetscErrorCode    ierr;
546:   const char        *vecname;
547:   int64_t           id;
548:   PetscInt          n,N,rstart;
549:   const PetscScalar *array;
550:   char              nglobalname[16],nlocalname[16],coffset[16];

553:   PetscObjectGetName((PetscObject) xin, &vecname);

555:   VecGetLocalSize(xin,&n);
556:   VecGetSize(xin,&N);
557:   VecGetOwnershipRange(xin,&rstart,NULL);

559:   sprintf(nlocalname,"%d",(int)n);
560:   sprintf(nglobalname,"%d",(int)N);
561:   sprintf(coffset,"%d",(int)rstart);
562:   id   = adios_define_var(Petsc_adios_group,vecname,"",adios_double,nlocalname,nglobalname,coffset);
563:   VecGetArrayRead(xin,&array);
564:   adios_write_byid(adios->adios_handle,id,array);
565:   VecRestoreArrayRead(xin,&array);

567:   return(0);
568: }
569: #endif

571: #if defined(PETSC_HAVE_HDF5)
572: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
573: {
574:   PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5*) viewer->data;
575:   /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
576:   hid_t             filespace; /* file dataspace identifier */
577:   hid_t             chunkspace; /* chunk dataset property identifier */
578:   hid_t             dset_id;   /* dataset identifier */
579:   hid_t             memspace;  /* memory dataspace identifier */
580:   hid_t             file_id;
581:   hid_t             group;
582:   hid_t             memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
583:   hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
584:   PetscInt          bs = PetscAbs(xin->map->bs);
585:   hsize_t           dim;
586:   hsize_t           maxDims[4], dims[4], chunkDims[4], count[4], offset[4];
587:   PetscBool         timestepping, dim2, spoutput;
588:   PetscInt          timestep=PETSC_MIN_INT, low;
589:   hsize_t           chunksize;
590:   const PetscScalar *x;
591:   const char        *vecname;
592:   PetscErrorCode    ierr;

595:   PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
596:   PetscViewerHDF5IsTimestepping(viewer, &timestepping);
597:   if (timestepping) {
598:     PetscViewerHDF5GetTimestep(viewer, &timestep);
599:   }
600:   PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
601:   PetscViewerHDF5GetSPOutput(viewer,&spoutput);

603:   /* Create the dataspace for the dataset.
604:    *
605:    * dims - holds the current dimensions of the dataset
606:    *
607:    * maxDims - holds the maximum dimensions of the dataset (unlimited
608:    * for the number of time steps with the current dimensions for the
609:    * other dimensions; so only additional time steps can be added).
610:    *
611:    * chunkDims - holds the size of a single time step (required to
612:    * permit extending dataset).
613:    */
614:   dim = 0;
615:   chunksize = 1;
616:   if (timestep >= 0) {
617:     dims[dim]      = timestep+1;
618:     maxDims[dim]   = H5S_UNLIMITED;
619:     chunkDims[dim] = 1;
620:     ++dim;
621:   }
622:   PetscHDF5IntCast(xin->map->N/bs,dims + dim);

624:   maxDims[dim]   = dims[dim];
625:   chunkDims[dim] = PetscMax(1, dims[dim]);
626:   chunksize      *= chunkDims[dim];
627:   ++dim;
628:   if (bs > 1 || dim2) {
629:     dims[dim]      = bs;
630:     maxDims[dim]   = dims[dim];
631:     chunkDims[dim] = PetscMax(1, dims[dim]);
632:     chunksize      *= chunkDims[dim];
633:     ++dim;
634:   }
635: #if defined(PETSC_USE_COMPLEX)
636:   dims[dim]      = 2;
637:   maxDims[dim]   = dims[dim];
638:   chunkDims[dim] = PetscMax(1, dims[dim]);
639:   chunksize      *= chunkDims[dim];
640:   /* hdf5 chunks must be less than 4GB */
641:   if (chunksize > PETSC_HDF5_MAX_CHUNKSIZE/64) {
642:     if (bs > 1 || dim2) {
643:       if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128))) {
644:         chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128));
645:       } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128))) {
646:         chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128));
647:       }
648:     } else {
649:       chunkDims[dim-1] = PETSC_HDF5_MAX_CHUNKSIZE/128;
650:     }
651:   }
652:   ++dim;
653: #else
654:   /* hdf5 chunks must be less than 4GB */
655:   if (chunksize > PETSC_HDF5_MAX_CHUNKSIZE/64) {
656:     if (bs > 1 || dim2) {
657:       if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64))) {
658:         chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64));
659:       } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64))) {
660:         chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64));
661:       }
662:     } else {
663:       chunkDims[dim-1] = PETSC_HDF5_MAX_CHUNKSIZE/64;
664:     }
665:   }
666: #endif

668:   PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

670: #if defined(PETSC_USE_REAL_SINGLE)
671:   memscalartype = H5T_NATIVE_FLOAT;
672:   filescalartype = H5T_NATIVE_FLOAT;
673: #elif defined(PETSC_USE_REAL___FLOAT128)
674: #error "HDF5 output with 128 bit floats not supported."
675: #elif defined(PETSC_USE_REAL___FP16)
676: #error "HDF5 output with 16 bit floats not supported."
677: #else
678:   memscalartype = H5T_NATIVE_DOUBLE;
679:   if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
680:   else filescalartype = H5T_NATIVE_DOUBLE;
681: #endif

683:   /* Create the dataset with default properties and close filespace */
684:   PetscObjectGetName((PetscObject) xin, &vecname);
685:   if (H5Lexists(group, vecname, H5P_DEFAULT) < 1) {
686:     /* Create chunk */
687:     PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
688:     PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

690:     PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
691:     PetscStackCallHDF5(H5Pclose,(chunkspace));
692:   } else {
693:     PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
694:     PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
695:   }
696:   PetscStackCallHDF5(H5Sclose,(filespace));

698:   /* Each process defines a dataset and writes it to the hyperslab in the file */
699:   dim = 0;
700:   if (timestep >= 0) {
701:     count[dim] = 1;
702:     ++dim;
703:   }
704:   PetscHDF5IntCast(xin->map->n/bs,count + dim);
705:   ++dim;
706:   if (bs > 1 || dim2) {
707:     count[dim] = bs;
708:     ++dim;
709:   }
710: #if defined(PETSC_USE_COMPLEX)
711:   count[dim] = 2;
712:   ++dim;
713: #endif
714:   if (xin->map->n > 0 || H5_VERSION_GE(1,10,0)) {
715:     PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
716:   } else {
717:     /* Can't create dataspace with zero for any dimension, so create null dataspace. */
718:     PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
719:   }

721:   /* Select hyperslab in the file */
722:   VecGetOwnershipRange(xin, &low, NULL);
723:   dim  = 0;
724:   if (timestep >= 0) {
725:     offset[dim] = timestep;
726:     ++dim;
727:   }
728:   PetscHDF5IntCast(low/bs,offset + dim);
729:   ++dim;
730:   if (bs > 1 || dim2) {
731:     offset[dim] = 0;
732:     ++dim;
733:   }
734: #if defined(PETSC_USE_COMPLEX)
735:   offset[dim] = 0;
736:   ++dim;
737: #endif
738:   if (xin->map->n > 0 || H5_VERSION_GE(1,10,0)) {
739:     PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
740:     PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
741:   } else {
742:     /* Create null filespace to match null memspace. */
743:     PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
744:   }

746:   VecGetArrayRead(xin, &x);
747:   PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
748:   PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
749:   VecRestoreArrayRead(xin, &x);

751:   /* Close/release resources */
752:   PetscStackCallHDF5(H5Gclose,(group));
753:   PetscStackCallHDF5(H5Sclose,(filespace));
754:   PetscStackCallHDF5(H5Sclose,(memspace));
755:   PetscStackCallHDF5(H5Dclose,(dset_id));

757: #if defined(PETSC_USE_COMPLEX)
758:   {
759:     PetscBool   tru = PETSC_TRUE;
760:     PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"complex",PETSC_BOOL,&tru);
761:   }
762: #endif
763:   PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"timestepping",PETSC_BOOL,&timestepping);
764:   PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
765:   return(0);
766: }
767: #endif
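
/*
   Editor's note: a sketch of driving VecView_MPI_HDF5() above. It requires
   a PETSc build with HDF5; the dataset name comes from the object name,
   which the routine above reads with PetscObjectGetName(). Illustration
   only, not part of pdvec.c; the function name, file name "out.h5", and
   dataset name "solution" are hypothetical. Error checking is omitted.
*/
#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode ExampleHDF5Save(Vec v)
{
  PetscViewer viewer;

  PetscViewerHDF5Open(PetscObjectComm((PetscObject)v),"out.h5",FILE_MODE_WRITE,&viewer);
  PetscObjectSetName((PetscObject)v,"solution"); /* becomes the HDF5 dataset name */
  VecView(v,viewer);
  PetscViewerDestroy(&viewer);
  return 0;
}
#endif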

769: PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
770: {
772:   PetscBool      iascii,isbinary,isdraw;
773: #if defined(PETSC_HAVE_MATHEMATICA)
774:   PetscBool      ismathematica;
775: #endif
776: #if defined(PETSC_HAVE_HDF5)
777:   PetscBool      ishdf5;
778: #endif
779: #if defined(PETSC_HAVE_MATLAB_ENGINE)
780:   PetscBool      ismatlab;
781: #endif
782: #if defined(PETSC_HAVE_ADIOS)
783:   PetscBool      isadios;
784: #endif
785:   PetscBool      isglvis;

788:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
789:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
790:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
791: #if defined(PETSC_HAVE_MATHEMATICA)
792:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
793: #endif
794: #if defined(PETSC_HAVE_HDF5)
795:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
796: #endif
797: #if defined(PETSC_HAVE_MATLAB_ENGINE)
798:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
799: #endif
800:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
801: #if defined(PETSC_HAVE_ADIOS)
802:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS,&isadios);
803: #endif
804:   if (iascii) {
805:     VecView_MPI_ASCII(xin,viewer);
806:   } else if (isbinary) {
807:     VecView_MPI_Binary(xin,viewer);
808:   } else if (isdraw) {
809:     PetscViewerFormat format;
810:     PetscViewerGetFormat(viewer,&format);
811:     if (format == PETSC_VIEWER_DRAW_LG) {
812:       VecView_MPI_Draw_LG(xin,viewer);
813:     } else {
814:       VecView_MPI_Draw(xin,viewer);
815:     }
816: #if defined(PETSC_HAVE_MATHEMATICA)
817:   } else if (ismathematica) {
818:     PetscViewerMathematicaPutVector(viewer,xin);
819: #endif
820: #if defined(PETSC_HAVE_HDF5)
821:   } else if (ishdf5) {
822:     VecView_MPI_HDF5(xin,viewer);
823: #endif
824: #if defined(PETSC_HAVE_ADIOS)
825:   } else if (isadios) {
826:     VecView_MPI_ADIOS(xin,viewer);
827: #endif
828: #if defined(PETSC_HAVE_MATLAB_ENGINE)
829:   } else if (ismatlab) {
830:     VecView_MPI_Matlab(xin,viewer);
831: #endif
832:   } else if (isglvis) {
833:     VecView_GLVis(xin,viewer);
834:   }
835:   return(0);
836: }
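
/*
   Editor's note: callers rarely select a branch of VecView_MPI() by hand;
   the usual route is the options database, as sketched below. Illustration
   only; the function name and the option name "-my_vec_view" are
   hypothetical. Error checking is omitted.
*/
static PetscErrorCode ExampleViewFromOptions(Vec v)
{
  /* honours e.g. -my_vec_view, -my_vec_view binary:vec.dat, -my_vec_view draw */
  VecViewFromOptions(v,NULL,"-my_vec_view");
  return 0;
}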

838: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
839: {
841:   *N = xin->map->N;
842:   return(0);
843: }

845: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
846: {
847:   const PetscScalar *xx;
848:   PetscInt          i,tmp,start = xin->map->range[xin->stash.rank];
849:   PetscErrorCode    ierr;

852:   VecGetArrayRead(xin,&xx);
853:   for (i=0; i<ni; i++) {
854:     if (xin->stash.ignorenegidx && ix[i] < 0) continue;
855:     tmp = ix[i] - start;
856:     if (PetscUnlikelyDebug(tmp < 0 || tmp >= xin->map->n)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
857:     y[i] = xx[tmp];
858:   }
859:   VecRestoreArrayRead(xin,&xx);
860:   return(0);
861: }
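
/*
   Editor's note: as the range check above enforces, VecGetValues() on an
   MPI vector can only fetch locally owned entries; a safe pattern is to
   query the ownership range first. Sketch only, not part of pdvec.c; the
   function name is hypothetical, and error checking is omitted.
*/
static PetscErrorCode ExampleGetLocalValues(Vec v)
{
  PetscInt    start,end,i;
  PetscScalar y;

  VecGetOwnershipRange(v,&start,&end);
  for (i=start; i<end; i++) {
    VecGetValues(v,1,&i,&y); /* global index i is owned on this rank, so this is legal */
  }
  return 0;
}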

863: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
864: {
866:   PetscMPIInt    rank    = xin->stash.rank;
867:   PetscInt       *owners = xin->map->range,start = owners[rank];
868:   PetscInt       end     = owners[rank+1],i,row;
869:   PetscScalar    *xx;

872:   if (PetscDefined(USE_DEBUG)) {
873:     if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
874:     else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
875:   }
876:   VecGetArray(xin,&xx);
877:   xin->stash.insertmode = addv;

879:   if (addv == INSERT_VALUES) {
880:     for (i=0; i<ni; i++) {
881:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
882:       if (PetscUnlikelyDebug(ix[i] < 0)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
883:       if ((row = ix[i]) >= start && row < end) {
884:         xx[row-start] = y[i];
885:       } else if (!xin->stash.donotstash) {
886:         if (PetscUnlikelyDebug(ix[i] >= xin->map->N)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
887:         VecStashValue_Private(&xin->stash,row,y[i]);
888:       }
889:     }
890:   } else {
891:     for (i=0; i<ni; i++) {
892:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
893:       if (PetscUnlikelyDebug(ix[i] < 0)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
894:       if ((row = ix[i]) >= start && row < end) {
895:         xx[row-start] += y[i];
896:       } else if (!xin->stash.donotstash) {
897:         if (PetscUnlikelyDebug(ix[i] >= xin->map->N)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
898:         VecStashValue_Private(&xin->stash,row,y[i]);
899:       }
900:     }
901:   }
902:   VecRestoreArray(xin,&xx);
903:   return(0);
904: }
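
/*
   Editor's note: off-process entries set above go into the stash and only
   reach their owner during assembly, so VecSetValues() must always be
   followed by the assembly pair. Sketch only, not part of pdvec.c; the
   function name and values are hypothetical, and error checking is omitted.
*/
static PetscErrorCode ExampleSetValues(Vec v)
{
  PetscInt    ix[2] = {0,1};            /* global indices, possibly owned by another rank */
  PetscScalar y[2]  = {3.0,4.0};

  VecSetValues(v,2,ix,y,INSERT_VALUES); /* stashes any non-local entries */
  VecAssemblyBegin(v);                  /* VecAssemblyBegin_MPI() below */
  VecAssemblyEnd(v);                    /* VecAssemblyEnd_MPI() below */
  return 0;
}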

906: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
907: {
908:   PetscMPIInt    rank    = xin->stash.rank;
909:   PetscInt       *owners = xin->map->range,start = owners[rank];
911:   PetscInt       end = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
912:   PetscScalar    *xx,*y = (PetscScalar*)yin;

915:   VecGetArray(xin,&xx);
916:   if (PetscDefined(USE_DEBUG)) {
917:     if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
918:     else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
919:   }
920:   xin->stash.insertmode = addv;

922:   if (addv == INSERT_VALUES) {
923:     for (i=0; i<ni; i++) {
924:       if ((row = bs*ix[i]) >= start && row < end) {
925:         for (j=0; j<bs; j++) xx[row-start+j] = y[j];
926:       } else if (!xin->stash.donotstash) {
927:         if (ix[i] < 0) { y += bs; continue; }
928:         if (PetscUnlikelyDebug(ix[i] >= xin->map->N)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
929:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
930:       }
931:       y += bs;
932:     }
933:   } else {
934:     for (i=0; i<ni; i++) {
935:       if ((row = bs*ix[i]) >= start && row < end) {
936:         for (j=0; j<bs; j++) xx[row-start+j] += y[j];
937:       } else if (!xin->stash.donotstash) {
938:         if (ix[i] < 0) { y += bs; continue; }
939:         if (PetscUnlikelyDebug(ix[i] >= xin->map->N)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
940:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
941:       }
942:       y += bs;
943:     }
944:   }
945:   VecRestoreArray(xin,&xx);
946:   return(0);
947: }
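
/*
   Editor's note: blocked insertion addresses whole blocks, so for block
   size bs the routine above takes ix[] in units of blocks and bs values
   per index. Sketch only, not part of pdvec.c; the function name and
   values are hypothetical, it assumes the vector's sizes are divisible by
   the block size, and error checking is omitted.
*/
static PetscErrorCode ExampleSetValuesBlocked(Vec v)
{
  PetscInt    ix[1] = {2};           /* block index: entries 6,7,8 for bs = 3 */
  PetscScalar y[3]  = {1.0,2.0,3.0}; /* one block of bs values */

  VecSetBlockSize(v,3);              /* set before blocked insertion */
  VecSetValuesBlocked(v,1,ix,y,INSERT_VALUES);
  VecAssemblyBegin(v);
  VecAssemblyEnd(v);
  return 0;
}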

949: /*
950:    Since nsends or nreceives may be zero we add 1 in certain mallocs
951: to ensure we never allocate a zero-length array.
952: */
953: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
954: {
956:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
957:   PetscMPIInt    size;
958:   InsertMode     addv;
959:   MPI_Comm       comm;

962:   PetscObjectGetComm((PetscObject)xin,&comm);
963:   if (xin->stash.donotstash) return(0);

965:   MPIU_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
966:   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
967:   xin->stash.insertmode = addv; /* in case this processor had no cache */
968:   xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */

970:   VecGetBlockSize(xin,&bs);
971:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
972:   if (!xin->bstash.bowners && xin->map->bs != -1) {
973:     PetscMalloc1(size+1,&bowners);
974:     for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
975:     xin->bstash.bowners = bowners;
976:   } else bowners = xin->bstash.bowners;

978:   VecStashScatterBegin_Private(&xin->stash,owners);
979:   VecStashScatterBegin_Private(&xin->bstash,bowners);
980:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
981:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
982:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
983:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
984:   return(0);
985: }

987: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
988: {
990:   PetscInt       base,i,j,*row,flg,bs;
991:   PetscMPIInt    n;
992:   PetscScalar    *val,*vv,*array,*xarray;

995:   if (!vec->stash.donotstash) {
996:     VecGetArray(vec,&xarray);
997:     VecGetBlockSize(vec,&bs);
998:     base = vec->map->range[vec->stash.rank];

1000:     /* Process the stash */
1001:     while (1) {
1002:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1003:       if (!flg) break;
1004:       if (vec->stash.insertmode == ADD_VALUES) {
1005:         for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
1006:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1007:         for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
1008:       } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1009:     }
1010:     VecStashScatterEnd_Private(&vec->stash);

1012:     /* now process the block-stash */
1013:     while (1) {
1014:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1015:       if (!flg) break;
1016:       for (i=0; i<n; i++) {
1017:         array = xarray+row[i]*bs-base;
1018:         vv    = val+i*bs;
1019:         if (vec->stash.insertmode == ADD_VALUES) {
1020:           for (j=0; j<bs; j++) array[j] += vv[j];
1021:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1022:           for (j=0; j<bs; j++) array[j] = vv[j];
1023:         } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1024:       }
1025:     }
1026:     VecStashScatterEnd_Private(&vec->bstash);
1027:     VecRestoreArray(vec,&xarray);
1028:   }
1029:   vec->stash.insertmode = NOT_SET_VALUES;
1030:   return(0);
1031: }
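
/*
   Editor's note: because the stash communication started in
   VecAssemblyBegin_MPI() only completes in VecAssemblyEnd_MPI(), unrelated
   work can be overlapped between the two calls. Sketch only, not part of
   pdvec.c; the function name is hypothetical, and error checking is
   omitted.
*/
static PetscErrorCode ExampleOverlapAssembly(Vec v)
{
  VecAssemblyBegin(v);
  /* ... computation that does not touch v can run while stashed values are in flight ... */
  VecAssemblyEnd(v);
  return 0;
}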